From d123b6174743f080a0eb8264b224569eaf952550 Mon Sep 17 00:00:00 2001
From: Tamas Nemeth
Date: Tue, 5 Dec 2023 17:16:35 +0100
Subject: [PATCH 1/6] feat(ingest/athena): Enable Athena view ingestion and view lineage (#9354)

---
 .../datahub/ingestion/source/sql/athena.py    | 25 +++++++++++++++++--
 .../ingestion/source/sql/sql_common.py        |  2 ++
 .../src/datahub/utilities/sqlglot_lineage.py  |  2 ++
 3 files changed, 27 insertions(+), 2 deletions(-)

diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
index ac0e2bd4bb8a9..c3759875b2769 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/athena.py
@@ -8,7 +8,8 @@
 from pyathena.common import BaseCursor
 from pyathena.model import AthenaTableMetadata
 from pyathena.sqlalchemy_athena import AthenaRestDialect
-from sqlalchemy import create_engine, inspect, types
+from sqlalchemy import create_engine, exc, inspect, text, types
+from sqlalchemy.engine import reflection
 from sqlalchemy.engine.reflection import Inspector
 from sqlalchemy.types import TypeEngine
 from sqlalchemy_bigquery import STRUCT
@@ -64,6 +65,22 @@ class CustomAthenaRestDialect(AthenaRestDialect):
     # regex to identify complex types in DDL strings which are embedded in `<>`.
     _complex_type_pattern = re.compile(r"(<.+>)")

+    @typing.no_type_check
+    @reflection.cache
+    def get_view_definition(self, connection, view_name, schema=None, **kw):
+        # This method was backported from PyAthena v3.0.7 to allow to retrieve the view definition
+        # from Athena. This is required until we support sqlalchemy > 2.0
+        # https://github.com/laughingman7743/PyAthena/blob/509dd37d0fd15ad603993482cc47b8549b82facd/pyathena/sqlalchemy/base.py#L1118
+        raw_connection = self._raw_connection(connection)
+        schema = schema if schema else raw_connection.schema_name  # type: ignore
+        query = f"""SHOW CREATE VIEW "{schema}"."{view_name}";"""
+        try:
+            res = connection.scalars(text(query))
+        except exc.OperationalError as e:
+            raise exc.NoSuchTableError(f"{schema}.{view_name}") from e
+        else:
+            return "\n".join([r for r in res])
+
     @typing.no_type_check
     def _get_column_type(
         self, type_: Union[str, Dict[str, Any]]
@@ -236,7 +253,7 @@ class AthenaConfig(SQLCommonConfig):

     # overwrite default behavior of SQLAlchemyConfing
     include_views: Optional[bool] = pydantic.Field(
-        default=False, description="Whether views should be ingested."
+        default=True, description="Whether views should be ingested."
     )

     _s3_staging_dir_population = pydantic_renamed_field(
@@ -303,6 +320,10 @@ def get_inspectors(self) -> Iterable[Inspector]:
             inspector = inspect(conn)
             yield inspector

+    def get_db_schema(self, dataset_identifier: str) -> Tuple[Optional[str], str]:
+        schema, _view = dataset_identifier.split(".", 1)
+        return None, schema
+
     def get_table_properties(
         self, inspector: Inspector, schema: str, table: str
     ) -> Tuple[Optional[str], Dict[str, str], Optional[str]]:
diff --git a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py
index 67af6b2010c83..590bc7f696784 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/sql/sql_common.py
@@ -371,6 +371,8 @@ def get_db_name(self, inspector: Inspector) -> str:
         engine = inspector.engine

         if engine and hasattr(engine, "url") and hasattr(engine.url, "database"):
+            if engine.url.database is None:
+                return ""
             return str(engine.url.database).strip('"').lower()
         else:
             raise Exception("Unable to get database name from Sqlalchemy inspector")
diff --git a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
index c2cccf9f1e389..fc3efef2ba532 100644
--- a/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
+++ b/metadata-ingestion/src/datahub/utilities/sqlglot_lineage.py
@@ -962,6 +962,8 @@ def _get_dialect(platform: str) -> str:
         return "hive"
     if platform == "mssql":
         return "tsql"
+    if platform == "athena":
+        return "trino"
     else:
         return platform
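Context for patch 1: Athena's query engine is Presto/Trino-based, which is why `_get_dialect` now returns "trino" for `platform == "athena"`. A minimal sketch (not part of the patch, using only the open-source sqlglot package; the schema and table names are invented) of how a view body parsed under that dialect yields its upstream tables:

import sqlglot
from sqlglot import exp

# A fragment of the kind of SQL that "SHOW CREATE VIEW" returns.
view_body = 'SELECT order_id, CAST(ts AS timestamp) AS ts FROM "raw"."orders"'

# The "trino" dialect handles Athena's quoting and type syntax.
expression = sqlglot.parse_one(view_body, read="trino")
upstreams = [table.sql() for table in expression.find_all(exp.Table)]
print(upstreams)  # ['"raw"."orders"'] -> the table feeding the view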
From 3ee82590cd2ab7da08b5ad8b19b1e4dd988023d9 Mon Sep 17 00:00:00 2001
From: Tamas Nemeth
Date: Tue, 5 Dec 2023 17:42:29 +0100
Subject: [PATCH 2/6] fix(ingest/redshift): Identify materialized views properly + fix connection args support (#9368)

---
 .../docs/sources/redshift/redshift_recipe.yml |  4 +-
 metadata-ingestion/setup.py                   |  8 +-
 .../ingestion/source/redshift/config.py       | 23 ++++-
 .../ingestion/source/redshift/query.py        | 18 +++-
 .../ingestion/source/redshift/redshift.py     |  6 +-
 .../source/redshift/redshift_schema.py        | 98 +++++++++++++------
 6 files changed, 109 insertions(+), 48 deletions(-)

diff --git a/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml b/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml
index be704e6759d41..a561405d3de47 100644
--- a/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml
+++ b/metadata-ingestion/docs/sources/redshift/redshift_recipe.yml
@@ -40,8 +40,8 @@ source:

   options:
     connect_args:
-      sslmode: "prefer" # or "require" or "verify-ca"
-      sslrootcert: ~ # needed to unpin the AWS Redshift certificate
+      # check all available options here: https://pypi.org/project/redshift-connector/
+      ssl_insecure: "false" # Specifies if IDP hosts server certificate will be verified

 sink:
   # sink configs
diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index 4f5f09fb148fa..416b255fb763f 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -181,8 +181,8 @@
 redshift_common = {
     # Clickhouse 0.8.3 adds support for SQLAlchemy 1.4.x
     "sqlalchemy-redshift>=0.8.3",
-    "psycopg2-binary",
     "GeoAlchemy2",
+    "redshift-connector",
     *sqllineage_lib,
     *path_spec_common,
 }
@@ -365,11 +365,7 @@
     | {"psycopg2-binary", "pymysql>=1.0.2"},
     "pulsar": {"requests"},
     "redash": {"redash-toolbelt", "sql-metadata"} | sqllineage_lib,
-    "redshift": sql_common
-    | redshift_common
-    | usage_common
-    | {"redshift-connector"}
-    | sqlglot_lib,
+    "redshift": sql_common | redshift_common | usage_common | sqlglot_lib,
     "s3": {*s3_base, *data_lake_profiling},
     "gcs": {*s3_base, *data_lake_profiling},
     "sagemaker": aws_common,
diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py
index 95038ef2c6212..51ad8a050adc2 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/config.py
@@ -82,7 +82,7 @@ class RedshiftConfig(
     # large Redshift warehouses. As an example, see this query for the columns:
     # https://github.com/sqlalchemy-redshift/sqlalchemy-redshift/blob/60b4db04c1d26071c291aeea52f1dcb5dd8b0eb0/sqlalchemy_redshift/dialect.py#L745.
     scheme: str = Field(
-        default="redshift+psycopg2",
+        default="redshift+redshift_connector",
         description="",
         hidden_from_schema=True,
     )
@@ -170,3 +170,24 @@ def backward_compatibility_configs_set(cls, values: Dict) -> Dict:
             "The config option `match_fully_qualified_names` will be deprecated in future and the default behavior will assume `match_fully_qualified_names: True`."
         )
         return values
+
+    @root_validator(skip_on_failure=True)
+    def connection_config_compatibility_set(cls, values: Dict) -> Dict:
+        if (
+            ("options" in values and "connect_args" in values["options"])
+            and "extra_client_options" in values
+            and len(values["extra_client_options"]) > 0
+        ):
+            raise ValueError(
+                "Cannot set both `connect_args` and `extra_client_options` in the config. Please use `extra_client_options` only."
+            )
+
+        if "options" in values and "connect_args" in values["options"]:
+            values["extra_client_options"] = values["options"]["connect_args"]
+
+        if values["extra_client_options"]:
+            if values["options"]:
+                values["options"]["connect_args"] = values["extra_client_options"]
+            else:
+                values["options"] = {"connect_args": values["extra_client_options"]}
+        return values
diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py
index a96171caf9835..92e36fffd6bb4 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/query.py
@@ -179,14 +179,18 @@ class RedshiftQuery:

     additional_table_metadata: str = """
         select
-            database,
-            schema,
+            ti.database,
+            ti.schema,
             "table",
             size,
             tbl_rows,
             estimated_visible_rows,
             skew_rows,
-            last_accessed
+            last_accessed,
+            case
+                when smi.name is not null then 1
+                else 0
+            end as is_materialized
         from
             pg_catalog.svv_table_info as ti
         left join (
@@ -198,8 +202,12 @@
                 group by
                     tbl) as la on
             (la.tbl = ti.table_id)
-        ;
-    """
+        left join stv_mv_info smi on
+            smi.db_name = ti.database
+            and smi.schema = ti.schema
+            and smi.name = ti.table
+        ;
+"""

 @staticmethod
 def stl_scan_based_lineage_query(
diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py
index 04f0edf504595..eb635b1292b81 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift.py
@@ -6,7 +6,6 @@
 import humanfriendly

 # These imports verify that the dependencies are available.
-import psycopg2  # noqa: F401
 import pydantic
 import redshift_connector

@@ -352,7 +351,6 @@ def create(cls, config_dict, ctx):
     def get_redshift_connection(
         config: RedshiftConfig,
     ) -> redshift_connector.Connection:
-        client_options = config.extra_client_options
         host, port = config.host_port.split(":")
         conn = redshift_connector.connect(
             host=host,
@@ -360,7 +358,7 @@
             user=config.username,
             database=config.database,
             password=config.password.get_secret_value() if config.password else None,
-            **client_options,
+            **config.extra_client_options,
         )

         conn.autocommit = True
@@ -641,7 +639,7 @@ def gen_view_dataset_workunits(
         dataset_urn = self.gen_dataset_urn(datahub_dataset_name)
         if view.ddl:
             view_properties_aspect = ViewProperties(
-                materialized=view.type == "VIEW_MATERIALIZED",
+                materialized=view.materialized,
                 viewLanguage="SQL",
                 viewLogic=view.ddl,
             )
diff --git a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py
index 4a13d17d2cc0f..ca81682ae00e4 100644
--- a/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py
+++ b/metadata-ingestion/src/datahub/ingestion/source/redshift/redshift_schema.py
@@ -40,6 +40,7 @@ class RedshiftTable(BaseTable):
 @dataclass
 class RedshiftView(BaseTable):
     type: Optional[str] = None
+    materialized: bool = False
     columns: List[RedshiftColumn] = field(default_factory=list)
     last_altered: Optional[datetime] = None
     size_in_bytes: Optional[int] = None
@@ -66,6 +67,7 @@ class RedshiftExtraTableMeta:
     estimated_visible_rows: Optional[int] = None
     skew_rows: Optional[float] = None
     last_accessed: Optional[datetime] = None
+    is_materialized: bool = False

 @dataclass
@@ -148,6 +150,7 @@ def enrich_tables(
                 ],
                 skew_rows=meta[field_names.index("skew_rows")],
                 last_accessed=meta[field_names.index("last_accessed")],
+                is_materialized=meta[field_names.index("is_materialized")],
             )
             if table_meta.schema not in table_enrich:
                 table_enrich.setdefault(table_meta.schema, {})
@@ -173,42 +176,23 @@ def get_tables_and_views(
         logger.info(f"Fetched {len(db_tables)} tables/views from Redshift")
         for table in db_tables:
             schema = table[field_names.index("schema")]
+            table_name = table[field_names.index("relname")]
+
             if table[field_names.index("tabletype")] not in [
                 "MATERIALIZED VIEW",
                 "VIEW",
             ]:
                 if schema not in tables:
                     tables.setdefault(schema, [])
-                table_name = table[field_names.index("relname")]
-
-                creation_time: Optional[datetime] = None
-                if table[field_names.index("creation_time")]:
-                    creation_time = table[field_names.index("creation_time")].replace(
-                        tzinfo=timezone.utc
-                    )
-                last_altered: Optional[datetime] = None
-                size_in_bytes: Optional[int] = None
-                rows_count: Optional[int] = None
-                if schema in enriched_table and table_name in enriched_table[schema]:
-                    if enriched_table[schema][table_name].last_accessed:
-                        # Mypy seems to be not clever enough to understand the above check
-                        last_accessed = enriched_table[schema][table_name].last_accessed
-                        assert last_accessed
-                        last_altered = last_accessed.replace(tzinfo=timezone.utc)
-                    elif creation_time:
-                        last_altered = creation_time
-
-                    if enriched_table[schema][table_name].size:
-                        # Mypy seems to be not clever enough to understand the above check
-                        size = enriched_table[schema][table_name].size
-                        if size:
-                            size_in_bytes = size * 1024 * 1024
-
-                    if enriched_table[schema][table_name].estimated_visible_rows:
-                        rows = enriched_table[schema][table_name].estimated_visible_rows
-                        assert rows
-                        rows_count = int(rows)
+                (
+                    creation_time,
+                    last_altered,
+                    rows_count,
+                    size_in_bytes,
+                ) = RedshiftDataDictionary.get_table_stats(
+                    enriched_table, field_names, schema, table
+                )

                 tables[schema].append(
                     RedshiftTable(
@@ -231,16 +215,37 @@
             else:
                 if schema not in views:
                     views[schema] = []
+                (
+                    creation_time,
+                    last_altered,
+                    rows_count,
+                    size_in_bytes,
+                ) = RedshiftDataDictionary.get_table_stats(
+                    enriched_table=enriched_table,
+                    field_names=field_names,
+                    schema=schema,
+                    table=table,
+                )
+
+                materialized = False
+                if schema in enriched_table and table_name in enriched_table[schema]:
+                    if enriched_table[schema][table_name].is_materialized:
+                        materialized = True

                 views[schema].append(
                     RedshiftView(
                         type=table[field_names.index("tabletype")],
                         name=table[field_names.index("relname")],
                         ddl=table[field_names.index("view_definition")],
-                        created=table[field_names.index("creation_time")],
+                        created=creation_time,
                         comment=table[field_names.index("table_description")],
+                        last_altered=last_altered,
+                        size_in_bytes=size_in_bytes,
+                        rows_count=rows_count,
+                        materialized=materialized,
                     )
                 )
+
         for schema_key, schema_tables in tables.items():
             logger.info(
                 f"In schema: {schema_key} discovered {len(schema_tables)} tables"
@@ -250,6 +255,39 @@
         return tables, views

+    @staticmethod
+    def get_table_stats(enriched_table, field_names, schema, table):
+        table_name = table[field_names.index("relname")]
+
+        creation_time: Optional[datetime] = None
+        if table[field_names.index("creation_time")]:
+            creation_time = table[field_names.index("creation_time")].replace(
+                tzinfo=timezone.utc
+            )
+        last_altered: Optional[datetime] = None
+        size_in_bytes: Optional[int] = None
+        rows_count: Optional[int] = None
+        if schema in enriched_table and table_name in enriched_table[schema]:
+            if enriched_table[schema][table_name].last_accessed:
+                # Mypy seems to be not clever enough to understand the above check
+                last_accessed = enriched_table[schema][table_name].last_accessed
+                assert last_accessed
+                last_altered = last_accessed.replace(tzinfo=timezone.utc)
+            elif creation_time:
+                last_altered = creation_time
+
+            if enriched_table[schema][table_name].size:
+                # Mypy seems to be not clever enough to understand the above check
+                size = enriched_table[schema][table_name].size
+                if size:
+                    size_in_bytes = size * 1024 * 1024
+
+            if enriched_table[schema][table_name].estimated_visible_rows:
+                rows = enriched_table[schema][table_name].estimated_visible_rows
+                assert rows
+                rows_count = int(rows)
+        return creation_time, last_altered, rows_count, size_in_bytes
+
     @staticmethod
     def get_schema_fields_for_column(
         column: RedshiftColumn,
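To make patch 2's connection change concrete: the source now opens a `redshift_connector` connection and forwards the recipe's `options.connect_args` verbatim as keyword arguments. A standalone sketch of the equivalent call (host, user, database, and password values are placeholders):

import redshift_connector

# Stand-in for what the recipe supplies under options.connect_args,
# after the new compatibility validator copies it into extra_client_options.
extra_client_options = {"ssl_insecure": False}

conn = redshift_connector.connect(
    host="example-cluster.abc123.us-east-1.redshift.amazonaws.com",
    port=5439,
    user="datahub",
    database="dev",
    password="<password>",
    **extra_client_options,  # mirrors the patched get_redshift_connection
)
conn.autocommit = True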
From 806f09ae23b1a569006be9eaf8d13165e67742b3 Mon Sep 17 00:00:00 2001
From: Andrew Sikowitz
Date: Tue, 5 Dec 2023 12:33:00 -0500
Subject: [PATCH 3/6] test(ingest/unity): Unity catalog data generation (#8949)

---
 metadata-ingestion/setup.py                   |   1 +
 .../performance/bigquery/bigquery_events.py   |  10 +-
 .../tests/performance/data_generation.py      | 153 ++++++++++-----
 .../tests/performance/data_model.py           |  54 +++++-
 .../tests/performance/databricks/generator.py | 177 ++++++++++++++++++
 .../databricks/unity_proxy_mock.py            |  73 ++++----
 .../tests/unit/test_bigquery_source.py        |   2 +-
 .../tests/unit/test_bigquery_usage.py         |  14 +-
 8 files changed, 383 insertions(+), 101 deletions(-)
 create mode 100644 metadata-ingestion/tests/performance/databricks/generator.py

diff --git a/metadata-ingestion/setup.py b/metadata-ingestion/setup.py
index 416b255fb763f..69cbe8d823450 100644
--- a/metadata-ingestion/setup.py
+++ b/metadata-ingestion/setup.py
@@ -262,6 +262,7 @@
     "databricks-sdk>=0.9.0",
     "pyspark~=3.3.0",
     "requests",
+    "databricks-sql-connector",
 }

 mysql = sql_common | {"pymysql>=1.0.2"}
diff --git a/metadata-ingestion/tests/performance/bigquery/bigquery_events.py b/metadata-ingestion/tests/performance/bigquery/bigquery_events.py
index d9b5571a8015f..0e0bfe78c260f 100644
--- a/metadata-ingestion/tests/performance/bigquery/bigquery_events.py
+++ b/metadata-ingestion/tests/performance/bigquery/bigquery_events.py
@@ -2,7 +2,7 @@
 import random
 import uuid
 from collections import defaultdict
-from typing import Dict, Iterable, List, cast
+from typing import Dict, Iterable, List, Set

 from typing_extensions import get_args

@@ -15,7 +15,7 @@
 )
 from datahub.ingestion.source.bigquery_v2.bigquery_config import BigQueryV2Config
 from datahub.ingestion.source.bigquery_v2.usage import OPERATION_STATEMENT_TYPES
-from tests.performance.data_model import Query, StatementType, Table, View
+from tests.performance.data_model import Query, StatementType, Table

 # https://cloud.google.com/bigquery/docs/reference/auditlogs/rest/Shared.Types/BigQueryAuditMetadata.TableDataRead.Reason
 READ_REASONS = [
@@ -86,7 +86,7 @@ def generate_events(
                         ref_from_table(parent, table_to_project)
                         for field in query.fields_accessed
                         if field.table.is_view()
-                        for parent in cast(View, field.table).parents
+                        for parent in field.table.upstreams
                     )
                 ),
                 referencedViews=referencedViews,
@@ -96,7 +96,7 @@
                 query_on_view=True if referencedViews else False,
             )
         )
-        table_accesses = defaultdict(set)
+        table_accesses: Dict[BigQueryTableRef, Set[str]] = defaultdict(set)
         for field in query.fields_accessed:
             if not field.table.is_view():
                 table_accesses[ref_from_table(field.table, table_to_project)].add(
@@ -104,7 +104,7 @@
                 )
             else:
                 # assuming that same fields are accessed in parent tables
-                for parent in cast(View, field.table).parents:
+                for parent in field.table.upstreams:
                     table_accesses[ref_from_table(parent, table_to_project)].add(
                         field.column
                     )
diff --git a/metadata-ingestion/tests/performance/data_generation.py b/metadata-ingestion/tests/performance/data_generation.py
index 67b156896909a..9b80d6260d408 100644
--- a/metadata-ingestion/tests/performance/data_generation.py
+++ b/metadata-ingestion/tests/performance/data_generation.py
@@ -8,16 +8,16 @@
 This is a work in progress, built piecemeal as needed.
 """
 import random
-import uuid
+from abc import ABCMeta, abstractmethod
+from collections import OrderedDict
 from dataclasses import dataclass
 from datetime import datetime, timedelta, timezone
-from typing import Iterable, List, TypeVar, Union, cast
+from typing import Collection, Iterable, List, Optional, TypeVar, Union, cast

 from faker import Faker

 from tests.performance.data_model import (
     Column,
-    ColumnMapping,
     ColumnType,
     Container,
     FieldAccess,
@@ -40,17 +40,46 @@
     "UNKNOWN",
 ]

+ID_COLUMN = "id"  # Use to allow joins between all tables
+
+
+class Distribution(metaclass=ABCMeta):
+    @abstractmethod
+    def _sample(self) -> int:
+        raise NotImplementedError
+
+    def sample(
+        self, *, floor: Optional[int] = None, ceiling: Optional[int] = None
+    ) -> int:
+        value = self._sample()
+        if floor is not None:
+            value = max(value, floor)
+        if ceiling is not None:
+            value = min(value, ceiling)
+        return value
+
+
 @dataclass(frozen=True)
-class NormalDistribution:
+class NormalDistribution(Distribution):
     mu: float
     sigma: float

-    def sample(self) -> int:
+    def _sample(self) -> int:
         return int(random.gauss(mu=self.mu, sigma=self.sigma))

-    def sample_with_floor(self, floor: int = 1) -> int:
-        return max(int(random.gauss(mu=self.mu, sigma=self.sigma)), floor)
+
+@dataclass(frozen=True)
+class LomaxDistribution(Distribution):
+    """See https://en.wikipedia.org/wiki/Lomax_distribution.
+
+    Equivalent to pareto(scale, shape) - scale; scale * beta_prime(1, shape)
+    """
+
+    scale: float
+    shape: float
+
+    def _sample(self) -> int:
+        return int(self.scale * (random.paretovariate(self.shape) - 1))

 @dataclass
@@ -72,9 +101,9 @@ def generate_data(
     num_containers: Union[List[int], int],
     num_tables: int,
     num_views: int,
-    columns_per_table: NormalDistribution = NormalDistribution(5, 2),
-    parents_per_view: NormalDistribution = NormalDistribution(2, 1),
-    view_definition_length: NormalDistribution = NormalDistribution(150, 50),
+    columns_per_table: Distribution = NormalDistribution(5, 2),
+    parents_per_view: Distribution = NormalDistribution(2, 1),
+    view_definition_length: Distribution = NormalDistribution(150, 50),
     time_range: timedelta = timedelta(days=14),
 ) -> SeedMetadata:
     # Assemble containers
@@ -85,43 +114,32 @@
     for i, num_in_layer in enumerate(num_containers):
         layer = [
             Container(
-                f"{i}-container-{j}",
+                f"{_container_type(i)}_{j}",
                 parent=random.choice(containers[-1]) if containers else None,
             )
             for j in range(num_in_layer)
         ]
         containers.append(layer)

-    # Assemble tables
+    # Assemble tables and views, lineage, and definitions
     tables = [
-        Table(
-            f"table-{i}",
-            container=random.choice(containers[-1]),
-            columns=[
-                f"column-{j}-{uuid.uuid4()}"
-                for j in range(columns_per_table.sample_with_floor())
-            ],
-            column_mapping=None,
-        )
-        for i in range(num_tables)
+        _generate_table(i, containers[-1], columns_per_table) for i in range(num_tables)
     ]
     views = [
         View(
-            f"view-{i}",
-            container=random.choice(containers[-1]),
-            columns=[
-                f"column-{j}-{uuid.uuid4()}"
-                for j in range(columns_per_table.sample_with_floor())
-            ],
-            column_mapping=None,
-            definition=f"{uuid.uuid4()}-{'*' * view_definition_length.sample_with_floor(10)}",
-            parents=random.sample(tables, parents_per_view.sample_with_floor()),
+            **{  # type: ignore
+                **_generate_table(i, containers[-1], columns_per_table).__dict__,
+                "name": f"view_{i}",
+                "definition": f"--{'*' * view_definition_length.sample(floor=0)}",
+            },
         )
         for i in range(num_views)
     ]
-    for table in tables + views:
-        _generate_column_mapping(table)
+    for view in views:
+        view.upstreams = random.sample(tables, k=parents_per_view.sample(floor=1))
+
+    generate_lineage(tables, views)

     now = datetime.now(tz=timezone.utc)
     return SeedMetadata(
@@ -133,6 +151,33 @@
     )

+def generate_lineage(
+    tables: Collection[Table],
+    views: Collection[Table],
+    # Percentiles: 75th=0, 80th=1, 95th=2, 99th=4, 99.99th=15
+    upstream_distribution: Distribution = LomaxDistribution(scale=3, shape=5),
+) -> None:
+    num_upstreams = [upstream_distribution.sample(ceiling=100) for _ in tables]
+    # Prioritize tables with a lot of upstreams themselves
+    factor = 1 + len(tables) // 10
+    table_weights = [1 + (num_upstreams[i] * factor) for i in range(len(tables))]
+    view_weights = [1] * len(views)
+
+    # TODO: Python 3.9 use random.sample with counts
+    sample = []
+    for table, weight in zip(tables, table_weights):
+        for _ in range(weight):
+            sample.append(table)
+    for view, weight in zip(views, view_weights):
+        for _ in range(weight):
+            sample.append(view)
+    for i, table in enumerate(tables):
+        table.upstreams = random.sample(  # type: ignore
+            sample,
+            k=num_upstreams[i],
+        )
+
+
 def generate_queries(
     seed_metadata: SeedMetadata,
     num_selects: int,
@@ -146,12 +191,12 @@
 ) -> Iterable[Query]:
     faker = Faker()
     query_texts = [
-        faker.paragraph(query_length.sample_with_floor(30) // 30)
+        faker.paragraph(query_length.sample(floor=30) // 30)
         for _ in range(num_unique_queries)
     ]

     all_tables = seed_metadata.tables + seed_metadata.views
-    users = [f"user-{i}@xyz.com" for i in range(num_users)]
+    users = [f"user_{i}@xyz.com" for i in range(num_users)]
     for i in range(num_selects):  # Pure SELECT statements
         tables = _sample_list(all_tables, tables_per_select)
         all_columns = [
@@ -191,21 +236,43 @@
     )

-def _generate_column_mapping(table: Table) -> ColumnMapping:
-    d = {}
-    for column in table.columns:
-        d[column] = Column(
-            name=column,
+def _container_type(i: int) -> str:
+    if i == 0:
+        return "database"
+    elif i == 1:
+        return "schema"
+    else:
+        return f"{i}container"
+
+
+def _generate_table(
+    i: int, parents: List[Container], columns_per_table: Distribution
+) -> Table:
+    num_columns = columns_per_table.sample(floor=1)
+
+    columns = OrderedDict({ID_COLUMN: Column(ID_COLUMN, ColumnType.INTEGER, False)})
+    for j in range(num_columns):
+        name = f"column_{j}"
+        columns[name] = Column(
+            name=name,
             type=random.choice(list(ColumnType)),
             nullable=random.random() < 0.1,  # Fixed 10% chance for now
         )
-    table.column_mapping = d
-    return d
+    return Table(
+        f"table_{i}",
+        container=random.choice(parents),
+        columns=columns,
+        upstreams=[],
+    )

 def _sample_list(lst: List[T], dist: NormalDistribution, floor: int = 1) -> List[T]:
-    return random.sample(lst, min(dist.sample_with_floor(floor), len(lst)))
+    return random.sample(lst, min(dist.sample(floor=floor), len(lst)))

 def _random_time_between(start: datetime, end: datetime) -> datetime:
     return start + timedelta(seconds=(end - start).total_seconds() * random.random())
+
+
+if __name__ == "__main__":
+    z = generate_data(10, 1000, 10)
diff --git a/metadata-ingestion/tests/performance/data_model.py b/metadata-ingestion/tests/performance/data_model.py
index 9425fa827070e..728bb6ddde215 100644
--- a/metadata-ingestion/tests/performance/data_model.py
+++ b/metadata-ingestion/tests/performance/data_model.py
@@ -1,7 +1,9 @@
-from dataclasses import dataclass
+import typing
+from collections import OrderedDict
+from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Union

 from typing_extensions import Literal

@@ -37,29 +39,63 @@ class ColumnType(str, Enum):
 @dataclass
 class Column:
     name: str
-    type: ColumnType
-    nullable: bool
+    type: ColumnType = ColumnType.STRING
+    nullable: bool = False

 ColumnRef = str
 ColumnMapping = Dict[ColumnRef, Column]

-@dataclass
+@dataclass(init=False)
 class Table:
     name: str
     container: Container
-    columns: List[ColumnRef]
-    column_mapping: Optional[ColumnMapping]
+    columns: typing.OrderedDict[ColumnRef, Column] = field(repr=False)
+    upstreams: List["Table"] = field(repr=False)
+
+    def __init__(
+        self,
+        name: str,
+        container: Container,
+        columns: Union[List[str], Dict[str, Column]],
+        upstreams: List["Table"],
+    ):
+        self.name = name
+        self.container = container
+        self.upstreams = upstreams
+        if isinstance(columns, list):
+            self.columns = OrderedDict((col, Column(col)) for col in columns)
+        elif isinstance(columns, dict):
+            self.columns = OrderedDict(columns)
+
+    @property
+    def name_components(self) -> List[str]:
+        lst = [self.name]
+        container: Optional[Container] = self.container
+        while container:
+            lst.append(container.name)
+            container = container.parent
+        return lst[::-1]

     def is_view(self) -> bool:
         return False

-@dataclass
+@dataclass(init=False)
 class View(Table):
     definition: str
-    parents: List[Table]
+
+    def __init__(
+        self,
+        name: str,
+        container: Container,
+        columns: Union[List[str], Dict[str, Column]],
+        upstreams: List["Table"],
+        definition: str,
+    ):
+        super().__init__(name, container, columns, upstreams)
+        self.definition = definition

     def is_view(self) -> bool:
         return True
diff --git a/metadata-ingestion/tests/performance/databricks/generator.py b/metadata-ingestion/tests/performance/databricks/generator.py
new file mode 100644
index 0000000000000..29df325d856a1
--- /dev/null
+++ b/metadata-ingestion/tests/performance/databricks/generator.py
@@ -0,0 +1,177 @@
+import logging
+import random
+import string
+from concurrent.futures import ThreadPoolExecutor, wait
+from datetime import datetime
+from typing import Callable, List, TypeVar, Union
+from urllib.parse import urlparse
+
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.core import DatabricksError
+from databricks.sdk.service.catalog import ColumnTypeName
+from performance.data_generation import Distribution, LomaxDistribution, SeedMetadata
+from performance.data_model import ColumnType, Container, Table, View
+from performance.databricks.unity_proxy_mock import _convert_column_type
+from sqlalchemy import create_engine
+
+from datahub.ingestion.source.sql.sql_config import make_sqlalchemy_uri
+
+logger = logging.getLogger(__name__)
+T = TypeVar("T")
+
+MAX_WORKERS = 200
+
+
+class DatabricksDataGenerator:
+    def __init__(self, host: str, token: str, warehouse_id: str):
+        self.client = WorkspaceClient(host=host, token=token)
+        self.warehouse_id = warehouse_id
+        url = make_sqlalchemy_uri(
+            scheme="databricks",
+            username="token",
+            password=token,
+            at=urlparse(host).netloc,
+            db=None,
+            uri_opts={"http_path": f"/sql/1.0/warehouses/{warehouse_id}"},
+        )
+        engine = create_engine(
+            url, connect_args={"timeout": 600}, pool_size=MAX_WORKERS
+        )
+        self.connection = engine.connect()
+
+    def clear_data(self, seed_metadata: SeedMetadata) -> None:
+        for container in seed_metadata.containers[0]:
+            try:
+                self.client.catalogs.delete(container.name, force=True)
+            except DatabricksError:
+                pass
+
+    def create_data(
+        self,
+        seed_metadata: SeedMetadata,
+        # Percentiles: 1st=0, 10th=7, 25th=21, 50th=58, 75th=152, 90th=364, 99th=2063, 99.99th=46316
+        num_rows_distribution: Distribution = LomaxDistribution(scale=100, shape=1.5),
+    ) -> None:
+        """Create data in Databricks based on SeedMetadata."""
+        for container in seed_metadata.containers[0]:
+            self._create_catalog(container)
+        for container in seed_metadata.containers[1]:
+            self._create_schema(container)
+
+        _thread_pool_execute("create tables", seed_metadata.tables, self._create_table)
+        _thread_pool_execute("create views", seed_metadata.views, self._create_view)
+        _thread_pool_execute(
+            "populate tables",
+            seed_metadata.tables,
+            lambda t: self._populate_table(
+                t, num_rows_distribution.sample(ceiling=1_000_000)
+            ),
+        )
+        _thread_pool_execute(
+            "create table lineage", seed_metadata.tables, self._create_table_lineage
+        )
+
+    def _create_catalog(self, catalog: Container) -> None:
+        try:
+            self.client.catalogs.get(catalog.name)
+        except DatabricksError:
+            self.client.catalogs.create(catalog.name)
+
+    def _create_schema(self, schema: Container) -> None:
+        try:
+            self.client.schemas.get(f"{schema.parent.name}.{schema.name}")
+        except DatabricksError:
+            self.client.schemas.create(schema.name, schema.parent.name)
+
+    def _create_table(self, table: Table) -> None:
+        try:
+            self.client.tables.delete(".".join(table.name_components))
+        except DatabricksError:
+            pass
+
+        columns = ", ".join(
+            f"{name} {_convert_column_type(column.type).value}"
+            for name, column in table.columns.items()
+        )
+        self._execute_sql(f"CREATE TABLE {_quote_table(table)} ({columns})")
+        self._assert_table_exists(table)
+
+    def _create_view(self, view: View) -> None:
+        self._execute_sql(_generate_view_definition(view))
+        self._assert_table_exists(view)
+
+    def _assert_table_exists(self, table: Table) -> None:
+        self.client.tables.get(".".join(table.name_components))
+
+    def _populate_table(self, table: Table, num_rows: int) -> None:
+        values = [
+            ", ".join(
+                str(_generate_value(column.type)) for column in table.columns.values()
+            )
+            for _ in range(num_rows)
+        ]
+        values_str = ", ".join(f"({value})" for value in values)
+        self._execute_sql(f"INSERT INTO {_quote_table(table)} VALUES {values_str}")
+
+    def _create_table_lineage(self, table: Table) -> None:
+        for upstream in table.upstreams:
+            self._execute_sql(_generate_insert_lineage(table, upstream))
+
+    def _execute_sql(self, sql: str) -> None:
+        print(sql)
+        self.connection.execute(sql)
+
+
+def _thread_pool_execute(desc: str, lst: List[T], fn: Callable[[T], None]) -> None:
+    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
+        futures = [executor.submit(fn, item) for item in lst]
+        wait(futures)
+        for future in futures:
+            try:
+                future.result()
+            except Exception as e:
+                logger.error(f"Error executing '{desc}': {e}", exc_info=True)
+
+
+def _generate_value(t: ColumnType) -> Union[int, float, str, bool]:
+    ctn = _convert_column_type(t)
+    if ctn == ColumnTypeName.INT:
+        return random.randint(-(2**31), 2**31 - 1)
+    elif ctn == ColumnTypeName.DOUBLE:
+        return random.uniform(-(2**31), 2**31 - 1)
+    elif ctn == ColumnTypeName.STRING:
+        return (
+            "'" + "".join(random.choice(string.ascii_letters) for _ in range(8)) + "'"
+        )
+    elif ctn == ColumnTypeName.BOOLEAN:
+        return random.choice([True, False])
+    elif ctn == ColumnTypeName.TIMESTAMP:
+        return random.randint(0, int(datetime.now().timestamp()))
+    else:
+        raise NotImplementedError(f"Unsupported type {ctn}")
+
+
+def _generate_insert_lineage(table: Table, upstream: Table) -> str:
+    select = []
+    for column in table.columns.values():
+        matching_cols = [c for c in upstream.columns.values() if c.type == column.type]
+        if matching_cols:
+            upstream_col = random.choice(matching_cols)
+            select.append(f"{upstream_col.name} AS {column.name}")
+        else:
+            select.append(f"{_generate_value(column.type)} AS {column.name}")
+
+    return f"INSERT INTO {_quote_table(table)} SELECT {', '.join(select)} FROM {_quote_table(upstream)}"
+
+
+def _generate_view_definition(view: View) -> str:
+    from_statement = f"FROM {_quote_table(view.upstreams[0])} t0"
+    join_statement = " ".join(
+        f"JOIN {_quote_table(upstream)} t{i+1} ON t0.id = t{i+1}.id"
+        for i, upstream in enumerate(view.upstreams[1:])
+    )
+    return f"CREATE VIEW {_quote_table(view)} AS SELECT * {from_statement} {join_statement} {view.definition}"
+
+
+def _quote_table(table: Table) -> str:
+    return ".".join(f"`{component}`" for component in table.name_components)
diff --git a/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py b/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py
index 593163e12bf0a..ee1caf6783ec1 100644
--- a/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py
+++ b/metadata-ingestion/tests/performance/databricks/unity_proxy_mock.py
@@ -88,22 +88,21 @@ def schemas(self, catalog: Catalog) -> Iterable[Schema]:
     def tables(self, schema: Schema) -> Iterable[Table]:
         for table in self._schema_to_table[schema.name]:
             columns = []
-            if table.column_mapping:
-                for i, col_name in enumerate(table.columns):
-                    column = table.column_mapping[col_name]
-                    columns.append(
-                        Column(
-                            id=column.name,
-                            name=column.name,
-                            type_name=self._convert_column_type(column.type),
-                            type_text=column.type.value,
-                            nullable=column.nullable,
-                            position=i,
-                            comment=None,
-                            type_precision=0,
-                            type_scale=0,
-                        )
+            for i, col_name in enumerate(table.columns):
+                column = table.columns[col_name]
+                columns.append(
+                    Column(
+                        id=column.name,
+                        name=column.name,
+                        type_name=_convert_column_type(column.type),
+                        type_text=column.type.value,
+                        nullable=column.nullable,
+                        position=i,
+                        comment=None,
+                        type_precision=0,
+                        type_scale=0,
                     )
+                )

             yield Table(
                 id=f"{schema.id}.{table.name}",
@@ -145,7 +144,7 @@ def query_history(
             yield Query(
                 query_id=str(i),
                 query_text=query.text,
-                statement_type=self._convert_statement_type(query.type),
+                statement_type=_convert_statement_type(query.type),
                 start_time=query.timestamp,
                 end_time=query.timestamp,
                 user_id=hash(query.actor),
@@ -160,24 +159,24 @@ def table_lineage(self, table: Table) -> None:
     def get_column_lineage(self, table: Table) -> None:
         pass

-    @staticmethod
-    def _convert_column_type(t: ColumnType) -> ColumnTypeName:
-        if t == ColumnType.INTEGER:
-            return ColumnTypeName.INT
-        elif t == ColumnType.FLOAT:
-            return ColumnTypeName.DOUBLE
-        elif t == ColumnType.STRING:
-            return ColumnTypeName.STRING
-        elif t == ColumnType.BOOLEAN:
-            return ColumnTypeName.BOOLEAN
-        elif t == ColumnType.DATETIME:
-            return ColumnTypeName.TIMESTAMP
-        else:
-            raise ValueError(f"Unknown column type: {t}")
-
-    @staticmethod
-    def _convert_statement_type(t: StatementType) -> QueryStatementType:
-        if t == "CUSTOM" or t == "UNKNOWN":
-            return QueryStatementType.OTHER
-        else:
-            return QueryStatementType[t]
+
+def _convert_column_type(t: ColumnType) -> ColumnTypeName:
+    if t == ColumnType.INTEGER:
+        return ColumnTypeName.INT
+    elif t == ColumnType.FLOAT:
+        return ColumnTypeName.DOUBLE
+    elif t == ColumnType.STRING:
+        return ColumnTypeName.STRING
+    elif t == ColumnType.BOOLEAN:
+        return ColumnTypeName.BOOLEAN
+    elif t == ColumnType.DATETIME:
+        return ColumnTypeName.TIMESTAMP
+    else:
+        raise ValueError(f"Unknown column type: {t}")
+
+
+def _convert_statement_type(t: StatementType) -> QueryStatementType:
+    if t == "CUSTOM" or t == "UNKNOWN":
+        return QueryStatementType.OTHER
+    else:
+        return QueryStatementType[t]
diff --git a/metadata-ingestion/tests/unit/test_bigquery_source.py b/metadata-ingestion/tests/unit/test_bigquery_source.py
index 4cfa5c48d2377..3cdb73d77d0a1 100644
--- a/metadata-ingestion/tests/unit/test_bigquery_source.py
+++ b/metadata-ingestion/tests/unit/test_bigquery_source.py
@@ -324,7 +324,7 @@ def test_get_projects_list_failure(
         {"project_id_pattern": {"deny": ["^test-project$"]}}
     )
     source = BigqueryV2Source(config=config, ctx=PipelineContext(run_id="test"))
-    caplog.records.clear()
+    caplog.clear()
     with caplog.at_level(logging.ERROR):
         projects = source._get_projects()
     assert len(caplog.records) == 1
diff --git a/metadata-ingestion/tests/unit/test_bigquery_usage.py b/metadata-ingestion/tests/unit/test_bigquery_usage.py
index 1eb5d8b00e27c..c0055763bc15b 100644
--- a/metadata-ingestion/tests/unit/test_bigquery_usage.py
+++ b/metadata-ingestion/tests/unit/test_bigquery_usage.py
@@ -1,7 +1,7 @@
 import logging
 import random
 from datetime import datetime, timedelta, timezone
-from typing import Iterable, cast
+from typing import Iterable
 from unittest.mock import MagicMock, patch

 import pytest
@@ -45,15 +45,16 @@
 ACTOR_2, ACTOR_2_URN = "b@acryl.io", "urn:li:corpuser:b"
 DATABASE_1 = Container("database_1")
 DATABASE_2 = Container("database_2")
-TABLE_1 = Table("table_1", DATABASE_1, ["id", "name", "age"], None)
-TABLE_2 = Table("table_2", DATABASE_1, ["id", "table_1_id", "value"], None)
+TABLE_1 = Table("table_1", DATABASE_1, columns=["id", "name", "age"], upstreams=[])
+TABLE_2 = Table(
+    "table_2", DATABASE_1, columns=["id", "table_1_id", "value"], upstreams=[]
+)
 VIEW_1 = View(
     name="view_1",
     container=DATABASE_1,
     columns=["id", "name", "total"],
     definition="VIEW DEFINITION 1",
-    parents=[TABLE_1, TABLE_2],
-    column_mapping=None,
+    upstreams=[TABLE_1, TABLE_2],
 )
 ALL_TABLES = [TABLE_1, TABLE_2, VIEW_1]

@@ -842,6 +843,7 @@ def test_usage_counts_no_columns(
             )
         ),
     ]
+    caplog.clear()
    with caplog.at_level(logging.WARNING):
         workunits = usage_extractor._get_workunits_internal(
             events, [TABLE_REFS[TABLE_1.name]]
@@ -938,7 +940,7 @@ def test_operational_stats(
                     ).to_urn("PROD")
                     for field in query.fields_accessed
                     if field.table.is_view()
-                    for parent in cast(View, field.table).parents
+                    for parent in field.table.upstreams
                 )
             ),
         ),
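The `LomaxDistribution` introduced in patch 3 drives both view fan-in and generated row counts. A self-contained sketch of the same sampling trick (a heavy-tailed Pareto draw shifted to start at zero), outside the test harness:

import random

def lomax_sample(scale: float, shape: float, ceiling: int) -> int:
    # random.paretovariate(shape) >= 1, so subtracting 1 shifts the floor to 0;
    # multiplying by `scale` stretches the tail (see the docstring in the patch).
    value = int(scale * (random.paretovariate(shape) - 1))
    return min(value, ceiling)

random.seed(0)  # deterministic for the example
print([lomax_sample(scale=100, shape=1.5, ceiling=1_000_000) for _ in range(5)])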
From c66619ccc7be509e37e804588023c51984b4fb33 Mon Sep 17 00:00:00 2001
From: david-leifker <114954101+david-leifker@users.noreply.github.com>
Date: Tue, 5 Dec 2023 14:03:24 -0600
Subject: [PATCH 4/6] fix(elasticsearch): set datahub usage events shard & replica count (#9388)

---
 docker/elasticsearch-setup/create-indices.sh               | 7 ++++++-
 .../resources/index/usage-event/aws_es_index_template.json | 4 +++-
 .../main/resources/index/usage-event/index_template.json   | 4 +++-
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/docker/elasticsearch-setup/create-indices.sh b/docker/elasticsearch-setup/create-indices.sh
index 5c4eb3ce3851e..81cf405bf4b3d 100755
--- a/docker/elasticsearch-setup/create-indices.sh
+++ b/docker/elasticsearch-setup/create-indices.sh
@@ -5,6 +5,8 @@ set -e
 : ${DATAHUB_ANALYTICS_ENABLED:=true}
 : ${USE_AWS_ELASTICSEARCH:=false}
 : ${ELASTICSEARCH_INSECURE:=false}
+: ${DUE_SHARDS:=1}
+: ${DUE_REPLICAS:=1}

 # protocol: http or https?
 if [[ $ELASTICSEARCH_USE_SSL == true ]]; then
@@ -74,7 +76,10 @@ function create_if_not_exists {
     # use the file at given path as definition, but first replace all occurences of `PREFIX`
     # placeholder within the file with the actual prefix value
     TMP_SOURCE_PATH="/tmp/$RESOURCE_DEFINITION_NAME"
-    sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" | tee -a "$TMP_SOURCE_PATH"
+    sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" \
+      | sed -e "s/DUE_SHARDS/$DUE_SHARDS/g" \
+      | sed -e "s/DUE_REPLICAS/$DUE_REPLICAS/g" \
+      | tee -a "$TMP_SOURCE_PATH"
     curl "${CURL_ARGS[@]}" -XPUT "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS" -H 'Content-Type: application/json' --data "@$TMP_SOURCE_PATH"

   elif [ $RESOURCE_STATUS -eq 403 ]; then
diff --git a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json
index 21e98e4e96b5f..16d1e14720b2d 100644
--- a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json
+++ b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/aws_es_index_template.json
@@ -20,6 +20,8 @@
     }
   },
   "settings": {
-    "index.opendistro.index_state_management.rollover_alias": "PREFIXdatahub_usage_event"
+    "index.opendistro.index_state_management.rollover_alias": "PREFIXdatahub_usage_event",
+    "index.number_of_shards": DUE_SHARDS,
+    "index.number_of_replicas": DUE_REPLICAS
   }
 }
\ No newline at end of file
diff --git a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json
index 44f6e644713eb..e3c6a8c37e573 100644
--- a/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json
+++ b/metadata-service/restli-servlet-impl/src/main/resources/index/usage-event/index_template.json
@@ -23,7 +23,9 @@
       }
     },
     "settings": {
-      "index.lifecycle.name": "PREFIXdatahub_usage_event_policy"
+      "index.lifecycle.name": "PREFIXdatahub_usage_event_policy",
+      "index.number_of_shards": DUE_SHARDS,
+      "index.number_of_replicas": DUE_REPLICAS
     }
   }
 }
\ No newline at end of file
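Patch 4 templatizes the usage-event index settings: `create-indices.sh` now pipes the template JSON through extra `sed` passes so the `DUE_SHARDS`/`DUE_REPLICAS` environment variables land in `index.number_of_shards`/`index.number_of_replicas`. The same substitution, sketched in Python for clarity (template string abbreviated):

import os

template = '{"settings": {"index.number_of_shards": DUE_SHARDS, "index.number_of_replicas": DUE_REPLICAS}}'

rendered = template.replace("DUE_SHARDS", os.environ.get("DUE_SHARDS", "1"))
rendered = rendered.replace("DUE_REPLICAS", os.environ.get("DUE_REPLICAS", "1"))
print(rendered)  # valid JSON once both placeholders are substituted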
From 7fb60869f2a9757d6729d52a44f5c0390af86381 Mon Sep 17 00:00:00 2001
From: siladitya <68184387+siladitya2@users.noreply.github.com>
Date: Wed, 6 Dec 2023 03:28:47 +0100
Subject: [PATCH 5/6] feat(gms/search): Adding support for DOUBLE Searchable type (#9369)

Co-authored-by: si-chakraborty
---
 .../metadata/models/annotation/SearchableAnnotation.java    | 3 ++-
 .../com/linkedin/metadata/models/EntitySpecBuilderTest.java | 6 +++++-
 .../search/elasticsearch/indexbuilder/MappingsBuilder.java  | 2 ++
 .../metadata/search/indexbuilder/MappingsBuilderTest.java   | 6 +++++-
 .../src/main/pegasus/com/datahub/test/TestEntityInfo.pdl    | 6 ++++++
 5 files changed, 20 insertions(+), 3 deletions(-)

diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
index d5e5044f95c23..efa30a948e237 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
@@ -66,7 +66,8 @@ public enum FieldType {
     DATETIME,
     OBJECT,
     BROWSE_PATH_V2,
-    WORD_GRAM
+    WORD_GRAM,
+    DOUBLE
   }

   @Nonnull
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java
index 3618108970afa..b95cb1085283f 100644
--- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java
+++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java
@@ -142,7 +142,7 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) {
     assertEquals(new TestEntityInfo().schema().getFullName(), testEntityInfo.getPegasusSchema().getFullName());

     // Assert on Searchable Fields
-    assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 10);
+    assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11);
     assertEquals("customProperties", testEntityInfo.getSearchableFieldSpecMap().get(
         new PathSpec("customProperties").toString()).getSearchableAnnotation().getFieldName());
     assertEquals(SearchableAnnotation.FieldType.KEYWORD, testEntityInfo.getSearchableFieldSpecMap().get(
@@ -189,6 +189,10 @@ private void validateTestEntityInfo(final AspectSpec testEntityInfo) {
         new PathSpec("foreignKey").toString()).getSearchableAnnotation().getFieldName());
     assertEquals(true, testEntityInfo.getSearchableFieldSpecMap().get(
         new PathSpec("foreignKey").toString()).getSearchableAnnotation().isQueryByDefault());
+    assertEquals("doubleField", testEntityInfo.getSearchableFieldSpecMap().get(
+        new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldName());
+    assertEquals(SearchableAnnotation.FieldType.DOUBLE, testEntityInfo.getSearchableFieldSpecMap().get(
+        new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldType());

     // Assert on Relationship Fields
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java
index 35cef71edd953..13a0f57ccea99 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java
@@ -134,6 +134,8 @@ private static Map getMappingsForField(@Nonnull final Searchable
       mappingForField.put(TYPE, ESUtils.DATE_FIELD_TYPE);
     } else if (fieldType == FieldType.OBJECT) {
       mappingForField.put(TYPE, ESUtils.OBJECT_FIELD_TYPE);
+    } else if (fieldType == FieldType.DOUBLE) {
+      mappingForField.put(TYPE, ESUtils.DOUBLE_FIELD_TYPE);
     } else {
       log.info("FieldType {} has no mappings implemented", fieldType);
     }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java
index 0d2ce236d9f54..d9f2f0e5aac94 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java
@@ -18,7 +18,7 @@ public void testMappingsBuilder() {
     Map result = MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec());
     assertEquals(result.size(), 1);
     Map properties = (Map) result.get("properties");
-    assertEquals(properties.size(), 19);
+    assertEquals(properties.size(), 20);
     assertEquals(properties.get("urn"), ImmutableMap.of("type", "keyword",
         "fields",
         ImmutableMap.of("delimited",
@@ -123,5 +123,9 @@ public void testMappingsBuilder() {
     assertEquals(feature1.get("type"), "double");
     Map feature2 = (Map) properties.get("feature2");
     assertEquals(feature2.get("type"), "double");
+
+    // DOUBLE
+    Map doubleField = (Map) properties.get("doubleField");
+    assertEquals(doubleField.get("type"), "double");
   }
 }
diff --git a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl
index 6dff14133ee60..db293140ad650 100644
--- a/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl
+++ b/test-models/src/main/pegasus/com/datahub/test/TestEntityInfo.pdl
@@ -90,4 +90,10 @@ record TestEntityInfo includes CustomProperties {
     }
   }
   esObjectField: optional map[string, string]
+
+  @Searchable = {
+    "fieldName": "doubleField",
+    "fieldType": "DOUBLE"
+  }
+  doubleField: optional double
 }
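Patch 5's new branch simply maps the `DOUBLE` field type to Elasticsearch's `double` mapping type. A Python paraphrase of the `MappingsBuilder` dispatch (assuming `ESUtils.DOUBLE_FIELD_TYPE == "double"`, as the tests above imply; the Java code logs unknown types rather than raising):

def mapping_for_field_type(field_type: str) -> dict:
    # Mirrors the if/else chain in MappingsBuilder.getMappingsForField.
    if field_type == "DATETIME":
        return {"type": "date"}
    if field_type == "OBJECT":
        return {"type": "object"}
    if field_type == "DOUBLE":  # the branch added by this patch
        return {"type": "double"}
    raise NotImplementedError(f"FieldType {field_type} has no mappings implemented")

print(mapping_for_field_type("DOUBLE"))  # {'type': 'double'}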
.../GetMetadataAnalyticsResolver.java | 93 +- .../resolver/IsAnalyticsEnabledResolver.java | 9 +- .../analytics/service/AnalyticsService.java | 179 +- .../analytics/service/AnalyticsUtil.java | 182 +- .../authorization/AuthorizationUtils.java | 183 +- .../exception/AuthenticationException.java | 16 +- .../exception/AuthorizationException.java | 5 +- .../DataHubDataFetcherExceptionHandler.java | 3 +- .../exception/DataHubGraphQLError.java | 12 +- .../exception/DataHubGraphQLException.java | 1 - .../exception/ValidationException.java | 16 +- .../graphql/featureflags/FeatureFlags.java | 1 - .../datahub/graphql/resolvers/AuthUtils.java | 24 +- .../resolvers/AuthenticatedResolver.java | 27 +- .../graphql/resolvers/BatchLoadUtils.java | 14 +- .../graphql/resolvers/EntityTypeMapper.java | 10 +- .../datahub/graphql/resolvers/MeResolver.java | 175 +- .../graphql/resolvers/ResolverUtils.java | 328 +- .../assertion/AssertionRunEventResolver.java | 128 +- .../assertion/DeleteAssertionResolver.java | 109 +- .../assertion/EntityAssertionsResolver.java | 102 +- .../resolvers/auth/AccessTokenUtil.java | 11 +- .../auth/CreateAccessTokenResolver.java | 133 +- .../auth/GetAccessTokenResolver.java | 75 +- .../auth/ListAccessTokensResolver.java | 123 +- .../auth/RevokeAccessTokenResolver.java | 58 +- .../resolvers/browse/BrowsePathsResolver.java | 86 +- .../resolvers/browse/BrowseResolver.java | 98 +- .../browse/EntityBrowsePathsResolver.java | 22 +- .../resolvers/chart/BrowseV2Resolver.java | 102 +- .../chart/ChartStatsSummaryResolver.java | 15 +- .../resolvers/config/AppConfigResolver.java | 111 +- .../container/ContainerEntitiesResolver.java | 93 +- .../container/ParentContainersResolver.java | 53 +- .../DashboardStatsSummaryResolver.java | 106 +- .../DashboardUsageStatsResolver.java | 74 +- .../dashboard/DashboardUsageStatsUtils.java | 184 +- .../BatchSetDataProductResolver.java | 101 +- .../CreateDataProductResolver.java | 69 +- .../DataProductAuthorizationUtils.java | 47 +- .../DeleteDataProductResolver.java | 55 +- .../ListDataProductAssetsResolver.java | 176 +- .../UpdateDataProductResolver.java | 77 +- .../dataset/DatasetHealthResolver.java | 130 +- .../dataset/DatasetStatsSummaryResolver.java | 96 +- .../dataset/DatasetUsageStatsResolver.java | 40 +- .../UpdateDeprecationResolver.java | 113 +- .../domain/CreateDomainResolver.java | 156 +- .../domain/DeleteDomainResolver.java | 65 +- .../domain/DomainEntitiesResolver.java | 100 +- .../resolvers/domain/ListDomainsResolver.java | 88 +- .../domain/ParentDomainsResolver.java | 72 +- .../resolvers/domain/SetDomainResolver.java | 86 +- .../resolvers/domain/UnsetDomainResolver.java | 74 +- .../resolvers/embed/UpdateEmbedResolver.java | 91 +- .../entity/EntityExistsResolver.java | 30 +- .../entity/EntityPrivilegesResolver.java | 71 +- .../glossary/AddRelatedTermsResolver.java | 135 +- .../glossary/CreateGlossaryNodeResolver.java | 116 +- .../glossary/CreateGlossaryTermResolver.java | 160 +- .../DeleteGlossaryEntityResolver.java | 61 +- .../GetRootGlossaryNodesResolver.java | 97 +- .../GetRootGlossaryTermsResolver.java | 88 +- .../glossary/ParentNodesResolver.java | 96 +- .../glossary/RemoveRelatedTermsResolver.java | 124 +- .../group/AddGroupMembersResolver.java | 72 +- .../resolvers/group/CreateGroupResolver.java | 41 +- .../resolvers/group/EntityCountsResolver.java | 62 +- .../resolvers/group/ListGroupsResolver.java | 82 +- .../group/RemoveGroupMembersResolver.java | 70 +- .../resolvers/group/RemoveGroupResolver.java | 46 +- 
.../resolvers/ingest/IngestionAuthUtils.java | 13 +- .../ingest/IngestionResolverUtils.java | 63 +- ...ncelIngestionExecutionRequestResolver.java | 101 +- ...eateIngestionExecutionRequestResolver.java | 194 +- .../CreateTestConnectionRequestResolver.java | 104 +- .../GetIngestionExecutionRequestResolver.java | 60 +- ...estionSourceExecutionRequestsResolver.java | 120 +- .../execution/RollbackIngestionResolver.java | 52 +- .../ingest/secret/CreateSecretResolver.java | 79 +- .../ingest/secret/DeleteSecretResolver.java | 27 +- .../secret/GetSecretValuesResolver.java | 97 +- .../ingest/secret/ListSecretsResolver.java | 107 +- .../resolvers/ingest/secret/SecretUtils.java | 11 +- .../source/DeleteIngestionSourceResolver.java | 29 +- .../source/GetIngestionSourceResolver.java | 52 +- .../source/ListIngestionSourcesResolver.java | 108 +- .../source/UpsertIngestionSourceResolver.java | 118 +- .../resolvers/jobs/DataJobRunsResolver.java | 127 +- .../resolvers/jobs/EntityRunsResolver.java | 140 +- .../lineage/UpdateLineageResolver.java | 230 +- .../resolvers/load/AspectResolver.java | 22 +- .../load/BatchGetEntitiesResolver.java | 28 +- .../load/EntityLineageResultResolver.java | 73 +- .../EntityRelationshipsResultResolver.java | 54 +- .../load/EntityTypeBatchResolver.java | 34 +- .../resolvers/load/EntityTypeResolver.java | 79 +- .../load/LoadableTypeBatchResolver.java | 41 +- .../resolvers/load/LoadableTypeResolver.java | 41 +- .../resolvers/load/OwnerTypeResolver.java | 42 +- .../load/TimeSeriesAspectResolver.java | 115 +- .../resolvers/mutate/AddLinkResolver.java | 61 +- .../resolvers/mutate/AddOwnerResolver.java | 45 +- .../resolvers/mutate/AddOwnersResolver.java | 66 +- .../resolvers/mutate/AddTagResolver.java | 80 +- .../resolvers/mutate/AddTagsResolver.java | 79 +- .../resolvers/mutate/AddTermResolver.java | 70 +- .../resolvers/mutate/AddTermsResolver.java | 75 +- .../mutate/BatchAddOwnersResolver.java | 62 +- .../mutate/BatchAddTagsResolver.java | 127 +- .../mutate/BatchAddTermsResolver.java | 115 +- .../mutate/BatchRemoveOwnersResolver.java | 77 +- .../mutate/BatchRemoveTagsResolver.java | 64 +- .../mutate/BatchRemoveTermsResolver.java | 64 +- .../mutate/BatchSetDomainResolver.java | 61 +- .../BatchUpdateDeprecationResolver.java | 68 +- .../BatchUpdateSoftDeletedResolver.java | 55 +- .../resolvers/mutate/DescriptionUtils.java | 440 ++- .../resolvers/mutate/MoveDomainResolver.java | 110 +- .../mutate/MutableTypeBatchResolver.java | 43 +- .../resolvers/mutate/MutableTypeResolver.java | 50 +- .../resolvers/mutate/MutationUtils.java | 80 +- .../resolvers/mutate/RemoveLinkResolver.java | 55 +- .../resolvers/mutate/RemoveOwnerResolver.java | 59 +- .../resolvers/mutate/RemoveTagResolver.java | 79 +- .../resolvers/mutate/RemoveTermResolver.java | 79 +- .../mutate/UpdateDescriptionResolver.java | 661 ++-- .../resolvers/mutate/UpdateNameResolver.java | 219 +- .../mutate/UpdateParentNodeResolver.java | 113 +- .../mutate/UpdateUserSettingResolver.java | 71 +- .../resolvers/mutate/util/DeleteUtils.java | 54 +- .../mutate/util/DeprecationUtils.java | 58 +- .../resolvers/mutate/util/DomainUtils.java | 217 +- .../resolvers/mutate/util/EmbedUtils.java | 25 +- .../resolvers/mutate/util/GlossaryUtils.java | 109 +- .../resolvers/mutate/util/LabelUtils.java | 409 +- .../resolvers/mutate/util/LinkUtils.java | 98 +- .../resolvers/mutate/util/OwnerUtils.java | 243 +- .../resolvers/mutate/util/SiblingsUtils.java | 21 +- .../operation/ReportOperationResolver.java | 105 +- .../CreateOwnershipTypeResolver.java | 41 +- 
 .../DeleteOwnershipTypeResolver.java | 28 +-
 .../ownership/ListOwnershipTypesResolver.java | 86 +-
 .../UpdateOwnershipTypeResolver.java | 51 +-
 .../policy/DeletePolicyResolver.java | 35 +-
 .../policy/GetGrantedPrivilegesResolver.java | 33 +-
 .../policy/ListPoliciesResolver.java | 33 +-
 .../resolvers/policy/PolicyAuthUtils.java | 10 +-
 .../policy/UpsertPolicyResolver.java | 48 +-
 .../mappers/PolicyInfoPolicyMapper.java | 45 +-
 .../mappers/PolicyUpdateInputInfoMapper.java | 43 +-
 .../resolvers/post/CreatePostResolver.java | 38 +-
 .../resolvers/post/DeletePostResolver.java | 19 +-
 .../resolvers/post/ListPostsResolver.java | 73 +-
 .../resolvers/query/CreateQueryResolver.java | 76 +-
 .../resolvers/query/DeleteQueryResolver.java | 46 +-
 .../resolvers/query/ListQueriesResolver.java | 83 +-
 .../resolvers/query/UpdateQueryResolver.java | 109 +-
 .../ListRecommendationsResolver.java | 111 +-
 .../resolvers/role/AcceptRoleResolver.java | 47 +-
 .../role/BatchAssignRoleResolver.java | 30 +-
 .../role/CreateInviteTokenResolver.java | 30 +-
 .../role/GetInviteTokenResolver.java | 30 +-
 .../resolvers/role/ListRolesResolver.java | 66 +-
 .../AggregateAcrossEntitiesResolver.java | 112 +-
 .../AutoCompleteForMultipleResolver.java | 133 +-
 .../search/AutoCompleteResolver.java | 114 +-
 .../resolvers/search/AutocompleteUtils.java | 102 +-
 .../search/GetQuickFiltersResolver.java | 172 +-
 .../search/ScrollAcrossEntitiesResolver.java | 124 +-
 .../search/ScrollAcrossLineageResolver.java | 124 +-
 .../search/SearchAcrossEntitiesResolver.java | 109 +-
 .../search/SearchAcrossLineageResolver.java | 138 +-
 .../resolvers/search/SearchResolver.java | 74 +-
 .../graphql/resolvers/search/SearchUtils.java | 301 +-
 .../UpdateCorpUserViewsSettingsResolver.java | 76 +-
 .../view/GlobalViewsSettingsResolver.java | 36 +-
 .../UpdateGlobalViewsSettingsResolver.java | 70 +-
 .../step/BatchGetStepStatesResolver.java | 109 +-
 .../step/BatchUpdateStepStatesResolver.java | 65 +-
 .../resolvers/tag/CreateTagResolver.java | 93 +-
 .../resolvers/tag/DeleteTagResolver.java | 55 +-
 .../resolvers/tag/SetTagColorResolver.java | 104 +-
 .../resolvers/test/CreateTestResolver.java | 85 +-
 .../resolvers/test/DeleteTestResolver.java | 32 +-
 .../resolvers/test/ListTestsResolver.java | 81 +-
 .../resolvers/test/TestResultsResolver.java | 55 +-
 .../graphql/resolvers/test/TestUtils.java | 14 +-
 .../resolvers/test/UpdateTestResolver.java | 53 +-
 .../timeline/GetSchemaBlameResolver.java | 62 +-
 .../GetSchemaVersionListResolver.java | 53 +-
 .../type/AspectInterfaceTypeResolver.java | 20 +-
 .../type/EntityInterfaceTypeResolver.java | 26 +-
 .../type/HyperParameterValueTypeResolver.java | 36 +-
 .../type/PlatformSchemaUnionTypeResolver.java | 22 +-
 .../resolvers/type/ResultsTypeResolver.java | 18 +-
 ...TimeSeriesAspectInterfaceTypeResolver.java | 3 +-
 .../CreateNativeUserResetTokenResolver.java | 41 +-
 .../resolvers/user/ListUsersResolver.java | 77 +-
 .../resolvers/user/RemoveUserResolver.java | 46 +-
 .../user/UpdateUserStatusResolver.java | 46 +-
 .../resolvers/view/CreateViewResolver.java | 79 +-
 .../resolvers/view/DeleteViewResolver.java | 40 +-
 .../view/ListGlobalViewsResolver.java | 86 +-
 .../resolvers/view/ListMyViewsResolver.java | 100 +-
 .../resolvers/view/UpdateViewResolver.java | 63 +-
 .../graphql/resolvers/view/ViewUtils.java | 92 +-
 .../graphql/scalar/LongScalarType.java | 3 +-
 .../graphql/types/BatchMutableType.java | 18 +-
 .../graphql/types/BrowsableEntityType.java | 58 +-
 .../datahub/graphql/types/EntityType.java | 16 +-
 .../datahub/graphql/types/LoadableType.java | 68 +-
 .../datahub/graphql/types/MutableType.java | 25 +-
 .../graphql/types/SearchableEntityType.java | 83 +-
 .../graphql/types/aspect/AspectMapper.java | 1 -
 .../graphql/types/aspect/AspectType.java | 72 +-
 .../types/assertion/AssertionMapper.java | 45 +-
 .../types/assertion/AssertionType.java | 112 +-
 .../types/auth/AccessTokenMetadataType.java | 25 +-
 .../mappers/AccessTokenMetadataMapper.java | 9 +-
 .../graphql/types/chart/ChartType.java | 366 +-
 .../types/chart/mappers/ChartMapper.java | 339 +-
 .../chart/mappers/ChartUpdateInputMapper.java | 109 +-
 .../chart/mappers/InputFieldsMapper.java | 53 +-
 .../common/mappers/AuditStampMapper.java | 26 +-
 .../common/mappers/BrowsePathsV2Mapper.java | 6 +-
 .../mappers/ChangeAuditStampsMapper.java | 4 +-
 .../types/common/mappers/CostMapper.java | 26 +-
 .../types/common/mappers/CostValueMapper.java | 29 +-
 .../mappers/CustomPropertiesMapper.java | 36 +-
 .../DataPlatformInstanceAspectMapper.java | 10 +-
 .../common/mappers/DeprecationMapper.java | 32 +-
 .../types/common/mappers/EmbedMapper.java | 1 -
 .../mappers/FineGrainedLineagesMapper.java | 42 +-
 .../mappers/InstitutionalMemoryMapper.java | 27 +-
 .../InstitutionalMemoryMetadataMapper.java | 46 +-
 ...stitutionalMemoryMetadataUpdateMapper.java | 37 +-
 .../InstitutionalMemoryUpdateMapper.java | 34 +-
 .../types/common/mappers/OperationMapper.java | 91 +-
 .../types/common/mappers/OwnerMapper.java | 74 +-
 .../common/mappers/OwnerUpdateMapper.java | 76 +-
 .../types/common/mappers/OwnershipMapper.java | 33 +-
 .../common/mappers/OwnershipSourceMapper.java | 30 +-
 .../common/mappers/OwnershipUpdateMapper.java | 37 +-
 .../mappers/SearchFlagsInputMapper.java | 10 +-
 .../types/common/mappers/SiblingsMapper.java | 12 +-
 .../types/common/mappers/StatusMapper.java | 21 +-
 .../types/common/mappers/StringMapMapper.java | 31 +-
 .../types/common/mappers/SubTypesMapper.java | 9 +-
 .../mappers/UpstreamLineagesMapper.java | 12 +-
 .../common/mappers/UrnToEntityMapper.java | 7 +-
 .../common/mappers/util/MappingHelper.java | 8 +-
 .../types/common/mappers/util/RunInfo.java | 1 -
 .../mappers/util/SystemMetadataUtils.java | 19 +-
 .../mappers/util/UpdateMappingHelper.java | 1 -
 .../types/container/ContainerType.java | 108 +-
 .../container/mappers/ContainerMapper.java | 80 +-
 .../types/corpgroup/CorpGroupType.java | 329 +-
 .../types/corpgroup/CorpGroupUtils.java | 24 +-
 .../CorpGroupEditablePropertiesMapper.java | 18 +-
 .../mappers/CorpGroupInfoMapper.java | 76 +-
 .../corpgroup/mappers/CorpGroupMapper.java | 119 +-
 .../mappers/CorpGroupPropertiesMapper.java | 9 +-
 .../graphql/types/corpuser/CorpUserType.java | 355 +-
 .../graphql/types/corpuser/CorpUserUtils.java | 24 +-
 .../mappers/CorpUserEditableInfoMapper.java | 46 +-
 .../corpuser/mappers/CorpUserInfoMapper.java | 48 +-
 .../corpuser/mappers/CorpUserMapper.java | 212 +-
 .../mappers/CorpUserPropertiesMapper.java | 12 +-
 .../mappers/CorpUserStatusMapper.java | 9 +-
 .../types/dashboard/DashboardType.java | 366 +-
 .../dashboard/mappers/DashboardMapper.java | 325 +-
 .../mappers/DashboardUpdateInputMapper.java | 110 +-
 .../mappers/DashboardUsageMetricMapper.java | 7 +-
 .../graphql/types/dataflow/DataFlowType.java | 340 +-
 .../dataflow/mappers/DataFlowMapper.java | 239 +-
 .../mappers/DataFlowUpdateInputMapper.java | 44 +-
 .../graphql/types/datajob/DataJobType.java | 346 +-
 .../types/datajob/mappers/DataJobMapper.java | 247 +-
 .../mappers/DataJobUpdateInputMapper.java | 104 +-
 .../types/dataplatform/DataPlatformType.java | 86 +-
 .../mappers/DataPlatformInfoMapper.java | 36 +-
 .../mappers/DataPlatformMapper.java | 64 +-
 .../mappers/DataPlatformPropertiesMapper.java | 37 +-
 .../DataPlatformInstanceType.java | 182 +-
 .../mappers/DataPlatformInstanceMapper.java | 107 +-
 .../mappers/DataProcessInstanceMapper.java | 64 +-
 .../DataProcessInstanceRunEventMapper.java | 65 +-
 .../DataProcessInstanceRunResultMapper.java | 42 +-
 .../types/dataproduct/DataProductType.java | 87 +-
 .../mappers/DataProductMapper.java | 67 +-
 .../graphql/types/dataset/DatasetType.java | 487 +--
 .../graphql/types/dataset/DatasetUtils.java | 16 +-
 .../types/dataset/VersionedDatasetType.java | 64 +-
 .../mappers/AssertionRunEventMapper.java | 13 +-
 .../mappers/DatasetDeprecationMapper.java | 31 +-
 .../types/dataset/mappers/DatasetMapper.java | 305 +-
 .../dataset/mappers/DatasetProfileMapper.java | 21 +-
 .../mappers/DatasetUpdateInputMapper.java | 78 +-
 .../EditableSchemaFieldInfoMapper.java | 51 +-
 .../mappers/EditableSchemaMetadataMapper.java | 35 +-
 .../mappers/ForeignKeyConstraintMapper.java | 18 +-
 .../dataset/mappers/PlatformSchemaMapper.java | 109 +-
 .../dataset/mappers/SchemaFieldMapper.java | 119 +-
 .../types/dataset/mappers/SchemaMapper.java | 71 +-
 .../dataset/mappers/SchemaMetadataMapper.java | 65 +-
 .../mappers/VersionedDatasetMapper.java | 110 +-
 .../types/domain/DomainAssociationMapper.java | 38 +-
 .../graphql/types/domain/DomainMapper.java | 25 +-
 .../graphql/types/domain/DomainType.java | 74 +-
 .../types/glossary/GlossaryNodeType.java | 52 +-
 .../types/glossary/GlossaryTermType.java | 246 +-
 .../types/glossary/GlossaryTermUtils.java | 28 +-
 .../glossary/mappers/GlossaryNodeMapper.java | 16 +-
 .../mappers/GlossaryTermInfoMapper.java | 53 +-
 .../glossary/mappers/GlossaryTermMapper.java | 107 +-
 .../mappers/GlossaryTermPropertiesMapper.java | 17 +-
 .../glossary/mappers/GlossaryTermsMapper.java | 75 +-
 .../mappers/AutoCompleteResultsMapper.java | 33 +-
 .../types/mappers/BrowsePathMapper.java | 30 +-
 .../types/mappers/BrowsePathsMapper.java | 25 +-
 .../types/mappers/BrowseResultMapper.java | 11 +-
 .../types/mappers/InputModelMapper.java | 8 +-
 .../graphql/types/mappers/MapperUtils.java | 87 +-
 .../graphql/types/mappers/ModelMapper.java | 7 +-
 .../types/mappers/TimeSeriesAspectMapper.java | 6 +-
 .../UrnScrollAcrossLineageResultsMapper.java | 15 +-
 .../types/mappers/UrnScrollResultsMapper.java | 9 +-
 .../UrnSearchAcrossLineageResultsMapper.java | 31 +-
 .../types/mappers/UrnSearchResultsMapper.java | 14 +-
 .../types/mlmodel/MLFeatureTableType.java | 220 +-
 .../graphql/types/mlmodel/MLFeatureType.java | 142 +-
 .../types/mlmodel/MLModelGroupType.java | 222 +-
 .../graphql/types/mlmodel/MLModelType.java | 213 +-
 .../graphql/types/mlmodel/MLModelUtils.java | 63 +-
 .../types/mlmodel/MLPrimaryKeyType.java | 143 +-
 .../types/mlmodel/mappers/BaseDataMapper.java | 25 +-
 .../CaveatsAndRecommendationsMapper.java | 43 +-
 .../mlmodel/mappers/CaveatsDetailsMapper.java | 28 +-
 .../mappers/EthicalConsiderationsMapper.java | 34 +-
 .../mappers/HyperParameterMapMapper.java | 30 +-
 .../HyperParameterValueTypeMapper.java | 49 +-
 .../mlmodel/mappers/IntendedUseMapper.java | 36 +-
 .../mlmodel/mappers/MLFeatureMapper.java | 166 +-
 .../mappers/MLFeaturePropertiesMapper.java | 58 +-
 .../mlmodel/mappers/MLFeatureTableMapper.java | 170 +-
 .../MLFeatureTablePropertiesMapper.java | 71 +-
 .../mlmodel/mappers/MLHyperParamMapper.java | 30 +-
 .../types/mlmodel/mappers/MLMetricMapper.java | 27 +-
 .../mappers/MLModelFactorPromptsMapper.java | 43 +-
 .../mlmodel/mappers/MLModelFactorsMapper.java | 43 +-
 .../mlmodel/mappers/MLModelGroupMapper.java | 160 +-
 .../mappers/MLModelGroupPropertiesMapper.java | 35 +-
 .../types/mlmodel/mappers/MLModelMapper.java | 257 +-
 .../mappers/MLModelPropertiesMapper.java | 100 +-
 .../mlmodel/mappers/MLPrimaryKeyMapper.java | 152 +-
 .../mappers/MLPrimaryKeyPropertiesMapper.java | 58 +-
 .../types/mlmodel/mappers/MetricsMapper.java | 23 +-
 .../mappers/QuantitativeAnalysesMapper.java | 29 +-
 .../mlmodel/mappers/ResultsTypeMapper.java | 30 +-
 .../mlmodel/mappers/SourceCodeUrlMapper.java | 28 +-
 .../mlmodel/mappers/VersionTagMapper.java | 26 +-
 .../graphql/types/notebook/NotebookType.java | 159 +-
 .../notebook/mappers/NotebookMapper.java | 139 +-
 .../mappers/NotebookUpdateInputMapper.java | 34 +-
 .../types/ownership/OwnershipType.java | 34 +-
 .../types/ownership/OwnershipTypeMapper.java | 13 +-
 .../types/policy/DataHubPolicyMapper.java | 44 +-
 .../types/policy/DataHubPolicyType.java | 26 +-
 .../graphql/types/post/PostMapper.java | 5 +-
 .../graphql/types/query/QueryMapper.java | 23 +-
 .../graphql/types/query/QueryType.java | 33 +-
 .../DataFlowDataJobsRelationshipsMapper.java | 36 +-
 .../DownstreamEntityRelationshipsMapper.java | 36 +-
 .../EntityRelationshipLegacyMapper.java | 38 +-
 .../UpstreamEntityRelationshipsMapper.java | 34 +-
 .../graphql/types/role/DataHubRoleType.java | 26 +-
 .../types/role/mappers/DataHubRoleMapper.java | 5 +-
 .../graphql/types/rolemetadata/RoleType.java | 158 +-
 .../rolemetadata/mappers/AccessMapper.java | 60 +-
 .../rolemetadata/mappers/RoleMapper.java | 120 +-
 .../types/schemafield/SchemaFieldType.java | 25 +-
 .../datahub/graphql/types/tag/TagType.java | 278 +-
 .../types/tag/mappers/GlobalTagsMapper.java | 51 +-
 .../mappers/TagAssociationUpdateMapper.java | 35 +-
 .../graphql/types/tag/mappers/TagMapper.java | 88 +-
 .../tag/mappers/TagUpdateInputMapper.java | 21 +-
 .../graphql/types/test/TestMapper.java | 14 +-
 .../datahub/graphql/types/test/TestType.java | 39 +-
 .../timeline/mappers/SchemaBlameMapper.java | 72 +-
 .../mappers/SchemaVersionListMapper.java | 50 +-
 .../types/timeline/utils/TimelineUtils.java | 30 +-
 .../types/usage/FieldUsageCountsMapper.java | 7 +-
 .../types/usage/UsageAggregationMapper.java | 11 +-
 .../usage/UsageAggregationMetricsMapper.java | 23 +-
 .../UsageQueryResultAggregationMapper.java | 27 +-
 .../types/usage/UsageQueryResultMapper.java | 17 +-
 .../types/usage/UserUsageCountsMapper.java | 12 +-
 .../graphql/types/view/DataHubViewMapper.java | 38 +-
 .../graphql/types/view/DataHubViewType.java | 26 +-
 .../datahub/graphql/util/DateUtil.java | 51 +-
 .../graphql/util/SearchInsightsUtil.java | 3 +-
 .../linkedin/datahub/graphql/TestUtils.java | 92 +-
 .../graphql/resolvers/ResolverUtilsTest.java | 112 +-
 .../resolvers/UpdateLineageResolverTest.java | 78 +-
 .../AssertionRunEventResolverTest.java | 102 +-
 .../DeleteAssertionResolverTest.java | 154 +-
 .../EntityAssertionsResolverTest.java | 179 +-
 .../auth/ListAccessTokensResolverTest.java | 31 +-
 .../browse/BrowseV2ResolverTest.java | 230 +-
 .../browse/EntityBrowsePathsResolverTest.java | 20 +-
 .../ContainerEntitiesResolverTest.java | 78 +-
 .../ParentContainersResolverTest.java | 149 +-
 .../dashboard/DashboardStatsSummaryTest.java | 162 +-
 .../dataset/DatasetHealthResolverTest.java | 207 +-
 .../DatasetStatsSummaryResolverTest.java | 96 +-
 .../BatchUpdateSoftDeletedResolverTest.java | 122 +-
 .../BatchUpdateDeprecationResolverTest.java | 188 +-
 .../UpdateDeprecationResolverTest.java | 183 +-
 .../domain/BatchSetDomainResolverTest.java | 234 +-
 .../domain/CreateDomainProposalMatcher.java | 21 +-
 .../domain/CreateDomainResolverTest.java | 187 +-
 .../domain/DeleteDomainResolverTest.java | 44 +-
 .../domain/DomainEntitiesResolverTest.java | 83 +-
 .../domain/ListDomainsResolverTest.java | 141 +-
 .../domain/MoveDomainResolverTest.java | 67 +-
 .../domain/ParentDomainsResolverTest.java | 97 +-
 .../domain/SetDomainResolverTest.java | 196 +-
 .../domain/UnsetDomainResolverTest.java | 155 +-
 .../embed/UpdateEmbedResolverTest.java | 106 +-
 .../entity/EntityExistsResolverTest.java | 7 +-
 .../entity/EntityPrivilegesResolverTest.java | 24 +-
 .../glossary/AddRelatedTermsResolverTest.java | 107 +-
 .../CreateGlossaryNodeResolverTest.java | 80 +-
 .../CreateGlossaryTermResolverTest.java | 176 +-
 .../DeleteGlossaryEntityResolverTest.java | 35 +-
 .../GetRootGlossaryNodesResolverTest.java | 68 +-
 .../GetRootGlossaryTermsResolverTest.java | 63 +-
 .../resolvers/glossary/GlossaryUtilsTest.java | 176 +-
 .../glossary/ParentNodesResolverTest.java | 293 +-
 .../RemoveRelatedTermsResolverTest.java | 88 +-
 .../glossary/UpdateNameResolverTest.java | 68 +-
 .../UpdateParentNodeResolverTest.java | 66 +-
 .../group/AddGroupMembersResolverTest.java | 9 +-
 .../group/CreateGroupResolverTest.java | 9 +-
 .../group/RemoveGroupMembersResolverTest.java | 9 +-
 .../resolvers/ingest/IngestTestUtils.java | 44 +-
 .../ingest/IngestionAuthUtilsTest.java | 34 +-
 ...IngestionExecutionRequestResolverTest.java | 75 +-
 ...IngestionExecutionRequestResolverTest.java | 75 +-
 ...eateTestConnectionRequestResolverTest.java | 37 +-
 ...IngestionExecutionRequestResolverTest.java | 82 +-
 ...onSourceExecutionRequestsResolverTest.java | 138 +-
 .../RollbackIngestionResolverTest.java | 28 +-
 .../CreateSecretResolverMatcherTest.java | 19 +-
 .../secret/CreateSecretResolverTest.java | 58 +-
 .../secret/DeleteSecretResolverTest.java | 16 +-
 .../secret/GetSecretValuesResolverTest.java | 66 +-
 .../secret/ListSecretsResolverTest.java | 121 +-
 .../DeleteIngestionSourceResolverTest.java | 25 +-
 .../GetIngestionSourceResolverTest.java | 65 +-
 .../ListIngestionSourceResolverTest.java | 126 +-
 .../UpsertIngestionSourceResolverTest.java | 77 +-
 .../mutate/MutableTypeBatchResolverTest.java | 269 +-
 .../resolvers/mutate/SiblingsUtilsTest.java | 58 +-
 .../mutate/UpdateUserSettingResolverTest.java | 17 +-
 .../ReportOperationResolverTest.java | 57 +-
 .../owner/AddOwnersResolverTest.java | 333 +-
 .../owner/BatchAddOwnersResolverTest.java | 353 +-
 .../owner/BatchRemoveOwnersResolverTest.java | 161 +-
 .../CreateOwnershipTypeResolverTest.java | 52 +-
 .../DeleteOwnershipTypeResolverTest.java | 49 +-
 .../ListOwnershipTypesResolverTest.java | 82 +-
 .../UpdateOwnershipTypeResolverTest.java | 95 +-
 .../post/CreatePostResolverTest.java | 51 +-
 .../post/DeletePostResolverTest.java | 9 +-
 .../resolvers/post/ListPostsResolverTest.java | 60 +-
 .../query/CreateQueryResolverTest.java | 253 +-
 .../query/DeleteQueryResolverTest.java | 102 +-
 .../query/ListQueriesResolverTest.java | 119 +-
 .../query/UpdateQueryResolverTest.java | 288 +-
 .../role/AcceptRoleResolverTest.java | 24 +-
 .../role/BatchAssignRoleResolverTest.java | 9 +-
 .../role/CreateInviteTokenResolverTest.java | 15 +-
 .../role/GetInviteTokenResolverTest.java | 15 +-
 .../resolvers/role/ListRolesResolverTest.java | 60 +-
 .../AggregateAcrossEntitiesResolverTest.java | 487 ++-
 .../AutoCompleteForMultipleResolverTest.java | 243 +-
 .../search/GetQuickFiltersResolverTest.java | 214 +-
 .../SearchAcrossEntitiesResolverTest.java | 686 ++--
 .../SearchAcrossLineageResolverTest.java | 42 +-
 .../resolvers/search/SearchResolverTest.java | 299 +-
 .../resolvers/search/SearchUtilsTest.java | 580 +--
 ...dateCorpUserViewsSettingsResolverTest.java | 152 +-
 .../view/GlobalViewsSettingsResolverTest.java | 47 +-
 ...UpdateGlobalViewsSettingsResolverTest.java | 100 +-
 .../step/BatchGetStepStatesResolverTest.java | 51 +-
 .../BatchUpdateStepStatesResolverTest.java | 12 +-
 .../resolvers/tag/AddTagsResolverTest.java | 156 +-
 .../tag/BatchAddTagsResolverTest.java | 255 +-
 .../tag/BatchRemoveTagsResolverTest.java | 206 +-
 .../resolvers/tag/CreateTagResolverTest.java | 49 +-
 .../resolvers/tag/DeleteTagResolverTest.java | 21 +-
 .../tag/SetTagColorResolverTest.java | 108 +-
 .../resolvers/term/AddTermsResolverTest.java | 169 +-
 .../term/BatchAddTermsResolverTest.java | 195 +-
 .../term/BatchRemoveTermsResolverTest.java | 165 +-
 .../test/CreateTestResolverTest.java | 46 +-
 .../test/DeleteTestResolverTest.java | 21 +-
 .../resolvers/test/ListTestsResolverTest.java | 82 +-
 .../test/UpdateTestResolverTest.java | 42 +-
 ...reateNativeUserResetTokenResolverTest.java | 15 +-
 .../view/CreateViewResolverTest.java | 172 +-
 .../view/DeleteViewResolverTest.java | 70 +-
 .../view/ListGlobalViewsResolverTest.java | 80 +-
 .../view/ListMyViewsResolverTest.java | 156 +-
 .../view/UpdateViewResolverTest.java | 261 +-
 .../graphql/resolvers/view/ViewUtilsTest.java | 159 +-
 .../types/assertion/AssertionTypeTest.java | 102 +-
 .../types/container/ContainerTypeTest.java | 187 +-
 .../DataPlatformInstanceTest.java | 356 +-
 .../dataset/mappers/DatasetMapperTest.java | 293 +-
 .../mappers/DatasetProfileMapperTest.java | 322 +-
 .../graphql/types/domain/DomainTypeTest.java | 128 +-
 .../types/notebook/NotebookTypeTest.java | 255 +-
 .../graphql/types/query/QueryTypeTest.java | 277 +-
 .../types/view/DataHubViewTypeTest.java | 303 +-
 .../datahub/graphql/utils/DateUtilTest.java | 67 +-
 .../graphql/utils/MutationsUtilsTest.java | 28 +-
 .../utils/SystemMetadataUtilsTest.java | 105 +-
 .../com/linkedin/datahub/upgrade/Upgrade.java | 19 +-
 .../datahub/upgrade/UpgradeCleanupStep.java | 12 +-
 .../linkedin/datahub/upgrade/UpgradeCli.java | 7 +-
 .../upgrade/UpgradeCliApplication.java | 23 +-
 .../datahub/upgrade/UpgradeContext.java | 26 +-
 .../datahub/upgrade/UpgradeManager.java | 14 +-
 .../datahub/upgrade/UpgradeReport.java | 18 +-
 .../datahub/upgrade/UpgradeResult.java | 29 +-
 .../linkedin/datahub/upgrade/UpgradeStep.java | 24 +-
 .../datahub/upgrade/UpgradeStepResult.java | 43 +-
 .../datahub/upgrade/UpgradeUtils.java | 6 +-
 .../common/steps/ClearGraphServiceStep.java | 1 -
 .../common/steps/ClearSearchServiceStep.java | 4 +-
 .../common/steps/GMSDisableWriteModeStep.java | 1 -
 .../common/steps/GMSEnableWriteModeStep.java | 1 -
 .../common/steps/GMSQualificationStep.java | 64 +-
 .../config/BackfillBrowsePathsV2Config.java | 4 +-
 .../upgrade/config/BuildIndicesConfig.java | 22 +-
 .../upgrade/config/CleanIndicesConfig.java | 22 +-
 .../upgrade/config/NoCodeCleanupConfig.java | 15 +-
 .../upgrade/config/NoCodeUpgradeConfig.java | 7 +-
 .../config/RemoveUnknownAspectsConfig.java | 1 -
 .../upgrade/config/RestoreBackupConfig.java | 21 +-
 .../upgrade/config/RestoreIndicesConfig.java | 11 +-
 .../upgrade/config/SystemUpdateConfig.java | 31 +-
 .../upgrade/impl/DefaultUpgradeContext.java | 1 -
 .../upgrade/impl/DefaultUpgradeManager.java | 59 +-
 .../upgrade/impl/DefaultUpgradeReport.java | 3 +-
 .../upgrade/impl/DefaultUpgradeResult.java | 1 -
 .../impl/DefaultUpgradeStepResult.java | 1 -
 .../upgrade/nocode/CreateAspectTableStep.java | 60 +-
 .../upgrade/nocode/DataMigrationStep.java | 103 +-
 .../datahub/upgrade/nocode/NoCodeUpgrade.java | 5 +-
 .../nocode/RemoveAspectV2TableStep.java | 5 +-
 .../nocode/UpgradeQualificationStep.java | 15 +-
 .../nocodecleanup/DeleteAspectTableStep.java | 5 +-
 .../DeleteLegacyGraphRelationshipsStep.java | 8 +-
 .../DeleteLegacySearchIndicesStep.java | 4 +-
 .../nocodecleanup/NoCodeCleanupUpgrade.java | 15 +-
 .../NoCodeUpgradeQualificationStep.java | 17 +-
 .../RemoveClientIdAspectStep.java | 8 +-
 .../RemoveUnknownAspects.java | 1 -
 .../restorebackup/ClearAspectV2TableStep.java | 5 +-
 .../upgrade/restorebackup/RestoreBackup.java | 4 +-
 .../restorebackup/RestoreStorageStep.java | 83 +-
 .../backupreader/BackupReader.java | 6 +-
 .../backupreader/BackupReaderArgs.java | 9 +-
 .../EbeanAspectBackupIterator.java | 20 +-
 .../backupreader/LocalParquetReader.java | 17 +-
 .../backupreader/ParquetReaderWrapper.java | 26 +-
 .../backupreader/ReaderWrapper.java | 17 +-
 .../restoreindices/RestoreIndices.java | 15 +-
 .../upgrade/restoreindices/SendMAEStep.java | 99 +-
 .../datahub/upgrade/system/SystemUpdate.java | 75 +-
 .../system/elasticsearch/BuildIndices.java | 79 +-
 .../system/elasticsearch/CleanIndices.java | 56 +-
 .../steps/BuildIndicesPostStep.java | 35 +-
 .../steps/BuildIndicesPreStep.java | 62 +-
 .../elasticsearch/steps/BuildIndicesStep.java | 2 -
 .../elasticsearch/steps/CleanIndicesStep.java | 79 +-
 .../steps/DataHubStartupStep.java | 8 +-
 .../system/elasticsearch/util/IndexUtils.java | 45 +-
 .../entity/steps/BackfillBrowsePathsV2.java | 1 -
 .../steps/BackfillBrowsePathsV2Step.java | 90 +-
 .../DatahubUpgradeNoSchemaRegistryTest.java | 105 +-
 .../upgrade/UpgradeCliApplicationTest.java | 69 +-
 ...pgradeCliApplicationTestConfiguration.java | 24 +-
 docker/build.gradle | 2 +-
 .../linkedin/metadata/models/AspectSpec.java | 50 +-
 .../metadata/models/ConfigEntitySpec.java | 12 +-
 .../metadata/models/DataSchemaFactory.java | 60 +-
 .../metadata/models/DefaultEntitySpec.java | 5 +-
 .../metadata/models/DefaultEventSpec.java | 1 -
 .../linkedin/metadata/models/EntitySpec.java | 5 +-
 .../metadata/models/EntitySpecBuilder.java | 252 +-
 .../metadata/models/EntitySpecUtils.java | 18 +-
 .../linkedin/metadata/models/EventSpec.java | 17 +-
 .../metadata/models/EventSpecBuilder.java | 25 +-
 .../linkedin/metadata/models/FieldSpec.java | 12 +-
 .../metadata/models/FieldSpecUtils.java | 21 +-
 .../models/ModelValidationException.java | 4 +-
 .../metadata/models/PartialEntitySpec.java | 20 +-
 .../models/PropertyOverrideComparator.java | 1 -
 .../models/RelationshipFieldSpec.java | 9 +-
 .../RelationshipFieldSpecExtractor.java | 45 +-
 .../metadata/models/SearchScoreFieldSpec.java | 3 +-
 .../models/SearchScoreFieldSpecExtractor.java | 25 +-
 .../metadata/models/SearchableFieldSpec.java | 3 +-
 .../models/SearchableFieldSpecExtractor.java | 127 +-
 .../models/TimeseriesFieldCollectionSpec.java | 3 +-
 .../metadata/models/TimeseriesFieldSpec.java | 3 +-
 .../models/TimeseriesFieldSpecExtractor.java | 84 +-
 .../models/annotation/AnnotationUtils.java | 4 +-
 .../models/annotation/AspectAnnotation.java | 23 +-
 .../models/annotation/EntityAnnotation.java | 25 +-
 .../models/annotation/EventAnnotation.java | 23 +-
 .../annotation/RelationshipAnnotation.java | 49 +-
 .../annotation/SearchScoreAnnotation.java | 24 +-
 .../annotation/SearchableAnnotation.java | 62 +-
 .../annotation/TimeseriesFieldAnnotation.java | 21 +-
 .../TimeseriesFieldCollectionAnnotation.java | 18 +-
 .../models/extractor/AspectExtractor.java | 28 +-
 .../models/extractor/FieldExtractor.java | 55 +-
 .../models/registry/ConfigEntityRegistry.java | 71 +-
 .../models/registry/EntityRegistry.java | 19 +-
 .../models/registry/EntityRegistryUtils.java | 12 +-
 .../models/registry/LineageRegistry.java | 103 +-
 .../models/registry/MergedEntityRegistry.java | 98 +-
 .../models/registry/PatchEntityRegistry.java | 141 +-
 .../registry/PluginEntityRegistryLoader.java | 151 +-
 .../registry/SnapshotEntityRegistry.java | 34 +-
 .../models/registry/config/Entity.java | 10 +-
 .../config/EntityRegistryLoadResult.java | 4 +-
 .../models/registry/config/Event.java | 2 +-
 .../template/ArrayMergingTemplate.java | 88 +-
 .../template/AspectTemplateEngine.java | 40 +-
 .../template/CompoundKeyTemplate.java | 17 +-
 .../models/registry/template/Template.java | 28 +-
 .../template/common/GlobalTagsTemplate.java | 4 +-
 .../common/GlossaryTermsTemplate.java | 24 +-
 .../template/common/OwnershipTemplate.java | 18 +-
 .../dataflow/DataFlowInfoTemplate.java | 1 -
 .../template/datajob/DataJobInfoTemplate.java | 1 -
 .../datajob/DataJobInputOutputTemplate.java | 70 +-
 .../DataProductPropertiesTemplate.java | 4 +-
 .../dataset/DatasetPropertiesTemplate.java | 1 -
 .../EditableSchemaMetadataTemplate.java | 86 +-
 .../dataset/UpstreamLineageTemplate.java | 8 +-
 .../registry/template/util/TemplateUtil.java | 28 +-
 .../models/DataSchemaFactoryTest.java | 16 +-
 .../models/EntitySpecBuilderTest.java | 392 +-
 .../registry/ConfigEntityRegistryTest.java | 23 +-
 .../models/registry/LineageRegistryTest.java | 72 +-
 .../registry/PatchEntityRegistryTest.java | 45 +-
 .../PluginEntityRegistryLoaderTest.java | 276 +-
 .../models/registry/TestConstants.java | 4 +-
 gradle/checkstyle/checkstyle.xml | 198 -
 gradle/checkstyle/suppressions.xml | 7 -
 .../ingestion/IngestionScheduler.java | 241 +-
 .../ingestion/IngestionSchedulerTest.java | 193 +-
 .../java/com/datahub/util/ModelUtils.java | 235 +-
 .../java/com/datahub/util/RecordUtils.java | 291 +-
 .../main/java/com/datahub/util/Statement.java | 1 -
 .../util/exception/ESQueryException.java | 4 +-
 .../exception/InvalidSchemaException.java | 4 +-
 .../exception/ModelConversionException.java | 4 +-
 .../util/validator/AspectValidator.java | 24 +-
 .../util/validator/DeltaValidator.java | 23 +-
 .../util/validator/DocumentValidator.java | 48 +-
 .../util/validator/EntityValidator.java | 68 +-
 .../util/validator/RelationshipValidator.java | 119 +-
 .../util/validator/SnapshotValidator.java | 56 +-
 .../util/validator/ValidationUtils.java | 140 +-
 .../java/com/linkedin/metadata/Constants.java | 99 +-
 .../java/com/linkedin/util/Configuration.java | 40 +-
 .../com/linkedin/common/uri/Uri.java | 48 +-
 .../com/linkedin/common/uri/UriCoercer.java | 19 +-
 .../com/linkedin/common/url/Url.java | 48 +-
 .../com/linkedin/common/url/UrlCoercer.java | 19 +-
 .../linkedin/common/urn/AzkabanFlowUrn.java | 31 +-
 .../linkedin/common/urn/AzkabanJobUrn.java | 30 +-
 .../com/linkedin/common/urn/ChartUrn.java | 30 +-
 .../com/linkedin/common/urn/CorpGroupUrn.java | 34 +-
 .../com/linkedin/common/urn/CorpuserUrn.java | 33 +-
 .../com/linkedin/common/urn/DashboardUrn.java | 30 +-
 .../com/linkedin/common/urn/DataFlowUrn.java | 31 +-
 .../com/linkedin/common/urn/DataJobUrn.java | 31 +-
 .../linkedin/common/urn/DataPlatformUrn.java | 28 +-
 .../linkedin/common/urn/DataProcessUrn.java | 37 +-
 .../linkedin/common/urn/DatasetFieldUrn.java | 62 +-
 .../com/linkedin/common/urn/DatasetUrn.java | 36 +-
 .../com/linkedin/common/urn/FabricUrn.java | 29 +-
 .../linkedin/common/urn/GlossaryNodeUrn.java | 94 +-
 .../linkedin/common/urn/GlossaryTermUrn.java | 35 +-
 .../com/linkedin/common/urn/MLFeatureUrn.java | 30 +-
 .../com/linkedin/common/urn/MLModelUrn.java | 40 +-
 .../com/linkedin/common/urn/NotebookUrn.java | 30 +-
 .../com/linkedin/common/urn/TagUrn.java | 94 +-
 .../linkedin/common/urn/TestEntityUrn.java | 101 +-
 .../com/linkedin/common/urn/TupleKey.java | 72 +-
 .../com/linkedin/common/urn/Urn.java | 140 +-
 .../com/linkedin/common/urn/UrnCoercer.java | 29 +-
 .../com/linkedin/common/urn/UrnUtils.java | 114 +-
 .../com/linkedin/common/urn/UrnValidator.java | 13 +-
 .../com/linkedin/common/urn/VersionedUrn.java | 99 +-
 .../common/urn/VersionedUrnUtils.java | 18 +-
 .../linkedin/util/VersionedUrnCoercer.java | 1 -
 .../common/urn/DatasetFieldUrnTest.java | 23 +-
 .../linkedin/common/util/ModelUtilsTest.java | 38 +-
 .../linkedin/common/util/RecordUtilsTest.java | 103 +-
 .../common/util/VersionedUrnUtilsTest.java | 5 +-
 .../com/datahub/authentication/Actor.java | 23 +-
 .../com/datahub/authentication/ActorType.java | 8 +-
 .../authentication/Authentication.java | 15 +-
 .../authentication/AuthenticationContext.java | 3 +-
 .../AuthenticationException.java | 1 -
 .../AuthenticationExpiredException.java | 1 -
 .../authentication/AuthenticationRequest.java | 11 +-
 .../authentication/AuthenticatorContext.java | 13 +-
 .../com/datahub/authorization/AuthUtil.java | 26 +-
 .../authorization/AuthorizationRequest.java | 19 +-
 .../authorization/AuthorizationResult.java | 29 +-
 .../authorization/AuthorizedActors.java | 1 -
 .../authorization/AuthorizerContext.java | 14 +-
 .../ConjunctivePrivilegeGroup.java | 6 +-
 .../DisjunctivePrivilegeGroup.java | 4 +-
 .../authorization/EntityFieldType.java | 30 +-
 .../com/datahub/authorization/EntitySpec.java | 22 +-
 .../authorization/EntitySpecResolver.java | 7 +-
 .../datahub/authorization/FieldResolver.java | 27 +-
 .../authorization/ResolvedEntitySpec.java | 23 +-
 .../main/java/com/datahub/plugins/Plugin.java | 7 +-
 .../com/datahub/plugins/PluginConstant.java | 3 +-
 .../auth/authentication/Authenticator.java | 30 +-
 .../auth/authorization/Authorizer.java | 22 +-
 .../producer/BaseMetadataEventProducer.java | 28 +-
 .../dao/producer/KafkaEventProducer.java | 70 +-
 .../dao/producer/KafkaHealthChecker.java | 181 +-
 .../producer/KafkaMetadataEventProducer.java | 62 +-
 .../dao/producer/KafkaProducerCallback.java | 1 -
 metadata-events/mxe-avro/build.gradle | 2 +-
 .../main/java/com/linkedin/mxe/Configs.java | 38 +-
 .../com/linkedin/mxe/TopicConvention.java | 52 +-
 .../com/linkedin/mxe/TopicConventionImpl.java | 64 +-
 .../main/java/com/linkedin/mxe/Topics.java | 20 +-
 .../com/linkedin/metadata/EventUtils.java | 188 +-
 .../linkedin/metadata/EventUtilsTests.java | 55 +-
 .../java/datahub-client/build.gradle | 2 -
 .../main/java/datahub/client/Callback.java | 9 +-
 .../src/main/java/datahub/client/Emitter.java | 54 +-
 .../client/MetadataResponseFuture.java | 7 +-
 .../datahub/client/MetadataWriteResponse.java | 18 +-
 .../java/datahub/client/file/FileEmitter.java | 117 +-
 .../client/file/FileEmitterConfig.java | 9 +-
 .../datahub/client/kafka/AvroSerializer.java | 27 +-
 .../datahub/client/kafka/KafkaEmitter.java | 67 +-
 .../client/kafka/KafkaEmitterConfig.java | 29 +-
 .../patch/AbstractMultiFieldPatchBuilder.java | 25 +-
 .../client/patch/PatchOperationType.java | 5 +-
 .../common/CustomPropertiesPatchBuilder.java | 29 +-
 .../patch/common/GlobalTagsPatchBuilder.java | 11 +-
 .../common/GlossaryTermsPatchBuilder.java | 14 +-
 .../patch/common/OwnershipPatchBuilder.java | 32 +-
 .../dataflow/DataFlowInfoPatchBuilder.java | 54 +-
 .../datajob/DataJobInfoPatchBuilder.java | 51 +-
 .../DataJobInputOutputPatchBuilder.java | 96 +-
 .../DatasetPropertiesPatchBuilder.java | 66 +-
 .../EditableSchemaMetadataPatchBuilder.java | 53 +-
 .../dataset/UpstreamLineagePatchBuilder.java | 25 +-
 .../CustomPropertiesPatchBuilderSupport.java | 8 +-
 .../IntermediatePatchBuilder.java | 13 +-
 .../java/datahub/client/rest/RestEmitter.java | 316 +-
 .../client/rest/RestEmitterConfig.java | 46 +-
 .../java/datahub/event/EventFormatter.java | 52 +-
 .../event/EventValidationException.java | 1 +
 .../event/MetadataChangeProposalWrapper.java | 17 +-
 .../java/datahub/event/StringEscapeUtils.java | 122 +-
 .../datahub/event/UpsertAspectRequest.java | 17 +-
 .../datahub/client/file/FileEmitterTest.java | 125 +-
 .../client/kafka/AvroSerializerTest.java | 17 +-
 .../client/kafka/KafkaEmitterTest.java | 73 +-
 .../kafka/containers/KafkaContainer.java | 71 +-
 .../containers/SchemaRegistryContainer.java | 76 +-
 .../client/kafka/containers/Utils.java | 25 +-
 .../kafka/containers/ZookeeperContainer.java | 80 +-
 .../java/datahub/client/patch/PatchTest.java | 354 +-
 .../datahub/client/rest/RestEmitterTest.java | 425 ++-
 .../datahub/event/EventFormatterTest.java | 44 +-
 .../MetadataChangeProposalWrapperTest.java | 121 +-
 .../datahub/server/TestDataHubServer.java | 24 +-
 .../google/protobuf/ExtensionRegistry.java | 543 ++-
 .../datahub/protobuf/DirectoryWalker.java | 67 +-
 .../java/datahub/protobuf/Proto2DataHub.java | 685 ++--
 .../datahub/protobuf/ProtobufDataset.java | 465 +--
 .../java/datahub/protobuf/ProtobufUtils.java | 354 +-
 .../datahub/protobuf/model/FieldTypeEdge.java | 75 +-
 .../protobuf/model/ProtobufElement.java | 45 +-
 .../datahub/protobuf/model/ProtobufEnum.java | 131 +-
 .../datahub/protobuf/model/ProtobufField.java | 452 +--
 .../datahub/protobuf/model/ProtobufGraph.java | 800 ++--
 .../protobuf/model/ProtobufMessage.java | 194 +-
 .../protobuf/model/ProtobufOneOfField.java | 87 +-
 .../visitors/ProtobufExtensionUtil.java | 307 +-
 .../visitors/ProtobufModelVisitor.java | 28 +-
 .../protobuf/visitors/VisitContext.java | 87 +-
 .../visitors/dataset/DatasetVisitor.java | 204 +-
 .../visitors/dataset/DeprecationVisitor.java | 77 +-
 .../visitors/dataset/DescriptionVisitor.java | 9 +-
 .../visitors/dataset/DomainVisitor.java | 23 +-
 .../dataset/InstitutionalMemoryVisitor.java | 216 +-
 .../dataset/KafkaTopicPropertyVisitor.java | 28 +-
 .../visitors/dataset/OwnershipVisitor.java | 78 +-
 .../visitors/dataset/PropertyVisitor.java | 61 +-
 .../dataset/TagAssociationVisitor.java | 20 +-
 .../dataset/TermAssociationVisitor.java | 17 +-
 .../field/ProtobufExtensionFieldVisitor.java | 119 +-
 .../visitors/field/SchemaFieldVisitor.java | 26 +-
 .../protobuf/visitors/tags/TagVisitor.java | 59 +-
 .../datahub/protobuf/ProtobufDatasetTest.java | 1113 ++++--
 .../datahub/protobuf/ProtobufUtilsTest.java | 72 +-
 .../java/datahub/protobuf/TestFixtures.java | 115 +-
 .../protobuf/model/ProtobufEnumTest.java | 125 +-
 .../protobuf/model/ProtobufFieldTest.java | 398 +-
 .../protobuf/model/ProtobufGraphTest.java | 161 +-
 .../protobuf/model/ProtobufMessageTest.java | 318 +-
 .../model/ProtobufOneOfFieldTest.java | 219 +-
 .../protobuf/visitors/VisitContextTest.java | 53 +-
 .../visitors/dataset/DatasetVisitorTest.java | 85 +-
 .../dataset/DescriptionVisitorTest.java | 27 +-
 .../visitors/dataset/DomainVisitorTest.java | 29 +-
 .../InstitutionalMemoryVisitorTest.java | 110 +-
 .../KafkaTopicPropertyVisitorTest.java | 47 +-
 .../dataset/OwnershipVisitorTest.java | 88 +-
 .../visitors/dataset/PropertyVisitorTest.java | 100 +-
 .../dataset/TermAssociationVisitorTest.java | 58 +-
 .../ProtobufExtensionFieldVisitorTest.java | 445 ++-
 .../field/SchemaFieldVisitorTest.java | 107 +-
 .../protobuf/visitors/tag/TagVisitorTest.java | 132 +-
 .../examples/DataJobLineageAdd.java | 52 +-
 .../datahubproject/examples/DatasetAdd.java | 113 +-
 .../examples/DatasetCustomPropertiesAdd.java | 58 +-
 .../DatasetCustomPropertiesAddRemove.java | 53 +-
 .../DatasetCustomPropertiesReplace.java | 28 +-
 .../io/datahubproject/examples/TagCreate.java | 49 +-
 .../test/spark/lineage/HdfsIn2HdfsOut1.java | 32 +-
 .../test/spark/lineage/HdfsIn2HdfsOut2.java | 42 +-
 .../lineage/HdfsIn2HiveCreateInsertTable.java | 65 +-
 .../spark/lineage/HdfsIn2HiveCreateTable.java | 58 +-
 .../test/spark/lineage/HiveInHiveOut.java | 80 +-
 .../spark/lineage/HiveInHiveOut_test1.java | 79 +-
 .../main/java/test/spark/lineage/Utils.java | 2 +-
 .../datahub/spark/DatahubSparkListener.java | 346 +-
 .../java/datahub/spark/DatasetExtractor.java | 451 ++-
 .../consumer/impl/CoalesceJobsEmitter.java | 70 +-
 .../spark/consumer/impl/McpEmitter.java | 141 +-
 .../java/datahub/spark/model/AppEndEvent.java | 11 +-
 .../datahub/spark/model/AppStartEvent.java | 45 +-
 .../datahub/spark/model/DatasetLineage.java | 15 +-
 .../datahub/spark/model/LineageConsumer.java | 3 +-
 .../datahub/spark/model/LineageEvent.java | 5 +-
 .../datahub/spark/model/LineageUtils.java | 60 +-
 .../spark/model/SQLQueryExecEndEvent.java | 13 +-
 .../spark/model/SQLQueryExecStartEvent.java | 42 +-
 .../model/dataset/CatalogTableDataset.java | 11 +-
 .../spark/model/dataset/HdfsPathDataset.java | 27 +-
 .../spark/model/dataset/JdbcDataset.java | 9 +-
 .../spark/model/dataset/SparkDataset.java | 8 +-
 .../datahub/spark/TestCoalesceJobLineage.java | 126 +-
 .../datahub/spark/TestSparkJobsLineage.java | 293 +-
 .../aspect/utils/DeprecationUtils.java | 47 +-
 .../metadata/client/JavaEntityClient.java | 1290 ++++---
 .../client/SystemJavaEntityClient.java | 48 +-
 .../com/linkedin/metadata/dao/AspectKey.java | 14 +-
 .../linkedin/metadata/dao/BaseReadDAO.java | 56 +-
 .../linkedin/metadata/entity/AspectDao.java | 280 +-
 .../metadata/entity/AspectMigrationsDao.java | 12 +-
 .../metadata/entity/EntityAspect.java | 37 +-
 .../entity/EntityAspectIdentifier.java | 12 +-
 .../metadata/entity/EntityServiceImpl.java | 1983 ++++----
 .../linkedin/metadata/entity/EntityUtils.java | 104 +-
 .../metadata/entity/NewModelUtils.java | 60 +-
 .../AspectStorageValidationUtil.java | 13 +-
 .../entity/cassandra/CassandraAspect.java | 22 +-
 .../entity/cassandra/CassandraAspectDao.java | 430 ++-
 .../cassandra/CassandraRetentionService.java | 195 +-
 .../ebean/AspectStorageValidationUtil.java | 18 +-
 .../metadata/entity/ebean/EbeanAspectDao.java | 445 ++-
 .../metadata/entity/ebean/EbeanAspectV1.java | 14 +-
 .../metadata/entity/ebean/EbeanAspectV2.java | 41 +-
 .../entity/ebean/EbeanRetentionService.java | 214 +-
 .../ebean/transactions/AspectsBatchImpl.java | 92 +-
 .../ebean/transactions/PatchBatchItem.java | 304 +-
 .../ebean/transactions/UpsertBatchItem.java | 282 +-
 .../EntityRegistryUrnValidator.java | 50 +-
 .../validation/RecordTemplateValidator.java | 72 +-
 .../validation/ValidationException.java | 4 +-
 .../entity/validation/ValidationUtils.java | 60 +-
 .../metadata/event/EntityEventProducer.java | 23 +-
 .../metadata/event/EventProducer.java | 39 +-
 .../metadata/graph/JavaGraphClient.java | 77 +-
 .../metadata/graph/SiblingGraphService.java | 220 +-
 .../metadata/graph/dgraph/DgraphExecutor.java | 147 +-
 .../graph/dgraph/DgraphGraphService.java | 1261 +++---
 .../metadata/graph/dgraph/DgraphSchema.java | 216 +-
 .../graph/elastic/ESGraphQueryDAO.java | 365 +-
 .../graph/elastic/ESGraphWriteDAO.java | 36 +-
 .../elastic/ElasticSearchGraphService.java | 161 +-
 .../GraphRelationshipMappingsBuilder.java | 17 +-
 .../graph/elastic/TimeFilterUtils.java | 90 +-
 .../graph/neo4j/Neo4jGraphService.java | 470 ++-
 .../candidatesource/MostPopularSource.java | 73 +-
 .../candidatesource/RecentlyEditedSource.java | 82 +-
 .../candidatesource/RecentlyViewedSource.java | 85 +-
 .../search/EntityLineageResultCacheKey.java | 23 +-
 .../metadata/search/LineageSearchService.java | 631 +--
 .../metadata/search/SearchService.java | 195 +-
 .../search/cache/CacheableSearcher.java | 40 +-
 .../cache/CachedEntityLineageResult.java | 7 +-
 .../search/cache/EntityDocCountCache.java | 22 +-
 .../client/CachingEntitySearchService.java | 232 +-
 .../elasticsearch/ElasticSearchService.java | 165 +-
 .../indexbuilder/ESIndexBuilder.java | 400 +-
 .../indexbuilder/EntityIndexBuilders.java | 68 +-
 .../indexbuilder/MappingsBuilder.java | 148 +-
 .../indexbuilder/ReindexConfig.java | 446 ++-
 .../indexbuilder/SettingsBuilder.java | 286 +-
 .../elasticsearch/query/ESBrowseDAO.java | 194 +-
 .../elasticsearch/query/ESSearchDAO.java | 280 +-
 .../request/AggregationQueryBuilder.java | 49 +-
 .../request/AutocompleteRequestHandler.java | 99 +-
 .../query/request/CustomizedQueryHandler.java | 49 +-
 .../query/request/PITAwareSearchRequest.java | 1 -
 .../query/request/SearchAfterWrapper.java | 10 +-
 .../query/request/SearchFieldConfig.java | 303 +-
 .../query/request/SearchQueryBuilder.java | 488 ++-
 .../query/request/SearchRequestHandler.java | 365 +-
 .../elasticsearch/update/BulkListener.java | 50 +-
 .../elasticsearch/update/ESBulkProcessor.java | 318 +-
 .../elasticsearch/update/ESWriteDAO.java | 27 +-
 .../search/features/FeatureExtractor.java | 9 +-
 .../metadata/search/features/Features.java | 11 +-
 .../metadata/search/ranker/SearchRanker.java | 50 +-
 .../metadata/search/ranker/SimpleRanker.java | 4 +-
 .../SearchDocumentTransformer.java | 182 +-
 .../search/utils/BrowsePathUtils.java | 210 +-
 .../search/utils/BrowsePathV2Utils.java | 115 +-
 .../metadata/search/utils/ESUtils.java | 336 +-
 .../metadata/search/utils/FilterUtils.java | 17 +-
 .../metadata/search/utils/GZIPUtil.java | 6 +-
 .../metadata/search/utils/SearchUtils.java | 95 +-
 .../service/UpdateIndicesService.java | 310 +-
 .../metadata/shared/ElasticSearchIndexed.java | 24 +-
 .../systemmetadata/ESSystemMetadataDAO.java | 62 +-
 .../ElasticSearchSystemMetadataService.java | 129 +-
 .../systemmetadata/SystemMetadataEntry.java | 1 -
 .../SystemMetadataMappingsBuilder.java | 3 +-
 .../timeline/MissingEntityAspect.java | 3 +-
 .../timeline/TimelineServiceImpl.java | 388 +-
 .../DatasetSchemaFieldChangeEvent.java | 10 +-
 .../SchemaFieldGlossaryTermChangeEvent.java | 13 +-
 .../schema/SchemaFieldTagChangeEvent.java | 13 +-
 .../data/entity/DomainChangeEvent.java | 11 +-
 .../data/entity/GlossaryTermChangeEvent.java | 11 +-
 .../data/entity/OwnerChangeEvent.java | 9 +-
 .../timeline/data/entity/TagChangeEvent.java | 11 +-
 .../timeline/eventgenerator/Aspect.java | 13 +-
 ...AssertionRunEventChangeEventGenerator.java | 28 +-
 .../ChangeEventGeneratorUtils.java | 88 +-
 ...sInstanceRunEventChangeEventGenerator.java | 37 +-
 ...DatasetPropertiesChangeEventGenerator.java | 94 +-
 .../DeprecationChangeEventGenerator.java | 32 +-
 ...DatasetPropertiesChangeEventGenerator.java | 69 +-
 ...bleSchemaMetadataChangeEventGenerator.java | 209 +-
 .../EntityChangeEventGenerator.java | 36 +-
 .../EntityChangeEventGeneratorFactory.java | 15 +-
 .../EntityChangeEventGeneratorRegistry.java | 22 +-
 .../EntityKeyChangeEventGenerator.java | 7 +-
 .../GlobalTagsChangeEventGenerator.java | 124 +-
 .../GlossaryTermInfoChangeEventGenerator.java | 187 +-
 .../GlossaryTermsChangeEventGenerator.java | 153 +-
 ...stitutionalMemoryChangeEventGenerator.java | 171 +-
 .../OwnershipChangeEventGenerator.java | 173 +-
 .../SchemaMetadataChangeEventGenerator.java | 427 ++-
 .../SingleDomainChangeEventGenerator.java | 28 +-
 .../StatusChangeEventGenerator.java | 30 +-
 .../ElasticSearchTimeseriesAspectService.java | 195 +-
 .../elastic/indexbuilder/MappingsBuilder.java | 37 +-
 .../TimeseriesAspectIndexBuilders.java | 44 +-
 .../elastic/query/ESAggregatedStatsDAO.java | 192 +-
 .../TimeseriesAspectTransformer.java | 135 +-
 .../linkedin/metadata/version/GitVersion.java | 1 -
 .../metadata/AspectGenerationUtils.java | 10 +-
 .../metadata/AspectIngestionUtils.java | 57 +-
 .../linkedin/metadata/AspectUtilsTest.java | 24 +-
 .../linkedin/metadata/CassandraTestUtils.java | 96 +-
 .../linkedin/metadata/DockerTestUtils.java | 24 +-
 .../com/linkedin/metadata/EbeanTestUtils.java | 7 +-
 .../metadata/TestEntitySpecBuilder.java | 4 +-
 .../com/linkedin/metadata/TestEntityUtil.java | 65 +-
 .../metadata/client/JavaEntityClientTest.java | 223 +-
 .../update/BulkListenerTest.java | 52 +-
 .../update/ESBulkProcessorTest.java | 18 +-
 .../entity/AspectMigrationsDaoTest.java | 22 +-
 .../CassandraAspectMigrationsDaoTest.java | 24 +-
 .../entity/CassandraEntityServiceTest.java | 83 +-
 .../entity/DeleteEntityServiceTest.java | 70 +-
 .../entity/DeleteEntityUtilsTest.java | 264 +-
 .../entity/EbeanAspectMigrationsDaoTest.java | 36 +-
 .../entity/EbeanEntityServiceTest.java | 287 +-
 .../metadata/entity/EntityServiceTest.java | 2909 +++++++-------
 .../metadata/entity/TestEntityRegistry.java | 9 +-
 .../extractor/AspectExtractorTest.java | 8 +-
 .../extractor/FieldExtractorTest.java | 82 +-
 .../com/linkedin/metadata/graph/EdgeTest.java | 77 +-
 .../metadata/graph/GraphServiceTestBase.java | 2449 ++++++------
 .../graph/dgraph/DgraphContainer.java | 419 +-
 .../graph/dgraph/DgraphGraphServiceTest.java | 1390 +++---
 .../graph/neo4j/Neo4jGraphServiceTest.java | 199 +-
 .../graph/neo4j/Neo4jTestServerBuilder.java | 7 +-
 .../graph/search/ESGraphQueryDAOTest.java | 165 +-
 .../search/SearchGraphServiceTestBase.java | 309 +-
 .../graph/search/TimeFilterUtilsTest.java | 7 +-
 .../SearchGraphServiceElasticSearchTest.java | 11 +-
 .../SearchGraphServiceOpenSearchTest.java | 10 +-
 .../sibling/SiblingGraphServiceTest.java | 523 +--
 .../RecommendationsServiceTest.java | 91 +-
 ...ySearchAggregationCandidateSourceTest.java | 63 +-
 .../RecommendationUtilsTest.java | 13 +-
 .../candidatesource/TestSource.java | 8 +-
 .../LineageSearchResultCacheKeyTest.java | 30 +-
 .../search/LineageServiceTestBase.java | 1019 +++--
 .../search/SearchServiceTestBase.java | 273 +-
 .../metadata/search/TestEntityTestBase.java | 130 +-
 .../search/cache/CacheableSearcherTest.java | 126 +-
 .../elasticsearch/ElasticSearchSuite.java | 32 +-
 .../GoldenElasticSearchTest.java | 63 +-
 .../IndexBuilderElasticSearchTest.java | 26 +-
 .../LineageDataFixtureElasticSearchTest.java | 59 +-
 .../LineageServiceElasticSearchTest.java | 24 +-
 .../SampleDataFixtureElasticSearchTest.java | 45 +-
 .../SearchDAOElasticSearchTest.java | 24 +-
 .../SearchServiceElasticSearchTest.java | 25 +-
 ...ystemMetadataServiceElasticSearchTest.java | 12 +-
 .../TestEntityElasticSearchTest.java | 21 +-
 ...eseriesAspectServiceElasticSearchTest.java | 13 +-
 .../search/fixtures/GoldenTestBase.java | 297 +-
 .../fixtures/LineageDataFixtureTestBase.java | 83 +-
 .../fixtures/SampleDataFixtureTestBase.java | 3277 +++++++++-------
 .../indexbuilder/IndexBuilderTestBase.java | 421 +-
 .../indexbuilder/MappingsBuilderTest.java | 67 +-
 .../opensearch/GoldenOpenSearchTest.java | 59 +-
 .../IndexBuilderOpenSearchTest.java | 26 +-
 .../LineageDataFixtureOpenSearchTest.java | 55 +-
 .../LineageServiceOpenSearchTest.java | 21 +-
 .../search/opensearch/OpenSearchSuite.java | 31 +-
 .../SampleDataFixtureOpenSearchTest.java | 44 +-
 .../opensearch/SearchDAOOpenSearchTest.java | 18 +-
 .../SearchServiceOpenSearchTest.java | 21 +-
 .../SystemMetadataServiceOpenSearchTest.java | 10 +-
 .../opensearch/TestEntityOpenSearchTest.java | 21 +-
 ...TimeseriesAspectServiceOpenSearchTest.java | 9 +-
 .../metadata/search/query/BrowseDAOTest.java | 42 +-
 .../search/query/SearchDAOTestBase.java | 647 ++--
 .../request/AggregationQueryBuilderTest.java | 149 +-
 .../AutocompleteRequestHandlerTest.java | 19 +-
 .../request/CustomizedQueryHandlerTest.java | 357 +-
 .../query/request/SearchQueryBuilderTest.java | 383 +-
 .../request/SearchRequestHandlerTest.java | 576 +--
 .../SearchDocumentTransformerTest.java | 65 +-
 .../search/utils/BrowsePathUtilsTest.java | 65 +-
 .../search/utils/BrowsePathV2UtilsTest.java | 167 +-
 .../metadata/search/utils/ESUtilsTest.java | 361 +-
 .../search/utils/SearchUtilsTest.java | 207 +-
 .../SystemMetadataServiceTestBase.java | 28 +-
 .../CassandraTimelineServiceTest.java | 32 +-
 .../timeline/EbeanTimelineServiceTest.java | 35 +-
 .../timeline/TimelineServiceTest.java | 100 +-
 ...chemaMetadataChangeEventGeneratorTest.java | 64 +-
 .../TimeseriesAspectServiceTestBase.java | 1025 +++--
 .../io/datahubproject/test/DataGenerator.java | 687 ++--
 .../test/fixtures/search/EntityExporter.java | 87 +-
 .../test/fixtures/search/FixtureReader.java | 170 +-
 .../test/fixtures/search/FixtureWriter.java | 111 +-
 .../test/fixtures/search/LineageExporter.java | 359 +-
 .../SampleDataFixtureConfiguration.java | 493 +--
 .../fixtures/search/SearchFixtureUtils.java | 243 +-
 .../SearchLineageFixtureConfiguration.java | 378 +-
 .../test/models/Anonymized.java | 75 +-
 .../test/models/DatasetAnonymized.java | 66 +-
 .../test/models/GraphAnonymized.java | 20 +-
 .../search/ElasticsearchTestContainer.java | 70 +-
 .../test/search/OpenSearchTestContainer.java | 73 +-
 .../test/search/SearchTestContainer.java | 11 +-
 .../test/search/SearchTestUtils.java | 279 +-
 .../config/SearchCommonTestConfiguration.java | 89 +-
 .../SearchTestContainerConfiguration.java | 127 +-
 .../kafka/MaeConsumerApplication.java | 43 +-
 .../kafka/MaeConsumerApplicationTest.java | 11 +-
 ...eConsumerApplicationTestConfiguration.java | 27 +-
 .../kafka/DataHubUsageEventsProcessor.java | 30 +-
 .../metadata/kafka/MclConsumerConfig.java | 16 +-
 .../kafka/MetadataChangeLogProcessor.java | 55 +-
 .../boot/ApplicationStartupListener.java | 12 +-
 .../boot/MCLBootstrapManagerFactory.java | 8 +-
 .../DataHubUsageEventsProcessorCondition.java | 11 +-
 .../kafka/config/EntityHydratorConfig.java | 12 +-
 .../MetadataChangeLogProcessorCondition.java | 5 +-
 .../kafka/elasticsearch/ElasticEvent.java | 2 +-
 .../elasticsearch/ElasticsearchConnector.java | 19 +-
 .../ElasticsearchConnectorFactory.java | 7 +-
 .../kafka/elasticsearch/JsonElasticEvent.java | 17 +-
 .../kafka/elasticsearch/MCEElasticEvent.java | 20 +-
 .../kafka/hook/MetadataChangeLogHook.java | 17 +-
 .../kafka/hook/UpdateIndicesHook.java | 18 +-
 .../event/EntityChangeEventGeneratorHook.java | 120 +-
 .../ingestion/IngestionSchedulerHook.java | 45 +-
 .../hook/siblings/SiblingAssociationHook.java | 284 +-
 .../metadata/kafka/hydrator/BaseHydrator.java | 9 +-
 .../kafka/hydrator/ChartHydrator.java | 16 +-
 .../kafka/hydrator/CorpUserHydrator.java | 15 +-
 .../kafka/hydrator/DashboardHydrator.java | 16 +-
 .../kafka/hydrator/DataFlowHydrator.java | 16 +-
 .../kafka/hydrator/DataJobHydrator.java | 16 +-
 .../kafka/hydrator/DatasetHydrator.java | 5 +-
 .../kafka/hydrator/EntityHydrator.java | 28 +-
 .../DataHubUsageEventTransformer.java | 48 +-
 .../kafka/hook/EntityRegistryTestUtil.java | 20 +-
 .../kafka/hook/GraphIndexUtilsTest.java | 102 +-
 .../hook/MCLProcessingTestDataGenerator.java | 24 +-
 .../kafka/hook/UpdateIndicesHookTest.java | 227 +-
 .../EntityChangeEventGeneratorHookTest.java | 318 +-
 .../hook/event/PlatformEventMatcher.java | 37 +-
 .../ingestion/IngestionSchedulerHookTest.java | 34 +-
 .../siblings/SiblingAssociationHookTest.java | 311 +-
 .../kafka/hook/spring/MCLSpringTest.java | 37 +-
 .../spring/MCLSpringTestConfiguration.java | 40 +-
 .../kafka/MceConsumerApplication.java | 55 +-
 .../metadata/restli/EbeanServerConfig.java | 99 +-
 .../metadata/restli/RestliServletConfig.java | 78 +-
 .../kafka/MceConsumerApplicationTest.java | 39 +-
 ...eConsumerApplicationTestConfiguration.java | 44 +-
 .../metadata/kafka/McpConsumerConfig.java | 16 +-
 .../kafka/MetadataChangeEventsProcessor.java | 52 +-
 .../MetadataChangeProposalsProcessor.java | 45 +-
 .../boot/ApplicationStartupListener.java | 14 +-
 .../boot/MCPBootstrapManagerFactory.java | 13 +-
 ...adataChangeProposalProcessorCondition.java | 5 +-
 .../datahub/event/PlatformEventProcessor.java | 22 +-
 .../datahub/event/hook/PlatformEventHook.java | 13 +-
 .../model/validation/ModelValidationTask.java | 40 +-
 metadata-models/build.gradle | 4 -
 .../linkedin/metadata/ModelValidation.java | 28 +-
 .../metadata/ModelValidationConstants.java | 1 -
 .../AuthenticationConfiguration.java | 26 +-
 .../AuthenticationConstants.java | 23 +-
 .../AuthenticatorConfiguration.java | 11 +-
 .../TokenServiceConfiguration.java | 5 +-
 .../AuthorizationConfiguration.java | 16 +-
 .../AuthorizerConfiguration.java | 19 +-
 .../DefaultAuthorizerConfiguration.java | 10 +-
 .../filter/AuthenticationFilter.java | 217 +-
 .../authentication/AuthTestConfiguration.java | 22 +-
 .../AuthenticationFilterTest.java | 36 +-
 .../authenticator/AuthenticatorChain.java | 73 +-
 .../DataHubJwtTokenAuthenticator.java | 67 +-
 .../DataHubSystemAuthenticator.java | 59 +-
 .../DataHubTokenAuthenticator.java | 56 +-
 .../HealthStatusAuthenticator.java | 41 +-
 .../authenticator/NoOpAuthenticator.java | 39 +-
 .../authentication/group/GroupService.java | 125 +-
 .../invite/InviteTokenService.java | 76 +-
 .../authentication/post/PostService.java | 33 +-
 .../token/DataHubJwtSigningKeyResolver.java | 33 +-
 .../token/StatefulTokenService.java | 92 +-
 .../token/StatelessTokenService.java | 71 +-
 .../authentication/token/TokenClaims.java | 58 +-
 .../authentication/token/TokenException.java | 4 +-
 .../token/TokenExpiredException.java | 4 +-
 .../authentication/token/TokenType.java | 12 +-
 .../authentication/token/TokenVersion.java | 45 +-
 .../user/NativeUserService.java | 83 +-
 .../authorization/AuthorizerChain.java | 51 +-
 .../authorization/DataHubAuthorizer.java | 144 +-
 .../DefaultEntitySpecResolver.java | 12 +-
 .../datahub/authorization/FilterUtils.java | 41 +-
 .../datahub/authorization/PolicyEngine.java | 149 +-
 .../datahub/authorization/PolicyFetcher.java | 123 +-
 ...PlatformInstanceFieldResolverProvider.java | 31 +-
 .../DomainFieldResolverProvider.java | 74 +-
 .../EntityFieldResolverProvider.java | 12 +-
 .../EntityTypeFieldResolverProvider.java | 7 +-
 .../EntityUrnFieldResolverProvider.java | 7 +-
 .../GroupMembershipFieldResolverProvider.java | 45 +-
 .../OwnerFieldResolverProvider.java | 20 +-
 .../authorization/role/RoleService.java | 49 +-
 .../datahub/telemetry/TrackingService.java | 94 +-
 .../authenticator/AuthenticatorChainTest.java | 28 +-
 .../DataHubJwtTokenAuthenticatorTest.java | 24 +-
 .../DataHubSystemAuthenticatorTest.java | 110 +-
 .../DataHubTokenAuthenticatorTest.java | 291 +-
 .../group/GroupServiceTest.java | 186 +-
 .../invite/InviteTokenServiceTest.java | 125 +-
 .../authentication/post/PostServiceTest.java | 25 +-
 .../DataHubJwtSigningKeyResolverTest.java | 35 +-
 .../token/StatefulTokenServiceTest.java | 97 +-
 .../token/StatelessTokenServiceTest.java | 78 +-
 .../user/NativeUserServiceTest.java | 163 +-
 .../authorization/DataHubAuthorizerTest.java | 496 ++-
 .../authorization/PolicyEngineTest.java | 593 ++-
 .../authorization/RoleServiceTest.java | 44 +-
 ...formInstanceFieldResolverProviderTest.java | 152 +-
 ...upMembershipFieldResolverProviderTest.java | 210 +-
 .../telemetry/TrackingServiceTest.java | 70 +-
 .../authentication/AuthServiceController.java | 300 +-
 .../metadata/config/AssetsConfiguration.java | 6 +-
 .../config/AuthPluginConfiguration.java | 7 +-
 .../metadata/config/DataHubConfiguration.java | 10 +-
 .../metadata/config/EntityProfileConfig.java | 4 +-
 .../EntityRegistryPluginConfiguration.java | 3 +-
 .../config/IngestionConfiguration.java | 14 +-
 .../metadata/config/PluginConfiguration.java | 24 +-
 .../metadata/config/PreProcessHooks.java | 1 -
 .../metadata/config/QueriesTabConfig.java | 5 +-
 .../config/RetentionPluginConfiguration.java | 3 +-
 .../config/SearchResultVisualConfig.java | 9 +-
 .../metadata/config/TestsConfiguration.java | 10 +-
 .../metadata/config/ViewsConfiguration.java | 10 +-
 .../metadata/config/VisualConfiguration.java | 21 +-
 .../config/cache/CacheConfiguration.java | 1 -
 .../EntityDocCountCacheConfiguration.java | 1 -
 .../cache/HomepageCacheConfiguration.java | 1 -
 .../cache/PrimaryCacheConfiguration.java | 1 -
 .../cache/SearchCacheConfiguration.java | 1 -
 .../SearchLineageCacheConfiguration.java | 1 -
 .../cache/client/ClientCacheConfig.java | 15 +-
 .../client/ClientCacheConfiguration.java | 4 +-
 .../cache/client/EntityClientCacheConfig.java | 17 +-
 .../cache/client/UsageClientCacheConfig.java | 10 +-
 .../config/kafka/ConsumerConfiguration.java | 1 -
 .../config/kafka/ProducerConfiguration.java | 1 -
 .../kafka/SchemaRegistryConfiguration.java | 1 -
 .../search/BuildIndicesConfiguration.java | 1 -
 .../config/search/CustomConfiguration.java | 11 +-
 .../search/ElasticSearchConfiguration.java | 1 -
 .../search/ExactMatchConfiguration.java | 1 -
 .../search/GraphQueryConfiguration.java | 2 +-
 .../config/search/PartialConfiguration.java | 1 -
 .../config/search/SearchConfiguration.java | 1 -
 .../config/search/WordGramConfiguration.java | 1 -
 .../search/custom/BoolQueryConfiguration.java | 18 +-
 .../custom/CustomSearchConfiguration.java | 7 +-
 .../search/custom/QueryConfiguration.java | 24 +-
 .../telemetry/TelemetryConfiguration.java | 33 +-
 .../spring/YamlPropertySourceFactory.java | 12 +-
 .../BatchWriteOperationsOptions.java | 1 -
 .../factory/auth/AuthorizerChainFactory.java | 93 +-
 .../AwsRequestSigningApacheInterceptor.java | 69 +-
 .../auth/DataHubAuthorizerFactory.java | 20 +-
 .../auth/DataHubTokenServiceFactory.java | 17 +-
 .../gms/factory/auth/GroupServiceFactory.java | 7 +-
 .../auth/InviteTokenServiceFactory.java | 3 +-
 .../auth/NativeUserServiceFactory.java | 15 +-
 .../gms/factory/auth/PostServiceFactory.java | 3 +-
 .../gms/factory/auth/RoleServiceFactory.java | 7 +-
 .../auth/SystemAuthenticationFactory.java | 10 +-
 .../gms/factory/common/CacheConfig.java | 22 +-
 .../common/DatasetUrnNameCasingFactory.java | 3 +-
 .../ElasticSearchGraphServiceFactory.java | 25 +-
 ...ticSearchSystemMetadataServiceFactory.java | 13 +-
 .../ElasticsearchSSLContextFactory.java | 154 +-
 .../gms/factory/common/GitVersionFactory.java | 1 -
 .../factory/common/GraphClientFactory.java | 1 -
 .../factory/common/GraphServiceFactory.java | 8 +-
 .../common/IndexConventionFactory.java | 1 -
 .../LocalCassandraSessionConfigFactory.java | 24 +-
 .../common/LocalEbeanServerConfigFactory.java | 4 +-
 .../factory/common/Neo4jDriverFactory.java | 8 +-
 .../common/Neo4jGraphServiceFactory.java | 6 +-
 .../common/RestHighLevelClientFactory.java | 103 +-
 .../common/SiblingGraphServiceFactory.java | 1 -
 .../common/SystemMetadataServiceFactory.java | 3 +-
 .../common/TopicConventionFactory.java | 24 +-
 .../factory/config/ConfigurationProvider.java | 72 +-
 .../config/HealthCheckConfiguration.java | 1 -
 .../DataProductServiceFactory.java | 5 +-
 .../entity/CassandraSessionFactory.java | 33 +-
 .../factory/entity/EbeanServerFactory.java | 4 +-
 .../entity/EntityAspectDaoFactory.java | 3 +-
 .../EntityAspectMigrationsDaoFactory.java | 3 +-
 .../factory/entity/EntityServiceFactory.java | 43 +-
 .../entity/JavaEntityClientFactory.java | 18 +-
 .../entity/RestliEntityClientFactory.java | 25 +-
 .../entity/RetentionServiceFactory.java | 14 +-
 .../indices/UpdateIndicesServiceFactory.java | 29 +-
 .../ConfigEntityRegistryFactory.java | 3 +-
 .../entityregistry/EntityRegistryFactory.java | 3 +-
 .../PluginEntityRegistryFactory.java | 6 +-
 .../factory/graphql/GraphQLEngineFactory.java | 27 +-
 .../ingestion/IngestionSchedulerFactory.java | 15 +-
 .../DataHubKafkaEventProducerFactory.java | 11 +-
 .../kafka/DataHubKafkaProducerFactory.java | 58 +-
 .../kafka/KafkaEventConsumerFactory.java | 201 +-
 .../kafka/SimpleKafkaConsumerFactory.java | 22 +-
 .../kafka/ThreadPoolContainerCustomizer.java | 7 +-
 .../AwsGlueSchemaRegistryFactory.java | 15 +-
 .../DUHESchemaRegistryFactory.java | 42 +-
 .../InternalSchemaRegistryFactory.java | 23 +-
 .../KafkaSchemaRegistryFactory.java | 16 +-
 .../schemaregistry/SchemaRegistryConfig.java | 1 -
 .../lineage/LineageServiceFactory.java | 8 +-
 .../OwnershipTypeServiceFactory.java | 5 +-
 .../factory/query/QueryServiceFactory.java | 3 +-
 .../RecommendationServiceFactory.java | 28 +-
 .../MostPopularCandidateSourceFactory.java | 7 +-
 .../RecentlyEditedCandidateSourceFactory.java | 7 +-
 ...ecentlySearchedCandidateSourceFactory.java | 1 -
 .../RecentlyViewedCandidateSourceFactory.java | 7 +-
 .../TopPlatformsCandidateSourceFactory.java | 1 -
 .../TopTagsCandidateSourceFactory.java | 1 -
 .../TopTermsCandidateSourceFactory.java | 1 -
 .../BaseElasticSearchComponentsFactory.java | 20 +-
 .../CachingEntitySearchServiceFactory.java | 13 +-
 .../ElasticSearchBulkProcessorFactory.java | 20 +-
 .../ElasticSearchIndexBuilderFactory.java | 56 +-
 .../search/ElasticSearchServiceFactory.java | 60 +-
 .../search/EntityIndexBuildersFactory.java | 40 +-
.../search/EntitySearchServiceFactory.java | 1 - .../search/LineageSearchServiceFactory.java | 19 +- .../SearchDocumentTransformerFactory.java | 3 +- .../factory/search/SearchServiceFactory.java | 9 +- .../search/SettingsBuilderFactory.java | 3 +- .../search/ranker/SearchRankerFactory.java | 1 - .../search/views/ViewServiceFactory.java | 3 +- .../factory/secret/SecretServiceFactory.java | 1 - .../settings/SettingsServiceFactory.java | 3 +- .../gms/factory/telemetry/DailyReport.java | 67 +- .../factory/telemetry/MixpanelApiFactory.java | 1 - .../MixpanelMessageBuilderFactory.java | 2 - .../telemetry/ScheduledAnalyticsFactory.java | 19 +- .../gms/factory/telemetry/TelemetryUtils.java | 55 +- .../telemetry/TrackingServiceFactory.java | 14 +- ...tyChangeEventGeneratorRegistryFactory.java | 34 +- .../timeline/TimelineServiceFactory.java | 9 +- ...cSearchTimeseriesAspectServiceFactory.java | 16 +- .../TimeseriesAspectServiceFactory.java | 1 - .../gms/factory/usage/UsageClientFactory.java | 24 +- .../metadata/boot/BootstrapManager.java | 42 +- .../linkedin/metadata/boot/BootstrapStep.java | 31 +- .../boot/OnBootApplicationListener.java | 67 +- .../linkedin/metadata/boot/UpgradeStep.java | 43 +- .../dependencies/BootstrapDependency.java | 5 +- .../factories/BootstrapManagerFactory.java | 73 +- .../IngestRetentionPoliciesStepFactory.java | 12 +- .../kafka/DataHubUpgradeKafkaListener.java | 70 +- .../boot/kafka/MockDUHEDeserializer.java | 83 +- .../boot/kafka/MockDUHESerializer.java | 83 +- .../boot/steps/BackfillBrowsePathsV2Step.java | 72 +- .../boot/steps/IndexDataPlatformsStep.java | 72 +- .../IngestDataPlatformInstancesStep.java | 31 +- .../boot/steps/IngestDataPlatformsStep.java | 83 +- .../IngestDefaultGlobalSettingsStep.java | 76 +- .../boot/steps/IngestOwnershipTypesStep.java | 45 +- .../boot/steps/IngestPoliciesStep.java | 82 +- .../steps/IngestRetentionPoliciesStep.java | 50 +- .../metadata/boot/steps/IngestRolesStep.java | 68 +- .../boot/steps/IngestRootUserStep.java | 50 +- .../boot/steps/RemoveClientIdAspectStep.java | 5 +- .../steps/RestoreColumnLineageIndices.java | 158 +- .../boot/steps/RestoreDbtSiblingsIndices.java | 116 +- .../boot/steps/RestoreGlossaryIndices.java | 171 +- .../steps/UpgradeDefaultBrowsePathsStep.java | 65 +- .../boot/steps/WaitForSystemUpdateStep.java | 3 +- .../restli/server/RAPServletFactory.java | 85 +- .../restli/server/RestliHandlerServlet.java | 30 +- .../kafka/SimpleKafkaConsumerFactoryTest.java | 30 +- .../gms/factory/search/CacheTest.java | 236 +- ...ElasticSearchBulkProcessorFactoryTest.java | 19 +- ...ticSearchIndexBuilderFactoryEmptyTest.java | 34 +- ...earchIndexBuilderFactoryOverridesTest.java | 41 +- .../ElasticSearchIndexBuilderFactoryTest.java | 22 +- .../secret/SecretServiceFactoryTest.java | 33 +- .../steps/BackfillBrowsePathsV2StepTest.java | 208 +- .../IngestDataPlatformInstancesStepTest.java | 118 +- .../IngestDefaultGlobalSettingsStepTest.java | 92 +- .../RestoreColumnLineageIndicesTest.java | 415 +- .../steps/RestoreGlossaryIndicesTest.java | 447 ++- .../UpgradeDefaultBrowsePathsStepTest.java | 398 +- .../telemetry/TelemetryUtilsTest.java | 10 +- .../datahub/graphql/GraphQLController.java | 158 +- .../datahub/graphql/GraphiQLController.java | 7 +- .../datahub/graphql/SpringQueryContext.java | 6 +- .../openapi-analytics-servlet/build.gradle | 2 - .../config/OpenapiAnalyticsConfig.java | 8 +- .../delegates/DatahubUsageEventsImpl.java | 62 +- .../OpenAPIAnalyticsTestConfiguration.java | 61 +- .../delegates/DatahubUsageEventsImplTest.java | 
39 +- .../openapi-entity-servlet/build.gradle | 4 +- .../datahubproject/CustomSpringCodegen.java | 54 +- .../delegates/EntityApiDelegateImpl.java | 1236 +++--- .../openapi/util/OpenApiEntitiesUtil.java | 535 +-- .../OpenAPIEntityTestConfiguration.java | 195 +- .../delegates/EntityApiDelegateImplTest.java | 439 ++- .../openapi/util/OpenApiEntitiesUtilTest.java | 67 +- .../GlobalControllerExceptionHandler.java | 1 - .../openapi/config/SpringWebConfig.java | 24 +- .../StringToChangeCategoryConverter.java | 27 +- .../openapi/dto/RollbackRunResultDto.java | 1 - .../openapi/dto/UpsertAspectRequest.java | 17 +- .../openapi/dto/UrnResponseMap.java | 1 - .../openapi/entities/EntitiesController.java | 226 +- .../openapi/health/HealthCheckController.java | 51 +- .../openapi/health/HealthController.java | 1 - .../elastic/OperationsController.java | 43 +- .../entities/PlatformEntitiesController.java | 38 +- .../RelationshipsController.java | 148 +- .../openapi/timeline/TimelineController.java | 25 +- .../openapi/util/ElasticsearchUtils.java | 3 +- .../openapi/util/MappingUtil.java | 387 +- .../openapi/util/ReflectionCache.java | 224 +- .../java/entities/EntitiesControllerTest.java | 234 +- .../src/test/java/mock/MockEntityService.java | 125 +- .../plugins/auth/configuration/AuthParam.java | 21 +- .../auth/configuration/AuthPluginConfig.java | 15 +- .../AuthenticatorPluginConfig.java | 12 +- .../configuration/AuthorizerPluginConfig.java | 12 +- .../provider/AuthPluginConfigProvider.java | 4 +- .../AuthenticatorPluginConfigProvider.java | 21 +- .../AuthorizerPluginConfigProvider.java | 21 +- .../plugins/common/ConfigValidationUtils.java | 33 +- .../datahub/plugins/common/PluginConfig.java | 37 +- .../plugins/common/PluginConfigProvider.java | 1 - .../common/PluginPermissionManager.java | 9 +- .../datahub/plugins/common/PluginType.java | 12 +- .../datahub/plugins/common/SecurityMode.java | 29 +- .../datahub/plugins/common/YamlMapper.java | 9 +- .../datahub/plugins/configuration/Config.java | 20 +- .../plugins/configuration/ConfigProvider.java | 13 +- .../plugins/configuration/PluginConfig.java | 34 +- .../plugins/factory/PluginConfigFactory.java | 7 +- .../plugins/loader/IsolatedClassLoader.java | 63 +- .../datahub/plugins/loader/JarExtractor.java | 7 +- .../loader/PluginPermissionManagerImpl.java | 5 +- .../com/datahub/plugins/auth/TestConfig.java | 9 +- .../plugins/auth/TestConfigProvider.java | 65 +- .../auth/TestConfigValidationUtils.java | 1 - .../plugins/auth/TestIsolatedClassLoader.java | 138 +- .../plugins/auth/TestPluginConfigFactory.java | 21 +- .../auth/TestPluginPermissionManager.java | 43 +- .../plugins/test/TestAuthenticator.java | 17 +- .../datahub/plugins/test/TestAuthorizer.java | 12 +- .../test/TestLenientModeAuthenticator.java | 9 +- ...linkedin.analytics.analytics.restspec.json | 2 + .../com.linkedin.entity.aspects.restspec.json | 6 + ...com.linkedin.entity.entities.restspec.json | 26 +- ...m.linkedin.entity.entitiesV2.restspec.json | 3 + ...n.entity.entitiesVersionedV2.restspec.json | 2 + .../com.linkedin.entity.runs.restspec.json | 4 + ...nkedin.lineage.relationships.restspec.json | 4 + ...nkedin.operations.operations.restspec.json | 5 + ...m.linkedin.platform.platform.restspec.json | 2 + ...om.linkedin.usage.usageStats.restspec.json | 4 + ...linkedin.analytics.analytics.snapshot.json | 2 + .../com.linkedin.entity.aspects.snapshot.json | 6 + ...com.linkedin.entity.entities.snapshot.json | 26 +- ...m.linkedin.entity.entitiesV2.snapshot.json | 3 + 
...n.entity.entitiesVersionedV2.snapshot.json | 2 + .../com.linkedin.entity.runs.snapshot.json | 4 + ...nkedin.lineage.relationships.snapshot.json | 4 + ...nkedin.operations.operations.snapshot.json | 5 + ...m.linkedin.platform.platform.snapshot.json | 2 + ...om.linkedin.usage.usageStats.snapshot.json | 4 + .../main/java/com/linkedin/BatchGetUtils.java | 77 +- .../linkedin/common/client/BaseClient.java | 37 +- .../linkedin/common/client/ClientCache.java | 208 +- .../linkedin/entity/client/EntityClient.java | 378 +- .../entity/client/EntityClientCache.java | 235 +- .../entity/client/RestliEntityClient.java | 583 ++- .../entity/client/SystemEntityClient.java | 142 +- .../client/SystemRestliEntityClient.java | 28 +- .../java/com/linkedin/usage/UsageClient.java | 98 +- .../com/linkedin/usage/UsageClientCache.java | 92 +- .../common/client/BaseClientTest.java | 100 +- .../metadata/filter/RestliLoggingFilter.java | 8 +- .../resources/analytics/Analytics.java | 45 +- .../resources/entity/AspectResource.java | 293 +- .../entity/BatchIngestionRunResource.java | 492 ++- .../resources/entity/EntityResource.java | 837 ++-- .../resources/entity/EntityV2Resource.java | 119 +- .../entity/EntityVersionedV2Resource.java | 86 +- .../resources/entity/ResourceUtils.java | 9 +- .../resources/lineage/Relationships.java | 153 +- .../operations/OperationsResource.java | 206 +- .../metadata/resources/operations/Utils.java | 36 +- .../resources/platform/PlatformResource.java | 32 +- .../resources/restli/RestliConstants.java | 2 +- .../resources/restli/RestliUtils.java | 49 +- .../metadata/resources/usage/UsageStats.java | 313 +- .../resources/entity/AspectResourceTest.java | 73 +- .../operations/OperationsResourceTest.java | 124 +- .../mock/MockTimeseriesAspectService.java | 53 +- .../schema-registry-api/build.gradle | 2 - .../generated/CompatibilityCheckResponse.java | 45 +- .../openapi/generated/Config.java | 55 +- .../generated/ConfigUpdateRequest.java | 55 +- .../openapi/generated/ErrorMessage.java | 43 +- .../openapi/generated/Mode.java | 49 +- .../openapi/generated/ModeUpdateRequest.java | 49 +- .../generated/RegisterSchemaRequest.java | 71 +- .../generated/RegisterSchemaResponse.java | 37 +- .../openapi/generated/Schema.java | 75 +- .../openapi/generated/SchemaReference.java | 63 +- .../SchemaRegistryServerVersion.java | 43 +- .../openapi/generated/SchemaString.java | 64 +- .../openapi/generated/ServerClusterId.java | 43 +- .../openapi/generated/SubjectVersion.java | 43 +- .../java/io/swagger/api/CompatibilityApi.java | 311 +- .../api/CompatibilityApiController.java | 40 +- .../main/java/io/swagger/api/ConfigApi.java | 614 ++- .../io/swagger/api/ConfigApiController.java | 40 +- .../main/java/io/swagger/api/ContextsApi.java | 131 +- .../io/swagger/api/ContextsApiController.java | 40 +- .../main/java/io/swagger/api/DefaultApi.java | 162 +- .../io/swagger/api/DefaultApiController.java | 40 +- .../src/main/java/io/swagger/api/ModeApi.java | 529 ++- .../io/swagger/api/ModeApiController.java | 40 +- .../main/java/io/swagger/api/SchemasApi.java | 653 +++- .../io/swagger/api/SchemasApiController.java | 40 +- .../main/java/io/swagger/api/SubjectsApi.java | 1051 +++-- .../io/swagger/api/SubjectsApiController.java | 40 +- .../src/main/java/io/swagger/api/V1Api.java | 114 +- .../java/io/swagger/api/V1ApiController.java | 40 +- .../registry/SchemaRegistryController.java | 126 +- .../config/SpringWebSchemaRegistryConfig.java | 8 +- .../openapi/test/OpenAPISpringTestServer.java | 1 - 
.../OpenAPISpringTestServerConfiguration.java | 10 +- .../test/SchemaRegistryControllerTest.java | 82 +- ...maRegistryControllerTestConfiguration.java | 5 +- .../DataHubUsageEventConstants.java | 3 +- .../datahubusage/DataHubUsageEventType.java | 1 - .../linkedin/metadata/entity/AspectUtils.java | 86 +- .../metadata/entity/DeleteEntityService.java | 787 ++-- .../metadata/entity/DeleteEntityUtils.java | 162 +- .../metadata/entity/EntityService.java | 188 +- .../metadata/entity/IngestProposalResult.java | 1 - .../metadata/entity/IngestResult.java | 14 +- .../linkedin/metadata/entity/ListResult.java | 3 +- .../metadata/entity/RetentionService.java | 136 +- .../metadata/entity/RollbackResult.java | 2 - .../metadata/entity/RollbackRunResult.java | 1 - .../metadata/entity/UpdateAspectResult.java | 4 +- .../restoreindices/RestoreIndicesArgs.java | 86 +- .../restoreindices/RestoreIndicesResult.java | 18 +- .../retention/BulkApplyRetentionArgs.java | 10 +- .../retention/BulkApplyRetentionResult.java | 18 +- .../transactions/AbstractBatchItem.java | 124 +- .../entity/transactions/AspectsBatch.java | 26 +- .../com/linkedin/metadata/graph/Edge.java | 28 +- .../linkedin/metadata/graph/GraphClient.java | 18 +- .../linkedin/metadata/graph/GraphFilters.java | 1 - .../metadata/graph/GraphIndexUtils.java | 94 +- .../linkedin/metadata/graph/GraphService.java | 296 +- .../metadata/graph/RelatedEntity.java | 8 +- .../RecommendationsService.java | 46 +- .../DomainsCandidateSource.java | 5 +- .../EntitySearchAggregationSource.java | 87 +- .../RecentlySearchedSource.java | 51 +- .../candidatesource/RecommendationSource.java | 33 +- .../candidatesource/RecommendationUtils.java | 8 +- .../candidatesource/TopPlatformsSource.java | 39 +- .../candidatesource/TopTagsSource.java | 4 +- .../candidatesource/TopTermsSource.java | 4 +- .../ranker/RecommendationModuleRanker.java | 8 +- .../ranker/SimpleRecommendationRanker.java | 8 +- .../registry/SchemaRegistryService.java | 3 +- .../registry/SchemaRegistryServiceImpl.java | 55 +- .../metadata/resource/ResourceReference.java | 13 +- .../metadata/resource/SubResourceType.java | 4 +- .../metadata/search/EntitySearchService.java | 153 +- .../metadata/search/utils/QueryUtils.java | 75 +- .../metadata/secret/SecretService.java | 7 +- .../metadata/service/BaseService.java | 81 +- .../metadata/service/DataProductService.java | 266 +- .../metadata/service/DomainService.java | 169 +- .../metadata/service/GlossaryTermService.java | 260 +- .../metadata/service/LineageService.java | 474 ++- .../metadata/service/OwnerService.java | 106 +- .../service/OwnershipTypeService.java | 126 +- .../metadata/service/QueryService.java | 124 +- .../metadata/service/SettingsService.java | 95 +- .../linkedin/metadata/service/TagService.java | 241 +- .../metadata/service/ViewService.java | 108 +- .../metadata/shared/ValidationUtils.java | 152 +- .../systemmetadata/SystemMetadataService.java | 10 +- .../metadata/timeline/SemanticVersion.java | 16 +- .../metadata/timeline/TimelineService.java | 7 +- .../timeline/data/ChangeCategory.java | 11 +- .../metadata/timeline/data/ChangeEvent.java | 50 +- .../timeline/data/ChangeOperation.java | 36 +- .../timeline/data/ChangeTransaction.java | 6 +- .../timeline/data/PatchOperation.java | 1 - .../timeline/data/SemanticChangeType.java | 6 +- .../timeline/data/SemanticDifference.java | 3 +- .../timeseries/TimeseriesAspectService.java | 101 +- .../metadata/service/DomainServiceTest.java | 351 +- .../service/GlossaryTermServiceTest.java | 579 +-- 
.../metadata/service/LineageServiceTest.java | 376 +- .../metadata/service/OwnerServiceTest.java | 255 +- .../service/OwnershipTypeServiceTest.java | 423 +-- .../metadata/service/QueryServiceTest.java | 660 ++-- .../metadata/service/SettingsServiceTest.java | 345 +- .../metadata/service/TagServiceTest.java | 497 +-- .../metadata/service/ViewServiceTest.java | 664 ++-- .../java/com/datahub/gms/servlet/Config.java | 92 +- .../gms/servlet/ConfigSearchExport.java | 198 +- .../java/com/datahub/gms/util/CSVWriter.java | 66 +- .../authorization/PoliciesConfig.java | 1005 ++--- .../restli/DefaultRestliClientFactory.java | 56 +- .../linkedin/metadata/restli/RestliUtil.java | 35 +- .../metadata/utils/AuditStampUtils.java | 20 +- .../linkedin/metadata/utils/BrowseUtil.java | 28 +- .../metadata/utils/ConcurrencyUtils.java | 75 +- .../utils/DataPlatformInstanceUtils.java | 22 +- .../metadata/utils/EntityKeyUtils.java | 103 +- .../metadata/utils/GenericRecordUtils.java | 31 +- .../metadata/utils/IngestionUtils.java | 22 +- .../linkedin/metadata/utils/PegasusUtils.java | 71 +- .../linkedin/metadata/utils/SearchUtil.java | 107 +- .../metadata/utils/SystemMetadataUtils.java | 13 +- .../utils/elasticsearch/IndexConvention.java | 11 +- .../elasticsearch/IndexConventionImpl.java | 31 +- .../exception/UnsupportedGraphEntities.java | 4 +- .../metadata/utils/log/LogMessageFilter.java | 11 +- .../metadata/utils/metrics/MetricUtils.java | 10 +- .../metadata/utils/EntityKeyUtilsTest.java | 27 +- .../metadata/utils/IngestionUtilsTest.java | 14 +- .../metadata/utils/SearchUtilTest.java | 42 +- .../IndexConventionImplTest.java | 48 +- .../src/main/java/mock/MockAspectSpec.java | 18 +- .../main/java/mock/MockEntityRegistry.java | 2 - .../src/main/java/mock/MockEntitySpec.java | 28 +- .../java/com/datahub/utils/TestUtils.java | 5 +- .../com/datahub/test/testing/urn/BarUrn.java | 7 +- .../test/testing/urn/BarUrnCoercer.java | 4 +- .../test/testing/urn/BaseUrnCoercer.java | 4 +- .../com/datahub/test/testing/urn/BazUrn.java | 7 +- .../test/testing/urn/BazUrnCoercer.java | 4 +- .../com/datahub/test/testing/urn/FooUrn.java | 7 +- .../test/testing/urn/FooUrnCoercer.java | 4 +- .../datahub/test/testing/urn/PizzaUrn.java | 7 +- .../test/testing/urn/PizzaUrnCoercer.java | 4 +- .../testing/urn/SingleAspectEntityUrn.java | 1 - 1711 files changed, 91903 insertions(+), 71109 deletions(-) delete mode 100644 gradle/checkstyle/checkstyle.xml delete mode 100644 gradle/checkstyle/suppressions.xml diff --git a/build.gradle b/build.gradle index c1278a6dab1a0..f5e5403e822e7 100644 --- a/build.gradle +++ b/build.gradle @@ -21,6 +21,7 @@ buildscript { ext.kafkaVersion = '2.3.0' ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '12.16.1' + ext.googleJavaFormatVersion = '1.18.1' ext.docker_registry = 'linkedin' @@ -42,6 +43,7 @@ plugins { id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' id 'com.github.johnrengelman.shadow' version '6.1.0' id 'com.palantir.docker' version '0.35.0' apply false + id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" } @@ -225,13 +227,11 @@ project.ext.externalDependency = [ 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' - ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' - apply plugin: 'checkstyle' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' } @@ -253,6 +253,7 @@ subprojects { 
apply plugin: 'maven-publish' apply plugin: 'com.gorylenko.gradle-git-properties' + apply plugin: 'com.diffplug.spotless' gitProperties { keys = ['git.commit.id','git.commit.id.describe','git.commit.time'] @@ -266,6 +267,7 @@ subprojects { plugins.withType(JavaPlugin) { dependencies { constraints { + implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") implementation('io.netty:netty-all:4.1.100.Final') implementation('org.apache.commons:commons-compress:1.21') implementation('org.apache.velocity:velocity-engine-core:2.3') @@ -274,13 +276,32 @@ subprojects { implementation("com.fasterxml.jackson.core:jackson-dataformat-cbor:$jacksonVersion") } } - - checkstyle { - configDirectory = file("${project.rootDir}/gradle/checkstyle") - sourceSets = [ getProject().sourceSets.main, getProject().sourceSets.test ] - toolVersion = "8.0" - maxWarnings = 0 - ignoreFailures = false + spotless { + java { + googleJavaFormat() + target project.fileTree(project.projectDir) { + include '**/*.java' + exclude 'build/**/*.java' + exclude '**/generated/**/*.*' + exclude '**/mainGeneratedDataTemplate/**/*.*' + exclude '**/mainGeneratedRest/**/*.*' + } + } + } + afterEvaluate { + def spotlessJavaTask = tasks.findByName('spotlessJava') + def processTask = tasks.findByName('processResources') + if (processTask != null) { + spotlessJavaTask.dependsOn processTask + } + def compileJavaTask = tasks.findByName('compileJava') + if (compileJavaTask != null) { + spotlessJavaTask.dependsOn compileJavaTask + } + // TODO - Do not run this in CI. How? + // tasks.withType(JavaCompile) { + // finalizedBy(tasks.findByName('spotlessApply')) + // } } } diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index fe04c3629fe58..ef33bde8f61d3 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -1,5 +1,9 @@ package auth; +import static auth.AuthUtils.*; +import static auth.sso.oidc.OidcConfigs.*; +import static utils.ConfigUtil.*; + import auth.sso.SsoConfigs; import auth.sso.SsoManager; import auth.sso.oidc.OidcConfigs; @@ -18,12 +22,10 @@ import com.linkedin.util.Configuration; import config.ConfigurationProvider; import controllers.SsoCallbackController; - import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; - import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; @@ -42,205 +44,227 @@ import play.cache.SyncCacheApi; import utils.ConfigUtil; -import static auth.AuthUtils.*; -import static auth.sso.oidc.OidcConfigs.*; -import static utils.ConfigUtil.*; +/** Responsible for configuring, validating, and providing authentication related components. */ +public class AuthModule extends AbstractModule { + /** + * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration + * value provides a stable encryption base from which to derive the encryption key. + * + *
<p>
We hash this value (SHA256), then take the first 16 bytes as the AES key. + */ + private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; -/** - * Responsible for configuring, validating, and providing authentication related components. - */ -public class AuthModule extends AbstractModule { + private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; + private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; + private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + + private final com.typesafe.config.Config _configs; + + public AuthModule(final Environment environment, final com.typesafe.config.Config configs) { + _configs = configs; + } + @Override + protected void configure() { /** - * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration - * value provides a stable encryption base from which to derive the encryption key. - * - * We hash this value (SHA256), then take the first 16 bytes as the AES key. + * In Pac4J, you are given the option to store the profiles of authenticated users in either (i) + * PlayCacheSessionStore - saves your data in the Play cache or (ii) PlayCookieSessionStore + * saves your data in the Play session cookie However there is problem + * (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j + * profile in cookie. Whenever the profile returned by Pac4j is greater than 4096 characters, + * the response will be rejected by the browser. Default to PlayCacheCookieStore so that + * datahub-frontend container remains as a stateless service */ - private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; - private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; - private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; - private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - private final com.typesafe.config.Config _configs; - - public AuthModule(final Environment environment, final com.typesafe.config.Config configs) { - _configs = configs; + if (sessionStoreProvider.equals("PlayCacheSessionStore")) { + final PlayCacheSessionStore playCacheSessionStore = + new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); + bind(SessionStore.class).toInstance(playCacheSessionStore); + bind(PlaySessionStore.class).toInstance(playCacheSessionStore); + } else { + PlayCookieSessionStore playCacheCookieStore; + try { + // To generate a valid encryption key from an input value, we first + // hash the input to generate a fixed-length string. Then, we convert + // it to hex and slice the first 16 bytes, because AES key length must strictly + // have a specific length. 
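+        // For example, DigestUtils.sha256Hex(...) returns a 64-character hex digest; its
+        // first 16 characters, encoded as bytes, form the 16-byte (128-bit) AES key below.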
+ final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); + final String aesKeyHash = + DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); + final String aesEncryptionKey = aesKeyHash.substring(0, 16); + playCacheCookieStore = + new PlayCookieSessionStore(new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); + } catch (Exception e) { + throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); + } + bind(SessionStore.class).toInstance(playCacheCookieStore); + bind(PlaySessionStore.class).toInstance(playCacheCookieStore); } - @Override - protected void configure() { - /** - * In Pac4J, you are given the option to store the profiles of authenticated users in either - * (i) PlayCacheSessionStore - saves your data in the Play cache or - * (ii) PlayCookieSessionStore saves your data in the Play session cookie - * However there is problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j profile in cookie. - * Whenever the profile returned by Pac4j is greater than 4096 characters, the response will be rejected by the browser. - * Default to PlayCacheCookieStore so that datahub-frontend container remains as a stateless service - */ - String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - - if (sessionStoreProvider.equals("PlayCacheSessionStore")) { - final PlayCacheSessionStore playCacheSessionStore = new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); - bind(SessionStore.class).toInstance(playCacheSessionStore); - bind(PlaySessionStore.class).toInstance(playCacheSessionStore); - } else { - PlayCookieSessionStore playCacheCookieStore; - try { - // To generate a valid encryption key from an input value, we first - // hash the input to generate a fixed-length string. Then, we convert - // it to hex and slice the first 16 bytes, because AES key length must strictly - // have a specific length. - final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); - final String aesKeyHash = DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); - final String aesEncryptionKey = aesKeyHash.substring(0, 16); - playCacheCookieStore = new PlayCookieSessionStore( - new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); - } catch (Exception e) { - throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); - } - bind(SessionStore.class).toInstance(playCacheCookieStore); - bind(PlaySessionStore.class).toInstance(playCacheCookieStore); - } - - try { - bind(SsoCallbackController.class).toConstructor(SsoCallbackController.class.getConstructor( - SsoManager.class, - Authentication.class, - SystemEntityClient.class, - AuthServiceClient.class, - com.typesafe.config.Config.class)); - } catch (NoSuchMethodException | SecurityException e) { - throw new RuntimeException("Failed to bind to SsoCallbackController. Cannot find constructor", e); - } - // logout - final LogoutController logoutController = new LogoutController(); - logoutController.setDefaultUrl("/"); - bind(LogoutController.class).toInstance(logoutController); + try { + bind(SsoCallbackController.class) + .toConstructor( + SsoCallbackController.class.getConstructor( + SsoManager.class, + Authentication.class, + SystemEntityClient.class, + AuthServiceClient.class, + com.typesafe.config.Config.class)); + } catch (NoSuchMethodException | SecurityException e) { + throw new RuntimeException( + "Failed to bind to SsoCallbackController. 
Cannot find constructor", e); } + // logout + final LogoutController logoutController = new LogoutController(); + logoutController.setDefaultUrl("/"); + bind(LogoutController.class).toInstance(logoutController); + } - @Provides @Singleton - protected Config provideConfig(SsoManager ssoManager) { - if (ssoManager.isSsoEnabled()) { - final Clients clients = new Clients(); - final List clientList = new ArrayList<>(); - clientList.add(ssoManager.getSsoProvider().client()); - clients.setClients(clientList); - final Config config = new Config(clients); - config.setHttpActionAdapter(new PlayHttpActionAdapter()); - return config; - } - return new Config(); + @Provides + @Singleton + protected Config provideConfig(SsoManager ssoManager) { + if (ssoManager.isSsoEnabled()) { + final Clients clients = new Clients(); + final List clientList = new ArrayList<>(); + clientList.add(ssoManager.getSsoProvider().client()); + clients.setClients(clientList); + final Config config = new Config(clients); + config.setHttpActionAdapter(new PlayHttpActionAdapter()); + return config; } + return new Config(); + } - @Provides @Singleton - protected SsoManager provideSsoManager() { - SsoManager manager = new SsoManager(); - // Seed the SSO manager with a default SSO provider. - if (isSsoEnabled(_configs)) { - SsoConfigs ssoConfigs = new SsoConfigs(_configs); - if (ssoConfigs.isOidcEnabled()) { - // Register OIDC Provider, add to list of managers. - OidcConfigs oidcConfigs = new OidcConfigs(_configs); - OidcProvider oidcProvider = new OidcProvider(oidcConfigs); - // Set the default SSO provider to this OIDC client. - manager.setSsoProvider(oidcProvider); - } - } - return manager; + @Provides + @Singleton + protected SsoManager provideSsoManager() { + SsoManager manager = new SsoManager(); + // Seed the SSO manager with a default SSO provider. + if (isSsoEnabled(_configs)) { + SsoConfigs ssoConfigs = new SsoConfigs(_configs); + if (ssoConfigs.isOidcEnabled()) { + // Register OIDC Provider, add to list of managers. + OidcConfigs oidcConfigs = new OidcConfigs(_configs); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + // Set the default SSO provider to this OIDC client. + manager.setSsoProvider(oidcProvider); + } } + return manager; + } - @Provides - @Singleton - protected Authentication provideSystemAuthentication() { - // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service. - String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); - String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); - final Actor systemActor = - new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. - return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret), - Collections.emptyMap()); - } + @Provides + @Singleton + protected Authentication provideSystemAuthentication() { + // Returns an instance of Authentication used to authenticate system initiated calls to Metadata + // Service. + String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); + String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); + final Actor systemActor = + new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. 
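+    // The credentials string below takes the form "Basic <clientId>:<clientSecret>".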
+ return new Authentication( + systemActor, + String.format("Basic %s:%s", systemClientId, systemSecret), + Collections.emptyMap()); + } - @Provides - @Singleton - protected ConfigurationProvider provideConfigurationProvider() { - AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ConfigurationProvider.class); - return context.getBean(ConfigurationProvider.class); - } + @Provides + @Singleton + protected ConfigurationProvider provideConfigurationProvider() { + AnnotationConfigApplicationContext context = + new AnnotationConfigApplicationContext(ConfigurationProvider.class); + return context.getBean(ConfigurationProvider.class); + } - @Provides - @Singleton - protected SystemEntityClient provideEntityClient(final Authentication systemAuthentication, - final ConfigurationProvider configurationProvider) { - return new SystemRestliEntityClient(buildRestliClient(), - new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), - _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); - } + @Provides + @Singleton + protected SystemEntityClient provideEntityClient( + final Authentication systemAuthentication, + final ConfigurationProvider configurationProvider) { + return new SystemRestliEntityClient( + buildRestliClient(), + new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), + _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); + } - @Provides - @Singleton - protected CloseableHttpClient provideHttpClient() { - return HttpClients.createDefault(); - } + @Provides + @Singleton + protected CloseableHttpClient provideHttpClient() { + return HttpClients.createDefault(); + } - @Provides - @Singleton - protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) { - // Init a GMS auth client - final String metadataServiceHost = - _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) - : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); - - final int metadataServicePort = - _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) - : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - - final Boolean metadataServiceUseSsl = - _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? _configs.getBoolean( - METADATA_SERVICE_USE_SSL_CONFIG_PATH) - : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); - - return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, - systemAuthentication, httpClient); - } + @Provides + @Singleton + protected AuthServiceClient provideAuthClient( + Authentication systemAuthentication, CloseableHttpClient httpClient) { + // Init a GMS auth client + final String metadataServiceHost = + _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) + ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) + : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); + + final int metadataServicePort = + _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) + ? 
_configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) + : Integer.parseInt( + Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - private com.linkedin.restli.client.Client buildRestliClient() { - final String metadataServiceHost = utils.ConfigUtil.getString( + final Boolean metadataServiceUseSsl = + _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + ? _configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + : Boolean.parseBoolean( + Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + + return new AuthServiceClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + systemAuthentication, + httpClient); + } + + private com.linkedin.restli.client.Client buildRestliClient() { + final String metadataServiceHost = + utils.ConfigUtil.getString( _configs, METADATA_SERVICE_HOST_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = utils.ConfigUtil.getInt( + final int metadataServicePort = + utils.ConfigUtil.getInt( _configs, utils.ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = utils.ConfigUtil.getBoolean( + final boolean metadataServiceUseSsl = + utils.ConfigUtil.getBoolean( _configs, utils.ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); - final String metadataServiceSslProtocol = utils.ConfigUtil.getString( + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); + final String metadataServiceSslProtocol = + utils.ConfigUtil.getString( _configs, utils.ConfigUtil.METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL - ); - return DefaultRestliClientFactory.getRestLiClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, metadataServiceSslProtocol); - } + ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL); + return DefaultRestliClientFactory.getRestLiClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + metadataServiceSslProtocol); + } - protected boolean isSsoEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); - } + protected boolean isSsoEnabled(com.typesafe.config.Config configs) { + // If OIDC is enabled, we infer SSO to be enabled. + return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + } - protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( + protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { + // If OIDC is enabled, we infer SSO to be enabled. 
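+    // (Note: unlike isSsoEnabled above, this reads the Metadata Service auth flag.)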
+ return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals( Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH))); - } + } } - diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 386eee725c83d..283a2164584b9 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -1,137 +1,136 @@ package auth; import com.linkedin.common.urn.CorpuserUrn; -import lombok.extern.slf4j.Slf4j; -import play.mvc.Http; - -import javax.annotation.Nonnull; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import play.mvc.Http; @Slf4j public class AuthUtils { - /** - * The config path that determines whether Metadata Service Authentication is enabled. - * - * When enabled, the frontend server will proxy requests to the Metadata Service without requiring them to have a valid - * frontend-issued Session Cookie. This effectively means delegating the act of authentication to the Metadata Service. It - * is critical that if Metadata Service authentication is enabled at the frontend service layer, it is also enabled in the - * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. - * - * When disabled, the frontend server will require that all requests have a valid Session Cookie associated with them. Otherwise, - * requests will be denied with an Unauthorized error. - */ - public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = "metadataService.auth.enabled"; - - /** - * The attribute inside session cookie representing a GMS-issued access token - */ - public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; - - /** - * An ID used to identify system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; - - /** - * An Secret used to authenticate system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; - - /** - * Cookie name for redirect url that is manually separated from the session to reduce size - */ - public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; - - public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); - - public static final String LOGIN_ROUTE = "/login"; - public static final String USER_NAME = "username"; - public static final String PASSWORD = "password"; - public static final String ACTOR = "actor"; - public static final String ACCESS_TOKEN = "token"; - public static final String FULL_NAME = "fullName"; - public static final String EMAIL = "email"; - public static final String TITLE = "title"; - public static final String INVITE_TOKEN = "inviteToken"; - public static final String RESET_TOKEN = "resetToken"; - - /** - * Determines whether the inbound request should be forward to downstream Metadata Service. Today, this simply - * checks for the presence of an "Authorization" header or the presence of a valid session cookie issued - * by the frontend. - * - * Note that this method DOES NOT actually verify the authentication token of an inbound request. That will - * be handled by the downstream Metadata Service. Until then, the request should be treated as UNAUTHENTICATED. 
- * - * Returns true if the request is eligible to be forwarded to GMS, false otherwise. - */ - public static boolean isEligibleForForwarding(Http.Request req) { - return hasValidSessionCookie(req) || hasAuthHeader(req); + /** + * The config path that determines whether Metadata Service Authentication is enabled. + * + *
<p>
When enabled, the frontend server will proxy requests to the Metadata Service without
+   * requiring them to have a valid frontend-issued Session Cookie. This effectively means
+   * delegating the act of authentication to the Metadata Service. It is critical that if Metadata
+   * Service authentication is enabled at the frontend service layer, it is also enabled in the
+   * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata Service
+   * itself.
+   *
+   * 
<p>
When disabled, the frontend server will require that all requests have a valid Session
+   * Cookie associated with them. Otherwise, requests will be denied with an Unauthorized error.
+   */
+  public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH =
+      "metadataService.auth.enabled";
+
+  /** The attribute inside the session cookie representing a GMS-issued access token */
+  public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token";
+
+  /**
+   * An ID used to identify system callers that are internal to DataHub. Provided via configuration.
+   */
+  public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId";
+
+  /**
+   * A Secret used to authenticate system callers that are internal to DataHub. Provided via
+   * configuration.
+   */
+  public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret";
+
+  /** Cookie name for redirect url that is manually separated from the session to reduce size */
+  public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL";
+
+  public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub");
+
+  public static final String LOGIN_ROUTE = "/login";
+  public static final String USER_NAME = "username";
+  public static final String PASSWORD = "password";
+  public static final String ACTOR = "actor";
+  public static final String ACCESS_TOKEN = "token";
+  public static final String FULL_NAME = "fullName";
+  public static final String EMAIL = "email";
+  public static final String TITLE = "title";
+  public static final String INVITE_TOKEN = "inviteToken";
+  public static final String RESET_TOKEN = "resetToken";
+
+  /**
+   * Determines whether the inbound request should be forwarded to the downstream Metadata Service.
+   * Today, this simply checks for the presence of an "Authorization" header or the presence of a
+   * valid session cookie issued by the frontend.
+   *
+   * 
<p>
Note that this method DOES NOT actually verify the authentication token of an inbound + * request. That will be handled by the downstream Metadata Service. Until then, the request + * should be treated as UNAUTHENTICATED. + * + *
<p>
Returns true if the request is eligible to be forwarded to GMS, false otherwise. + */ + public static boolean isEligibleForForwarding(Http.Request req) { + return hasValidSessionCookie(req) || hasAuthHeader(req); + } + + /** + * Returns true if a request has a valid session cookie issued by the frontend server. Note that + * this DOES NOT verify whether the token within the session cookie will be accepted by the + * downstream GMS service. + * + *
<p>
Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, + * as well as their agreement to determine authentication status. + */ + public static boolean hasValidSessionCookie(final Http.Request req) { + Map sessionCookie = req.session().data(); + return sessionCookie.containsKey(ACCESS_TOKEN) + && sessionCookie.containsKey(ACTOR) + && req.getCookie(ACTOR).isPresent() + && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); + } + + /** Returns true if a request includes the Authorization header, false otherwise */ + public static boolean hasAuthHeader(final Http.Request req) { + return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); + } + + /** + * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. + * + * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub" + * @param ttlInHours the number of hours until the actor cookie expires after being set + */ + public static Http.Cookie createActorCookie( + @Nonnull final String actorUrn, + @Nonnull final Integer ttlInHours, + @Nonnull final String sameSite, + final boolean isSecure) { + return Http.Cookie.builder(ACTOR, actorUrn) + .withHttpOnly(false) + .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) + .withSameSite(convertSameSiteValue(sameSite)) + .withSecure(isSecure) + .build(); + } + + public static Map createSessionMap( + final String userUrnStr, final String accessToken) { + final Map sessionAttributes = new HashMap<>(); + sessionAttributes.put(ACTOR, userUrnStr); + sessionAttributes.put(ACCESS_TOKEN, accessToken); + return sessionAttributes; + } + + private AuthUtils() {} + + private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { + try { + return Http.Cookie.SameSite.valueOf(sameSiteValue); + } catch (IllegalArgumentException e) { + log.warn( + String.format( + "Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), + e); + return Http.Cookie.SameSite.LAX; } - - /** - * Returns true if a request has a valid session cookie issued by the frontend server. - * Note that this DOES NOT verify whether the token within the session cookie will be accepted - * by the downstream GMS service. - * - * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, - * as well as their agreement to determine authentication status. - */ - public static boolean hasValidSessionCookie(final Http.Request req) { - Map sessionCookie = req.session().data(); - return sessionCookie.containsKey(ACCESS_TOKEN) - && sessionCookie.containsKey(ACTOR) - && req.getCookie(ACTOR).isPresent() - && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); - } - - /** - * Returns true if a request includes the Authorization header, false otherwise - */ - public static boolean hasAuthHeader(final Http.Request req) { - return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); - } - - /** - * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. - * - * @param actorUrn the urn of the authenticated actor, e.g. 
"urn:li:corpuser:datahub" - * @param ttlInHours the number of hours until the actor cookie expires after being set - */ - public static Http.Cookie createActorCookie( - @Nonnull final String actorUrn, - @Nonnull final Integer ttlInHours, - @Nonnull final String sameSite, - final boolean isSecure - ) { - return Http.Cookie.builder(ACTOR, actorUrn) - .withHttpOnly(false) - .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) - .withSameSite(convertSameSiteValue(sameSite)) - .withSecure(isSecure) - .build(); - } - - public static Map createSessionMap(final String userUrnStr, final String accessToken) { - final Map sessionAttributes = new HashMap<>(); - sessionAttributes.put(ACTOR, userUrnStr); - sessionAttributes.put(ACCESS_TOKEN, accessToken); - return sessionAttributes; - } - - private AuthUtils() { } - - private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { - try { - return Http.Cookie.SameSite.valueOf(sameSiteValue); - } catch (IllegalArgumentException e) { - log.warn(String.format("Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), e); - return Http.Cookie.SameSite.LAX; - } - } - + } } diff --git a/datahub-frontend/app/auth/Authenticator.java b/datahub-frontend/app/auth/Authenticator.java index ae847b318dce2..8536fc7e01695 100644 --- a/datahub-frontend/app/auth/Authenticator.java +++ b/datahub-frontend/app/auth/Authenticator.java @@ -1,48 +1,49 @@ package auth; +import static auth.AuthUtils.*; + import com.typesafe.config.Config; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import javax.inject.Inject; import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -import static auth.AuthUtils.*; - - /** * Implementation of base Play Authentication used to determine if a request to a route should be * authenticated. */ public class Authenticator extends Security.Authenticator { - private final boolean metadataServiceAuthEnabled; + private final boolean metadataServiceAuthEnabled; - @Inject - public Authenticator(@Nonnull Config config) { - this.metadataServiceAuthEnabled = config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + @Inject + public Authenticator(@Nonnull Config config) { + this.metadataServiceAuthEnabled = + config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) && config.getBoolean(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH); + } + + @Override + public Optional getUsername(@Nonnull Http.Request req) { + if (this.metadataServiceAuthEnabled) { + // If Metadata Service auth is enabled, we only want to verify presence of the + // "Authorization" header OR the presence of a frontend generated session cookie. + // At this time, the actor is still considered to be unauthenicated. + return Optional.ofNullable( + AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); + } else { + // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. + return Optional.ofNullable( + AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); } + } - @Override - public Optional getUsername(@Nonnull Http.Request req) { - if (this.metadataServiceAuthEnabled) { - // If Metadata Service auth is enabled, we only want to verify presence of the - // "Authorization" header OR the presence of a frontend generated session cookie. - // At this time, the actor is still considered to be unauthenicated. 
- return Optional.ofNullable(AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); - } else { - // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. - return Optional.ofNullable(AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); - } - } - - @Override - @Nonnull - public Result onUnauthorized(@Nullable Http.Request req) { - return unauthorized(); - } + @Override + @Nonnull + public Result onUnauthorized(@Nullable Http.Request req) { + return unauthorized(); + } } diff --git a/datahub-frontend/app/auth/ConfigUtil.java b/datahub-frontend/app/auth/ConfigUtil.java index e0999ee00be38..9fbed91ce6a10 100644 --- a/datahub-frontend/app/auth/ConfigUtil.java +++ b/datahub-frontend/app/auth/ConfigUtil.java @@ -3,20 +3,20 @@ import com.typesafe.config.Config; import java.util.Optional; - public class ConfigUtil { - private ConfigUtil() { - } + private ConfigUtil() {} public static String getRequired(final Config configs, final String path) { if (!configs.hasPath(path)) { - throw new IllegalArgumentException(String.format("Missing required config with path %s", path)); + throw new IllegalArgumentException( + String.format("Missing required config with path %s", path)); } return configs.getString(path); } - public static String getOptional(final Config configs, final String path, final String defaultVal) { + public static String getOptional( + final Config configs, final String path, final String defaultVal) { if (!configs.hasPath(path)) { return defaultVal; } diff --git a/datahub-frontend/app/auth/CookieConfigs.java b/datahub-frontend/app/auth/CookieConfigs.java index b6da9b7a1833c..63b2ce61aaf9b 100644 --- a/datahub-frontend/app/auth/CookieConfigs.java +++ b/datahub-frontend/app/auth/CookieConfigs.java @@ -1,6 +1,5 @@ package auth; - import com.typesafe.config.Config; public class CookieConfigs { @@ -16,12 +15,18 @@ public class CookieConfigs { private final boolean _authCookieSecure; public CookieConfigs(final Config configs) { - _ttlInHours = configs.hasPath(SESSION_TTL_CONFIG_PATH) ? configs.getInt(SESSION_TTL_CONFIG_PATH) - : DEFAULT_SESSION_TTL_HOURS; - _authCookieSameSite = configs.hasPath(AUTH_COOKIE_SAME_SITE) ? configs.getString(AUTH_COOKIE_SAME_SITE) - : DEFAULT_AUTH_COOKIE_SAME_SITE; - _authCookieSecure = configs.hasPath(AUTH_COOKIE_SECURE) ? configs.getBoolean(AUTH_COOKIE_SECURE) - : DEFAULT_AUTH_COOKIE_SECURE; + _ttlInHours = + configs.hasPath(SESSION_TTL_CONFIG_PATH) + ? configs.getInt(SESSION_TTL_CONFIG_PATH) + : DEFAULT_SESSION_TTL_HOURS; + _authCookieSameSite = + configs.hasPath(AUTH_COOKIE_SAME_SITE) + ? configs.getString(AUTH_COOKIE_SAME_SITE) + : DEFAULT_AUTH_COOKIE_SAME_SITE; + _authCookieSecure = + configs.hasPath(AUTH_COOKIE_SECURE) + ? 
configs.getBoolean(AUTH_COOKIE_SECURE) + : DEFAULT_AUTH_COOKIE_SECURE; } public int getTtlInHours() { diff --git a/datahub-frontend/app/auth/JAASConfigs.java b/datahub-frontend/app/auth/JAASConfigs.java index f39c20aceb6f9..529bf98e1fdcf 100644 --- a/datahub-frontend/app/auth/JAASConfigs.java +++ b/datahub-frontend/app/auth/JAASConfigs.java @@ -6,17 +6,18 @@ */ public class JAASConfigs { - public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled"; + public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled"; - private Boolean _isEnabled = true; + private Boolean _isEnabled = true; - public JAASConfigs(final com.typesafe.config.Config configs) { - if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) { - _isEnabled = false; - } + public JAASConfigs(final com.typesafe.config.Config configs) { + if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) + && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) { + _isEnabled = false; } + } - public boolean isJAASEnabled() { - return _isEnabled; - } + public boolean isJAASEnabled() { + return _isEnabled; + } } diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java index 3114da92d7d79..772c2c8f92f28 100644 --- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java +++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java @@ -1,23 +1,27 @@ package auth; -/** - * Currently, this config enables or disable native user authentication. - */ +/** Currently, this config enables or disable native user authentication. */ public class NativeAuthenticationConfigs { public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled"; - public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = "auth.native.signUp.enforceValidEmail"; + public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = + "auth.native.signUp.enforceValidEmail"; private Boolean _isEnabled = true; private Boolean _isEnforceValidEmailEnabled = true; public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) { if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) { - _isEnabled = Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()); + _isEnabled = + Boolean.parseBoolean( + configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()); } if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) { _isEnforceValidEmailEnabled = - Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH).toString()); + Boolean.parseBoolean( + configs + .getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH) + .toString()); } } diff --git a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java index a6dbd69a93889..223ac669bd6ea 100644 --- a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java +++ b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java @@ -7,16 +7,15 @@ import play.api.mvc.FlashCookieBaker; import play.api.mvc.SessionCookieBaker; - public class CustomCookiesModule extends AbstractModule { @Override public void configure() { bind(CookieSigner.class).toProvider(CookieSignerProvider.class); - // We override the session cookie baker to not use a fallback, this prevents using an old URL Encoded cookie + // We override the session cookie 
baker to not use a fallback, this prevents using an old URL + // Encoded cookie bind(SessionCookieBaker.class).to(CustomSessionCookieBaker.class); // We don't care about flash cookies, we don't use them bind(FlashCookieBaker.class).to(DefaultFlashCookieBaker.class); } - } diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java index 062054173bddb..1f8455e773ffb 100644 --- a/datahub-frontend/app/auth/sso/SsoConfigs.java +++ b/datahub-frontend/app/auth/sso/SsoConfigs.java @@ -2,24 +2,19 @@ import static auth.ConfigUtil.*; - -/** - * Class responsible for extracting and validating top-level SSO related configurations. - */ +/** Class responsible for extracting and validating top-level SSO related configurations. */ public class SsoConfigs { - /** - * Required configs - */ + /** Required configs */ private static final String AUTH_BASE_URL_CONFIG_PATH = "auth.baseUrl"; + private static final String AUTH_BASE_CALLBACK_PATH_CONFIG_PATH = "auth.baseCallbackPath"; private static final String AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH = "auth.successRedirectPath"; public static final String OIDC_ENABLED_CONFIG_PATH = "auth.oidc.enabled"; - /** - * Default values - */ + /** Default values */ private static final String DEFAULT_BASE_CALLBACK_PATH = "/callback"; + private static final String DEFAULT_SUCCESS_REDIRECT_PATH = "/"; private final String _authBaseUrl; @@ -29,17 +24,14 @@ public class SsoConfigs { public SsoConfigs(final com.typesafe.config.Config configs) { _authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH); - _authBaseCallbackPath = getOptional( - configs, - AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, - DEFAULT_BASE_CALLBACK_PATH); - _authSuccessRedirectPath = getOptional( - configs, - AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, - DEFAULT_SUCCESS_REDIRECT_PATH); - _oidcEnabled = configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + _authBaseCallbackPath = + getOptional(configs, AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, DEFAULT_BASE_CALLBACK_PATH); + _authSuccessRedirectPath = + getOptional(configs, AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, DEFAULT_SUCCESS_REDIRECT_PATH); + _oidcEnabled = + configs.hasPath(OIDC_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals( + Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); } public String getAuthBaseUrl() { diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java index 739ce3f1ba450..bf33f4148a553 100644 --- a/datahub-frontend/app/auth/sso/SsoManager.java +++ b/datahub-frontend/app/auth/sso/SsoManager.java @@ -2,19 +2,16 @@ import javax.annotation.Nonnull; - -/** - * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. - */ +/** Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. */ public class SsoManager { private SsoProvider _provider; // Only one active provider at a time. - public SsoManager() { } + public SsoManager() {} /** - * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been - * provided to the manager. + * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the + * manager. * * @return true if SSO logic is enabled, false otherwise. */ @@ -34,8 +31,8 @@ public void setSsoProvider(@Nonnull final SsoProvider provider) { /** * Gets the active {@link SsoProvider} instance. 
* - * @return the {@SsoProvider} that should be used during authentication and on - * IdP callback, or null if SSO is not enabled. + * @return the {@SsoProvider} that should be used during authentication and on IdP callback, or + * null if SSO is not enabled. */ public SsoProvider getSsoProvider() { return _provider; diff --git a/datahub-frontend/app/auth/sso/SsoProvider.java b/datahub-frontend/app/auth/sso/SsoProvider.java index f7454d599ba99..a0947b52b92ae 100644 --- a/datahub-frontend/app/auth/sso/SsoProvider.java +++ b/datahub-frontend/app/auth/sso/SsoProvider.java @@ -3,15 +3,10 @@ import org.pac4j.core.client.Client; import org.pac4j.core.credentials.Credentials; -/** - * A thin interface over a Pac4j {@link Client} object and its - * associated configurations. - */ +/** A thin interface over a Pac4j {@link Client} object and its associated configurations. */ public interface SsoProvider { - /** - * The protocol used for SSO. - */ + /** The protocol used for SSO. */ enum SsoProtocol { OIDC("oidc"); // SAML -- not yet supported. @@ -28,19 +23,12 @@ public String getCommonName() { } } - /** - * Returns the configs required by the provider. - */ + /** Returns the configs required by the provider. */ C configs(); - /** - * Returns the SSO protocol associated with the provider instance. - */ + /** Returns the SSO protocol associated with the provider instance. */ SsoProtocol protocol(); - /** - * Retrieves an initialized Pac4j {@link Client}. - */ + /** Retrieves an initialized Pac4j {@link Client}. */ Client client(); - } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java index baca144610ec4..fa676d2d16c90 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java @@ -1,9 +1,9 @@ package auth.sso.oidc; +import com.nimbusds.jwt.JWT; +import com.nimbusds.jwt.JWTParser; import java.util.Map.Entry; import java.util.Optional; - -import com.nimbusds.jwt.JWTParser; import org.pac4j.core.authorization.generator.AuthorizationGenerator; import org.pac4j.core.context.WebContext; import org.pac4j.core.profile.AttributeLocation; @@ -14,44 +14,43 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.nimbusds.jwt.JWT; - public class OidcAuthorizationGenerator implements AuthorizationGenerator { - private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class); - - private final ProfileDefinition profileDef; + private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class); - private final OidcConfigs oidcConfigs; + private final ProfileDefinition profileDef; - public OidcAuthorizationGenerator(final ProfileDefinition profileDef, final OidcConfigs oidcConfigs) { - this.profileDef = profileDef; - this.oidcConfigs = oidcConfigs; - } + private final OidcConfigs oidcConfigs; - @Override - public Optional generate(WebContext context, UserProfile profile) { - if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { - try { - final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); - - CommonProfile commonProfile = new CommonProfile(); - - for (final Entry entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { - final String claimName = entry.getKey(); - - if (profile.getAttribute(claimName) == null) { - profileDef.convertAndAdd(commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, 
entry.getValue()); - } - } - - return Optional.of(commonProfile); - } catch (Exception e) { - logger.warn("Cannot parse access token claims", e); - } + public OidcAuthorizationGenerator( + final ProfileDefinition profileDef, final OidcConfigs oidcConfigs) { + this.profileDef = profileDef; + this.oidcConfigs = oidcConfigs; + } + + @Override + public Optional generate(WebContext context, UserProfile profile) { + if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { + try { + final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); + + CommonProfile commonProfile = new CommonProfile(); + + for (final Entry entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { + final String claimName = entry.getKey(); + + if (profile.getAttribute(claimName) == null) { + profileDef.convertAndAdd( + commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); + } } - - return Optional.ofNullable(profile); + + return Optional.of(commonProfile); + } catch (Exception e) { + logger.warn("Cannot parse access token claims", e); + } } - + + return Optional.ofNullable(profile); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java index 7164710f4e0de..fa562f54312ec 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java @@ -1,6 +1,13 @@ package auth.sso.oidc; +import static auth.AuthUtils.*; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static org.pac4j.play.store.PlayCookieSessionStore.*; +import static play.mvc.Results.internalServerError; + import auth.CookieConfigs; +import auth.sso.SsoManager; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; @@ -59,23 +66,16 @@ import org.pac4j.core.util.Pac4jConstants; import org.pac4j.play.PlayWebContext; import play.mvc.Result; -import auth.sso.SsoManager; - -import static auth.AuthUtils.*; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; -import static org.pac4j.play.store.PlayCookieSessionStore.*; -import static play.mvc.Results.internalServerError; - /** - * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects back to D - * DataHub after an authentication attempt. + * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects + * back to D DataHub after an authentication attempt. * - * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract - * basic information about the user including their name, email, groups, & more. If just-in-time provisioning - * is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for the user, along with any Groups - * ({@link CorpGroupSnapshot}) that can be extracted, only doing so if the user does not already exist. + *
<p>
On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract + * basic information about the user including their name, email, groups, & more. If just-in-time + * provisioning is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for + * the user, along with any Groups ({@link CorpGroupSnapshot}) that can be extracted, only doing so + * if the user does not already exist. */ @Slf4j public class OidcCallbackLogic extends DefaultCallbackLogic { @@ -86,9 +86,12 @@ public class OidcCallbackLogic extends DefaultCallbackLogic httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { setContextRedirectUrl(context); final Result result = - super.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, + super.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, defaultClient); // Handle OIDC authentication errors. @@ -119,14 +135,25 @@ public Result perform(PlayWebContext context, Config config, @SuppressWarnings("unchecked") private void setContextRedirectUrl(PlayWebContext context) { - Optional redirectUrl = context.getRequestCookies().stream() - .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())).findFirst(); + Optional redirectUrl = + context.getRequestCookies().stream() + .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())) + .findFirst(); redirectUrl.ifPresent( - cookie -> context.getSessionStore().set(context, Pac4jConstants.REQUESTED_URL, - JAVA_SER_HELPER.deserializeFromBytes(uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); + cookie -> + context + .getSessionStore() + .set( + context, + Pac4jConstants.REQUESTED_URL, + JAVA_SER_HELPER.deserializeFromBytes( + uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); } - private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result result, final PlayWebContext context, + private Result handleOidcCallback( + final OidcConfigs oidcConfigs, + final Result result, + final PlayWebContext context, final ProfileManager profileManager) { log.debug("Beginning OIDC Callback Handling..."); @@ -134,14 +161,17 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re if (profileManager.isAuthenticated()) { // If authenticated, the user should have a profile. final CommonProfile profile = (CommonProfile) profileManager.get(true).get(); - log.debug(String.format("Found authenticated user with profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Found authenticated user with profile %s", profile.getAttributes().toString())); // Extract the User name required to log into DataHub. final String userName = extractUserNameOrThrow(oidcConfigs, profile); final CorpuserUrn corpUserUrn = new CorpuserUrn(userName); try { - // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does not exist. + // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does + // not exist. if (oidcConfigs.isJitProvisioningEnabled()) { log.debug("Just-in-time provisioning is enabled. 
Beginning provisioning process..."); CorpUserSnapshot extractedUser = extractUser(corpUserUrn, profile); @@ -150,7 +180,8 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re // Extract groups & provision them. List extractedGroups = extractGroups(profile); tryProvisionGroups(extractedGroups); - // Add users to groups on DataHub. Note that this clears existing group membership for a user if it already exists. + // Add users to groups on DataHub. Note that this clears existing group membership for a + // user if it already exists. updateGroupMembership(corpUserUrn, createGroupMembership(extractedGroups)); } } else if (oidcConfigs.isPreProvisioningRequired()) { @@ -160,55 +191,69 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re } // Update user status to active on login. // If we want to prevent certain users from logging in, here's where we'll want to do it. - setUserStatus(corpUserUrn, new CorpUserStatus().setStatus(Constants.CORP_USER_STATUS_ACTIVE) - .setLastModified(new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()))); + setUserStatus( + corpUserUrn, + new CorpUserStatus() + .setStatus(Constants.CORP_USER_STATUS_ACTIVE) + .setLastModified( + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()))); } catch (Exception e) { log.error("Failed to perform post authentication steps. Redirecting to error page.", e); return internalServerError( - String.format("Failed to perform post authentication steps. Error message: %s", e.getMessage())); + String.format( + "Failed to perform post authentication steps. Error message: %s", e.getMessage())); } // Successfully logged in - Generate GMS login token final String accessToken = _authClient.generateSessionTokenForUser(corpUserUrn.getId()); return result - .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) - .withCookies( - createActorCookie( - corpUserUrn.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) + .withCookies( + createActorCookie( + corpUserUrn.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); } return internalServerError( "Failed to authenticate current user. Cannot find valid identity provider profile in session."); } - private String extractUserNameOrThrow(final OidcConfigs oidcConfigs, final CommonProfile profile) { + private String extractUserNameOrThrow( + final OidcConfigs oidcConfigs, final CommonProfile profile) { // Ensure that the attribute exists (was returned by IdP) if (!profile.containsAttribute(oidcConfigs.getUserNameClaim())) { - throw new RuntimeException(String.format( - "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s", - oidcConfigs.getUserNameClaim(), oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString())); + throw new RuntimeException( + String.format( + "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. 
Attribute: '%s', Regex: '%s', Profile: %s", + oidcConfigs.getUserNameClaim(), + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString())); } final String userNameClaim = (String) profile.getAttribute(oidcConfigs.getUserNameClaim()); - final Optional mappedUserName = extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); - - return mappedUserName.orElseThrow(() -> new RuntimeException( - String.format("Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", - userNameClaim, oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString()))); + final Optional mappedUserName = + extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); + + return mappedUserName.orElseThrow( + () -> + new RuntimeException( + String.format( + "Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", + userNameClaim, + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString()))); } - /** - * Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. - */ + /** Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. */ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { - log.debug(String.format("Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); // Extracts these based on the default set of OIDC claims, described here: // https://developer.okta.com/blog/2017/07/25/oidc-primer-part-1 @@ -217,7 +262,9 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { String email = profile.getEmail(); URI picture = profile.getPictureUrl(); String displayName = profile.getDisplayName(); - String fullName = (String) profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. + String fullName = + (String) + profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. if (fullName == null && firstName != null && lastName != null) { fullName = String.format("%s %s", firstName, lastName); } @@ -231,7 +278,8 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { userInfo.setFullName(fullName, SetMode.IGNORE_NULL); userInfo.setEmail(email, SetMode.IGNORE_NULL); // If there is a display name, use it. Otherwise fall back to full name. - userInfo.setDisplayName(displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); + userInfo.setDisplayName( + displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); final CorpUserEditableInfo editableInfo = new CorpUserEditableInfo(); try { @@ -254,15 +302,18 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { private List extractGroups(CommonProfile profile) { - log.debug(String.format("Attempting to extract groups from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract groups from OIDC profile %s", + profile.getAttributes().toString())); final OidcConfigs configs = (OidcConfigs) _ssoManager.getSsoProvider().configs(); - // First, attempt to extract a list of groups from the profile, using the group name attribute config. + // First, attempt to extract a list of groups from the profile, using the group name attribute + // config. 
final List extractedGroups = new ArrayList<>(); final List groupsClaimNames = - new ArrayList(Arrays.asList(configs.getGroupsClaimName().split(","))).stream() - .map(String::trim) - .collect(Collectors.toList()); + new ArrayList(Arrays.asList(configs.getGroupsClaimName().split(","))) + .stream().map(String::trim).collect(Collectors.toList()); for (final String groupsClaimName : groupsClaimNames) { @@ -273,14 +324,16 @@ private List extractGroups(CommonProfile profile) { final Object groupAttribute = profile.getAttribute(groupsClaimName); if (groupAttribute instanceof Collection) { // List of group names - groupNames = (Collection) profile.getAttribute(groupsClaimName, Collection.class); + groupNames = + (Collection) profile.getAttribute(groupsClaimName, Collection.class); } else if (groupAttribute instanceof String) { // Single group name groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class)); } else { log.error( - String.format("Fail to parse OIDC group claim with name %s. Unknown type %s provided.", groupsClaimName, - groupAttribute.getClass())); + String.format( + "Fail to parse OIDC group claim with name %s. Unknown type %s provided.", + groupsClaimName, groupAttribute.getClass())); // Skip over group attribute. Do not throw. groupNames = Collections.emptyList(); } @@ -297,7 +350,8 @@ private List extractGroups(CommonProfile profile) { corpGroupInfo.setDisplayName(groupName); // To deal with the possibility of spaces, we url encode the URN group name. - final String urlEncodedGroupName = URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); + final String urlEncodedGroupName = + URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); final CorpGroupUrn groupUrn = new CorpGroupUrn(urlEncodedGroupName); final CorpGroupSnapshot corpGroupSnapshot = new CorpGroupSnapshot(); corpGroupSnapshot.setUrn(groupUrn); @@ -306,18 +360,23 @@ private List extractGroups(CommonProfile profile) { corpGroupSnapshot.setAspects(aspects); groupSnapshots.add(corpGroupSnapshot); } catch (UnsupportedEncodingException ex) { - log.error(String.format("Failed to URL encoded extracted group name %s. Skipping", groupName)); + log.error( + String.format( + "Failed to URL encoded extracted group name %s. 
Skipping", groupName)); } } if (groupSnapshots.isEmpty()) { - log.warn(String.format("Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); + log.warn( + String.format( + "Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); } else { extractedGroups.addAll(groupSnapshots); } } catch (Exception e) { - log.error(String.format( - "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", - groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + log.error( + String.format( + "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); } } } @@ -327,7 +386,8 @@ private List extractGroups(CommonProfile profile) { private GroupMembership createGroupMembership(final List extractedGroups) { final GroupMembership groupMembershipAspect = new GroupMembership(); groupMembershipAspect.setGroups( - new UrnArray(extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + new UrnArray( + extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); return groupMembershipAspect; } @@ -345,30 +405,39 @@ private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) { // If we find more than the key aspect, then the entity "exists". if (existingCorpUserSnapshot.getAspects().size() <= 1) { log.debug( - String.format("Extracted user that does not yet exist %s. Provisioning...", corpUserSnapshot.getUrn())); + String.format( + "Extracted user that does not yet exist %s. Provisioning...", + corpUserSnapshot.getUrn())); // 2. The user does not exist. Provision them. final Entity newEntity = new Entity(); newEntity.setValue(Snapshot.create(corpUserSnapshot)); _entityClient.update(newEntity, _systemAuthentication); log.debug(String.format("Successfully provisioned user %s", corpUserSnapshot.getUrn())); } - log.debug(String.format("User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); + log.debug( + String.format( + "User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); // Otherwise, the user exists. Skip provisioning. } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); + throw new RuntimeException( + String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); } } private void tryProvisionGroups(List corpGroups) { - log.debug(String.format("Attempting to provision groups with urns %s", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + log.debug( + String.format( + "Attempting to provision groups with urns %s", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); // 1. Check if this user already exists. 
try { - final Set urnsToFetch = corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); - final Map existingGroups = _entityClient.batchGet(urnsToFetch, _systemAuthentication); + final Set urnsToFetch = + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); + final Map existingGroups = + _entityClient.batchGet(urnsToFetch, _systemAuthentication); log.debug(String.format("Fetched GMS groups with urns %s", existingGroups.keySet())); @@ -381,15 +450,21 @@ private void tryProvisionGroups(List corpGroups) { // If more than the key aspect exists, then the group already "exists". if (corpGroupSnapshot.getAspects().size() <= 1) { - log.debug(String.format("Extracted group that does not yet exist %s. Provisioning...", - corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + corpGroupSnapshot.getUrn())); groupsToCreate.add(extractedGroup); } - log.debug(String.format("Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); } else { // Should not occur until we stop returning default Key aspects for unrecognized entities. log.debug( - String.format("Extracted group that does not yet exist %s. Provisioning...", extractedGroup.getUrn())); + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + extractedGroup.getUrn())); groupsToCreate.add(extractedGroup); } } @@ -400,15 +475,20 @@ private void tryProvisionGroups(List corpGroups) { log.debug(String.format("Provisioning groups with urns %s", groupsToCreateUrns)); // Now batch create all entities identified to create. - _entityClient.batchUpdate(groupsToCreate.stream() - .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) - .collect(Collectors.toSet()), _systemAuthentication); + _entityClient.batchUpdate( + groupsToCreate.stream() + .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) + .collect(Collectors.toSet()), + _systemAuthentication); log.debug(String.format("Successfully provisioned groups with urns %s", groupsToCreateUrns)); } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision groups with urns %s.", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), e); + throw new RuntimeException( + String.format( + "Failed to provision groups with urns %s.", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), + e); } } @@ -423,12 +503,14 @@ private void updateGroupMembership(Urn urn, GroupMembership groupMembership) { try { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to update group membership for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to update group membership for user with urn %s", urn), e); } } private void verifyPreProvisionedUser(CorpuserUrn urn) { - // Validate that the user exists in the system (there is more than just a key aspect for them, as of today). + // Validate that the user exists in the system (there is more than just a key aspect for them, + // as of today). 
try { final Entity corpUser = _entityClient.get(urn, _systemAuthentication); @@ -436,9 +518,14 @@ private void verifyPreProvisionedUser(CorpuserUrn urn) { // If we find more than the key aspect, then the entity "exists". if (corpUser.getValue().getCorpUserSnapshot().getAspects().size() <= 1) { - log.debug(String.format("Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); - throw new RuntimeException(String.format("User with urn %s has not yet been provisioned in DataHub. " - + "Please contact your DataHub admin to provision an account.", urn)); + log.debug( + String.format( + "Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); + throw new RuntimeException( + String.format( + "User with urn %s has not yet been provisioned in DataHub. " + + "Please contact your DataHub admin to provision an account.", + urn)); } // Otherwise, the user exists. } catch (RemoteInvocationException e) { diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index eb037db2ef9c0..6877ca187da97 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -1,104 +1,122 @@ package auth.sso.oidc; +import static auth.ConfigUtil.*; + import auth.sso.SsoConfigs; import java.util.Optional; import lombok.Getter; -import static auth.ConfigUtil.*; - - -/** - * Class responsible for extracting and validating OIDC related configurations. - */ +/** Class responsible for extracting and validating OIDC related configurations. */ @Getter public class OidcConfigs extends SsoConfigs { - /** - * Required configs - */ - public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; - public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; - public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + /** Required configs */ + public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; + + public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; + public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + + /** Optional configs */ + public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; + + public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; + public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; + public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; + public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = + "auth.oidc.clientAuthenticationMethod"; + public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = + "auth.oidc.jitProvisioningEnabled"; + public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = + "auth.oidc.preProvisioningRequired"; + public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; + public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = + "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. 
+ public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; + public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; + public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; + public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; + public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; + public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = + "auth.oidc.extractJwtAccessTokenClaims"; + public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; - /** - * Optional configs - */ - public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; - public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; - public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; - public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; - public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = "auth.oidc.clientAuthenticationMethod"; - public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = "auth.oidc.jitProvisioningEnabled"; - public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = "auth.oidc.preProvisioningRequired"; - public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; - public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. - public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; - public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; - public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; - public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; - public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; - public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims"; - public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; + /** Default values */ + private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - /** - * Default values - */ - private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; - private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; // Often "group" must be included for groups. - private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; - private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; - private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; - private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; - private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; // False since extraction of groups can overwrite existing group membership. - private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; - private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; + private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; + private static final String DEFAULT_OIDC_SCOPE = + "openid profile email"; // Often "group" must be included for groups. 
+ private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; + private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; + private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; + private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; + private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = + "false"; // False since extraction of groups can overwrite existing group membership. + private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; + private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; - private String clientId; - private String clientSecret; - private String discoveryUri; - private String userNameClaim; - private String userNameClaimRegex; - private String scope; - private String clientName; - private String clientAuthenticationMethod; - private boolean jitProvisioningEnabled; - private boolean preProvisioningRequired; - private boolean extractGroupsEnabled; - private String groupsClaimName; - private Optional responseType; - private Optional responseMode; - private Optional useNonce; - private Optional customParamResource; - private String readTimeout; - private Optional extractJwtAccessTokenClaims; - private Optional preferredJwsAlgorithm; + private String clientId; + private String clientSecret; + private String discoveryUri; + private String userNameClaim; + private String userNameClaimRegex; + private String scope; + private String clientName; + private String clientAuthenticationMethod; + private boolean jitProvisioningEnabled; + private boolean preProvisioningRequired; + private boolean extractGroupsEnabled; + private String groupsClaimName; + private Optional responseType; + private Optional responseMode; + private Optional useNonce; + private Optional customParamResource; + private String readTimeout; + private Optional extractJwtAccessTokenClaims; + private Optional preferredJwsAlgorithm; - public OidcConfigs(final com.typesafe.config.Config configs) { - super(configs); - clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); - clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); - discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); - userNameClaim = getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); - userNameClaimRegex = - getOptional(configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); - scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); - clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); - clientAuthenticationMethod = getOptional(configs, OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, + public OidcConfigs(final com.typesafe.config.Config configs) { + super(configs); + clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); + clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); + discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); + userNameClaim = + getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); + userNameClaimRegex = + getOptional( + configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); + scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); + clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); + clientAuthenticationMethod = + getOptional( + configs, + OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, 
DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); - jitProvisioningEnabled = Boolean.parseBoolean( - getOptional(configs, OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); - preProvisioningRequired = Boolean.parseBoolean( - getOptional(configs, OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); - extractGroupsEnabled = Boolean.parseBoolean( + jitProvisioningEnabled = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, + DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); + preProvisioningRequired = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, + DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); + extractGroupsEnabled = + Boolean.parseBoolean( getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); - groupsClaimName = getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); - responseType = getOptional(configs, OIDC_RESPONSE_TYPE); - responseMode = getOptional(configs, OIDC_RESPONSE_MODE); - useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); - customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); - readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); - extractJwtAccessTokenClaims = getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); - preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); - } + groupsClaimName = + getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); + responseType = getOptional(configs, OIDC_RESPONSE_TYPE); + responseMode = getOptional(configs, OIDC_RESPONSE_MODE); + useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); + customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); + readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); + extractJwtAccessTokenClaims = + getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); + preferredJwsAlgorithm = + Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java index fd0a2e1877154..39a65a46cbf91 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java @@ -10,15 +10,15 @@ import org.pac4j.oidc.credentials.OidcCredentials; import org.pac4j.oidc.profile.OidcProfileDefinition; - /** * Implementation of {@link SsoProvider} supporting the OIDC protocol. * - * This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC related - * configuration options, which reside in an instance of {@link OidcConfigs}. + *
<p>
This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC + * related configuration options, which reside in an instance of {@link OidcConfigs}. * - * It is responsible for initializing this client from a configuration object ({@link OidcConfigs}. Note that - * this class is not related to the logic performed when an IdP performs a callback to DataHub. + *
<p>
It is responsible for initializing this client from a configuration object ({@link + * OidcConfigs}. Note that this class is not related to the logic performed when an IdP performs a + * callback to DataHub. */ @Slf4j public class OidcProvider implements SsoProvider { @@ -53,7 +53,8 @@ private Client createPac4jClient() { oidcConfiguration.setClientId(_oidcConfigs.getClientId()); oidcConfiguration.setSecret(_oidcConfigs.getClientSecret()); oidcConfiguration.setDiscoveryURI(_oidcConfigs.getDiscoveryUri()); - oidcConfiguration.setClientAuthenticationMethodAsString(_oidcConfigs.getClientAuthenticationMethod()); + oidcConfiguration.setClientAuthenticationMethodAsString( + _oidcConfigs.getClientAuthenticationMethod()); oidcConfiguration.setScope(_oidcConfigs.getScope()); try { oidcConfiguration.setReadTimeout(Integer.parseInt(_oidcConfigs.getReadTimeout())); @@ -63,18 +64,24 @@ private Client createPac4jClient() { _oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType); _oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode); _oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce); - _oidcConfigs.getCustomParamResource() + _oidcConfigs + .getCustomParamResource() .ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value))); - _oidcConfigs.getPreferredJwsAlgorithm().ifPresent(preferred -> { - log.info("Setting preferredJwsAlgorithm: " + preferred); - oidcConfiguration.setPreferredJwsAlgorithm(preferred); - }); + _oidcConfigs + .getPreferredJwsAlgorithm() + .ifPresent( + preferred -> { + log.info("Setting preferredJwsAlgorithm: " + preferred); + oidcConfiguration.setPreferredJwsAlgorithm(preferred); + }); final CustomOidcClient oidcClient = new CustomOidcClient(oidcConfiguration); oidcClient.setName(OIDC_CLIENT_NAME); - oidcClient.setCallbackUrl(_oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); + oidcClient.setCallbackUrl( + _oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); oidcClient.setCallbackUrlResolver(new PathParameterCallbackUrlResolver()); - oidcClient.addAuthorizationGenerator(new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); + oidcClient.addAuthorizationGenerator( + new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); return oidcClient; } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java index 014632c17e690..9881b5e095b78 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java @@ -1,57 +1,58 @@ package auth.sso.oidc; +import static play.mvc.Results.internalServerError; +import static play.mvc.Results.unauthorized; + +import java.util.Optional; import org.pac4j.play.PlayWebContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.mvc.Result; -import java.util.Optional; - -import static play.mvc.Results.internalServerError; -import static play.mvc.Results.unauthorized; - - public class OidcResponseErrorHandler { - private OidcResponseErrorHandler() { - - } - - private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); + private OidcResponseErrorHandler() {} - private static final String ERROR_FIELD_NAME = "error"; - private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; + private static final Logger _logger = 
LoggerFactory.getLogger("OidcResponseErrorHandler"); - public static Result handleError(final PlayWebContext context) { + private static final String ERROR_FIELD_NAME = "error"; + private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; - _logger.warn("OIDC responded with an error: '{}'. Error description: '{}'", - getError(context), - getErrorDescription(context)); + public static Result handleError(final PlayWebContext context) { - if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { - return unauthorized(String.format("Access denied. " - + "The OIDC service responded with 'Access denied'. " - + "It seems that you don't have access to this application yet. Please apply for access. \n\n" - + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " - + "Error details: '%s':'%s'", - context.getRequestParameter("error"), - context.getRequestParameter("error_description"))); - } + _logger.warn( + "OIDC responded with an error: '{}'. Error description: '{}'", + getError(context), + getErrorDescription(context)); - return internalServerError( - String.format("Internal server error. The OIDC service responded with an error: '%s'.\n" - + "Error description: '%s'", getError(context).orElse(""), getErrorDescription(context).orElse(""))); + if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { + return unauthorized( + String.format( + "Access denied. " + + "The OIDC service responded with 'Access denied'. " + + "It seems that you don't have access to this application yet. Please apply for access. \n\n" + + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " + + "Error details: '%s':'%s'", + context.getRequestParameter("error"), + context.getRequestParameter("error_description"))); } - public static boolean isError(final PlayWebContext context) { - return getError(context).isPresent() && !getError(context).get().isEmpty(); - } + return internalServerError( + String.format( + "Internal server error. 
The OIDC service responded with an error: '%s'.\n" + + "Error description: '%s'", + getError(context).orElse(""), getErrorDescription(context).orElse(""))); + } - public static Optional getError(final PlayWebContext context) { - return context.getRequestParameter(ERROR_FIELD_NAME); - } + public static boolean isError(final PlayWebContext context) { + return getError(context).isPresent() && !getError(context).get().isEmpty(); + } - public static Optional getErrorDescription(final PlayWebContext context) { - return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); - } + public static Optional getError(final PlayWebContext context) { + return context.getRequestParameter(ERROR_FIELD_NAME); + } + + public static Optional getErrorDescription(final PlayWebContext context) { + return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java index 8c8c250fb7e63..01f8f16171d13 100644 --- a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java +++ b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java @@ -1,8 +1,8 @@ package auth.sso.oidc.custom; -import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; +import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.ParseException; import com.nimbusds.oauth2.sdk.TokenErrorResponse; import com.nimbusds.oauth2.sdk.TokenRequest; @@ -37,7 +37,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class CustomOidcAuthenticator implements Authenticator { private static final Logger logger = LoggerFactory.getLogger(OidcAuthenticator.class); @@ -61,14 +60,17 @@ public CustomOidcAuthenticator(final OidcClient client) { this.client = client; // check authentication methods - final List metadataMethods = configuration.findProviderMetadata().getTokenEndpointAuthMethods(); + final List metadataMethods = + configuration.findProviderMetadata().getTokenEndpointAuthMethods(); - final ClientAuthenticationMethod preferredMethod = getPreferredAuthenticationMethod(configuration); + final ClientAuthenticationMethod preferredMethod = + getPreferredAuthenticationMethod(configuration); final ClientAuthenticationMethod chosenMethod; if (CommonHelper.isNotEmpty(metadataMethods)) { if (preferredMethod != null) { - if (ClientAuthenticationMethod.NONE.equals(preferredMethod) || metadataMethods.contains(preferredMethod)) { + if (ClientAuthenticationMethod.NONE.equals(preferredMethod) + || metadataMethods.contains(preferredMethod)) { chosenMethod = preferredMethod; } else { throw new TechnicalException( @@ -83,8 +85,10 @@ public CustomOidcAuthenticator(final OidcClient client) { chosenMethod = firstSupportedMethod(metadataMethods); } } else { - chosenMethod = preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); - logger.info("Provider metadata does not provide Token endpoint authentication methods. Using: {}", + chosenMethod = + preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); + logger.info( + "Provider metadata does not provide Token endpoint authentication methods. 
Using: {}", chosenMethod); } @@ -103,38 +107,41 @@ public CustomOidcAuthenticator(final OidcClient client) { } /** - * The preferred {@link ClientAuthenticationMethod} specified in the given - * {@link OidcConfiguration}, or null meaning that the a - * provider-supported method should be chosen. + * The preferred {@link ClientAuthenticationMethod} specified in the given {@link + * OidcConfiguration}, or null meaning that the a provider-supported method should be + * chosen. */ - private static ClientAuthenticationMethod getPreferredAuthenticationMethod(OidcConfiguration config) { + private static ClientAuthenticationMethod getPreferredAuthenticationMethod( + OidcConfiguration config) { final ClientAuthenticationMethod configurationMethod = config.getClientAuthenticationMethod(); if (configurationMethod == null) { return null; } if (!SUPPORTED_METHODS.contains(configurationMethod)) { - throw new TechnicalException("Configured authentication method (" + configurationMethod + ") is not supported."); + throw new TechnicalException( + "Configured authentication method (" + configurationMethod + ") is not supported."); } return configurationMethod; } /** - * The first {@link ClientAuthenticationMethod} from the given list of - * methods that is supported by this implementation. + * The first {@link ClientAuthenticationMethod} from the given list of methods that is supported + * by this implementation. * - * @throws TechnicalException - * if none of the provider-supported methods is supported. + * @throws TechnicalException if none of the provider-supported methods is supported. */ - private static ClientAuthenticationMethod firstSupportedMethod(final List metadataMethods) { + private static ClientAuthenticationMethod firstSupportedMethod( + final List metadataMethods) { Optional firstSupported = metadataMethods.stream().filter((m) -> SUPPORTED_METHODS.contains(m)).findFirst(); if (firstSupported.isPresent()) { return firstSupported.get(); } else { - throw new TechnicalException("None of the Token endpoint provider metadata authentication methods are supported: " - + metadataMethods); + throw new TechnicalException( + "None of the Token endpoint provider metadata authentication methods are supported: " + + metadataMethods); } } @@ -145,21 +152,30 @@ public void validate(final OidcCredentials credentials, final WebContext context if (code != null) { try { final String computedCallbackUrl = client.computeFinalCallbackUrl(context); - CodeVerifier verifier = (CodeVerifier) configuration.getValueRetriever() - .retrieve(client.getCodeVerifierSessionAttributeName(), client, context).orElse(null); + CodeVerifier verifier = + (CodeVerifier) + configuration + .getValueRetriever() + .retrieve(client.getCodeVerifierSessionAttributeName(), client, context) + .orElse(null); // Token request - final TokenRequest request = createTokenRequest(new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier)); + final TokenRequest request = + createTokenRequest( + new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier)); HTTPRequest tokenHttpRequest = request.toHTTPRequest(); tokenHttpRequest.setConnectTimeout(configuration.getConnectTimeout()); tokenHttpRequest.setReadTimeout(configuration.getReadTimeout()); final HTTPResponse httpResponse = tokenHttpRequest.send(); - logger.debug("Token response: status={}, content={}", httpResponse.getStatusCode(), + logger.debug( + "Token response: status={}, content={}", + httpResponse.getStatusCode(), httpResponse.getContent()); final TokenResponse 
response = OIDCTokenResponseParser.parse(httpResponse); if (response instanceof TokenErrorResponse) { - throw new TechnicalException("Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); + throw new TechnicalException( + "Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); } logger.debug("Token response successful"); final OIDCTokenResponse tokenSuccessResponse = (OIDCTokenResponse) response; @@ -178,11 +194,15 @@ public void validate(final OidcCredentials credentials, final WebContext context private TokenRequest createTokenRequest(final AuthorizationGrant grant) { if (clientAuthentication != null) { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - this.clientAuthentication, grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + this.clientAuthentication, + grant); } else { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - new ClientID(configuration.getClientId()), grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + new ClientID(configuration.getClientId()), + grant); } } } diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 24183f5c625da..4d40f45cd09b4 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -3,7 +3,6 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; - import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; @@ -17,17 +16,16 @@ import org.apache.http.util.EntityUtils; import play.mvc.Http; - -/** - * This class is responsible for coordinating authentication with the backend Metadata Service. - */ +/** This class is responsible for coordinating authentication with the backend Metadata Service. 
*/ @Slf4j public class AuthServiceClient { private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser"; private static final String SIGN_UP_ENDPOINT = "auth/signUp"; - private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; - private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; + private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/resetNativeUserCredentials"; + private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/verifyNativeUserCredentials"; private static final String TRACK_ENDPOINT = "auth/track"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; @@ -39,7 +37,8 @@ public class AuthServiceClient { private static final String INVITE_TOKEN_FIELD = "inviteToken"; private static final String RESET_TOKEN_FIELD = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch"; private final String metadataServiceHost; @@ -48,8 +47,11 @@ public class AuthServiceClient { private final Authentication systemAuthentication; private final CloseableHttpClient httpClient; - public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, - @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication, + public AuthServiceClient( + @Nonnull final String metadataServiceHost, + @Nonnull final Integer metadataServicePort, + @Nonnull final Boolean useSsl, + @Nonnull final Authentication systemAuthentication, @Nonnull final CloseableHttpClient httpClient) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); @@ -59,10 +61,11 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin } /** - * Call the Auth Service to generate a session token for a particular user with a unique actor id, or throws an exception if generation fails. + * Call the Auth Service to generate a session token for a particular user with a unique actor id, + * or throws an exception if generation fails. * - * Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of an Actor of type - * USER. + *
<p>
Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of + * an Actor of type USER. */ @Nonnull public String generateSessionTokenForUser(@Nonnull final String userId) { @@ -72,15 +75,21 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - GENERATE_SESSION_TOKEN_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + GENERATE_SESSION_TOKEN_ENDPOINT)); // Build JSON request to generate a token on behalf of a user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_ID_FIELD, userId); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -94,7 +103,8 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { return getAccessTokenFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", + String.format( + "Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { @@ -110,11 +120,14 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { } } - /** - * Call the Auth Service to create a native Datahub user. - */ - public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email, - @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) { + /** Call the Auth Service to create a native Datahub user. */ + public boolean signUp( + @Nonnull final String userUrn, + @Nonnull final String fullName, + @Nonnull final String email, + @Nonnull final String title, + @Nonnull final String password, + @Nonnull final String inviteToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(fullName, "fullName must not be null"); Objects.requireNonNull(email, "email must not be null"); @@ -126,9 +139,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - SIGN_UP_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, SIGN_UP_ENDPOINT)); // Build JSON request to sign up a native user. 
final ObjectMapper objectMapper = new ObjectMapper(); @@ -139,7 +154,8 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN objectNode.put(TITLE_FIELD, title); objectNode.put(PASSWORD_FIELD, password); objectNode.put(INVITE_TOKEN_FIELD, inviteToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -152,11 +168,15 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN final String jsonStr = EntityUtils.toString(entity); return getIsNativeUserCreatedFromJson(jsonStr); } else { - String content = response.getEntity().getContent() == null ? "" : new String( - response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); + String content = + response.getEntity().getContent() == null + ? "" + : new String( + response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s Body: %s", response.getStatusLine().toString(), - response.getEntity().toString(), content)); + String.format( + "Bad response from the Metadata Service: %s %s Body: %s", + response.getStatusLine().toString(), response.getEntity().toString(), content)); } } catch (Exception e) { throw new RuntimeException(String.format("Failed to create user %s", userUrn), e); @@ -171,10 +191,10 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN } } - /** - * Call the Auth Service to reset credentials for a native DataHub user. - */ - public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password, + /** Call the Auth Service to reset credentials for a native DataHub user. */ + public boolean resetNativeUserCredentials( + @Nonnull final String userUrn, + @Nonnull final String password, @Nonnull final String resetToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); @@ -184,9 +204,14 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -194,7 +219,8 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); objectNode.put(RESET_TOKEN_FIELD, resetToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. 
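For context, the caller side of this flow lives in AuthenticationController further down
in this patch, where sign-up and the follow-up token grant are paired. A condensed
sketch, with `authClient` standing in for the injected AuthServiceClient:

    final Urn userUrn = new CorpuserUrn(email);   // e.g. urn:li:corpuser:<email>
    final String userUrnString = userUrn.toString();
    authClient.signUp(userUrnString, fullName, email, title, password, inviteToken);
    // Note: generateSessionTokenForUser() expects the raw actor id, not the urn string.
    final String accessToken = authClient.generateSessionTokenForUser(userUrn.getId());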
@@ -208,8 +234,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul return getAreNativeUserCredentialsResetFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to reset credentials for user", e); @@ -224,10 +251,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul } } - /** - * Call the Auth Service to verify the credentials for a native Datahub user. - */ - public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { + /** Call the Auth Service to verify the credentials for a native Datahub user. */ + public boolean verifyNativeUserCredentials( + @Nonnull final String userUrn, @Nonnull final String password) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); CloseableHttpResponse response = null; @@ -235,16 +261,22 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -258,8 +290,9 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu return getDoesPasswordMatchFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to verify credentials for user", e); @@ -274,18 +307,18 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu } } - /** - * Call the Auth Service to track an analytics event - */ + /** Call the Auth Service to track an analytics event */ public void track(@Nonnull final String event) { Objects.requireNonNull(event, "event must not be null"); CloseableHttpResponse response = null; try { final String protocol = this.metadataServiceUseSsl ? 
"https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - TRACK_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, TRACK_ENDPOINT)); // Build JSON request to track event. request.setEntity(new StringEntity(event, StandardCharsets.UTF_8)); @@ -298,8 +331,9 @@ public void track(@Nonnull final String event) { if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to track event", e); diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java index 59e91a6d5a0f7..b7173684b6350 100644 --- a/datahub-frontend/app/client/KafkaTrackingProducer.java +++ b/datahub-frontend/app/client/KafkaTrackingProducer.java @@ -3,6 +3,15 @@ import com.linkedin.metadata.config.kafka.ProducerConfiguration; import com.typesafe.config.Config; import config.ConfigurationProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.inject.Inject; +import javax.inject.Singleton; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -15,98 +24,141 @@ import play.api.inject.ApplicationLifecycle; import utils.ConfigUtil; -import javax.inject.Inject; - -import javax.annotation.Nonnull; -import javax.inject.Singleton; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; - @Singleton public class KafkaTrackingProducer { - private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); - private static final List KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList( - Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), - SecurityProtocol.SASL_PLAINTEXT.name())); - - private final Boolean _isEnabled; - private final KafkaProducer _producer; - - @Inject - public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle, final ConfigurationProvider configurationProvider) { - _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); - - if (_isEnabled) { - _logger.debug("Analytics tracking is enabled"); - _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); - - lifecycle.addStopHook( - () -> { - _producer.flush(); - _producer.close(); - return CompletableFuture.completedFuture(null); - }); - } else { - _logger.debug("Analytics tracking is disabled"); - _producer = null; - } - } - - public Boolean isEnabled() { - return _isEnabled; + private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); + private static final List KAFKA_SSL_PROTOCOLS = + Collections.unmodifiableList( + 
Arrays.asList( + SecurityProtocol.SSL.name(), + SecurityProtocol.SASL_SSL.name(), + SecurityProtocol.SASL_PLAINTEXT.name())); + + private final Boolean _isEnabled; + private final KafkaProducer _producer; + + @Inject + public KafkaTrackingProducer( + @Nonnull Config config, + ApplicationLifecycle lifecycle, + final ConfigurationProvider configurationProvider) { + _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); + + if (_isEnabled) { + _logger.debug("Analytics tracking is enabled"); + _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); + + lifecycle.addStopHook( + () -> { + _producer.flush(); + _producer.close(); + return CompletableFuture.completedFuture(null); + }); + } else { + _logger.debug("Analytics tracking is disabled"); + _producer = null; } - - public void send(ProducerRecord record) { - _producer.send(record); + } + + public Boolean isEnabled() { + return _isEnabled; + } + + public void send(ProducerRecord record) { + _producer.send(record); + } + + private static KafkaProducer createKafkaProducer( + Config config, ProducerConfiguration producerConfiguration) { + final Properties props = new Properties(); + props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); + props.put( + ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, + config.getString("analytics.kafka.delivery.timeout.ms")); + props.put( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + config.getString("analytics.kafka.bootstrap.server")); + props.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. + props.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. 
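    // The analytics.* keys read throughout this method come from the frontend's
    // Typesafe config (typically application.conf); illustrative entries with
    // placeholder values:
    //   analytics.kafka.delivery.timeout.ms = 30000
    //   analytics.kafka.bootstrap.server = "broker:9092"
    //   analytics.kafka.security.protocol = "SASL_SSL"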
+ props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); + props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); + + final String securityProtocolConfig = "analytics.kafka.security.protocol"; + if (config.hasPath(securityProtocolConfig) + && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { + props.put( + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); + setConfig( + config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); + + setConfig( + config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.keystore.location"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.keystore.password"); + + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + "analytics.kafka.ssl.truststore.type"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.truststore.location"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.truststore.password"); + + setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); + setConfig( + config, + props, + SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, + "analytics.kafka.ssl.endpoint.identification.algorithm"); + + final String securityProtocol = config.getString(securityProtocolConfig); + if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) + || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { + setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); + setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); + setConfig( + config, + props, + SaslConfigs.SASL_KERBEROS_SERVICE_NAME, + "analytics.kafka.sasl.kerberos.service.name"); + setConfig( + config, + props, + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.login.callback.handler.class"); + setConfig( + config, + props, + SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.client.callback.handler.class"); + } } - private static KafkaProducer createKafkaProducer(Config config, ProducerConfiguration producerConfiguration) { - final Properties props = new Properties(); - props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); - props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms")); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server")); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. 
- props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); - props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); - - final String securityProtocolConfig = "analytics.kafka.security.protocol"; - if (config.hasPath(securityProtocolConfig) - && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { - props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); - setConfig(config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); - - setConfig(config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.keystore.location"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.keystore.password"); - - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "analytics.kafka.ssl.truststore.type"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.truststore.location"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.truststore.password"); - - setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); - setConfig(config, props, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "analytics.kafka.ssl.endpoint.identification.algorithm"); - - final String securityProtocol = config.getString(securityProtocolConfig); - if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) - || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { - setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); - setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); - setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "analytics.kafka.sasl.kerberos.service.name"); - setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class"); - setConfig(config, props, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class"); - } - } - - return new org.apache.kafka.clients.producer.KafkaProducer(props); - } + return new org.apache.kafka.clients.producer.KafkaProducer(props); + } - private static void setConfig(Config config, Properties props, String key, String configKey) { - Optional.ofNullable(ConfigUtil.getString(config, configKey, null)) - .ifPresent(v -> props.put(key, v)); - } + private static void setConfig(Config config, Properties props, String key, String configKey) { + Optional.ofNullable(ConfigUtil.getString(config, configKey, null)) + .ifPresent(v -> props.put(key, v)); + } } diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 8f526c831b5c9..3d87267f8ebe3 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -4,28 +4,22 @@ import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import lombok.Data; - import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.PropertySource; - /** - * Minimal sharing between 
metadata-service and frontend - * Does not use the factories module to avoid transitive dependencies. + * Minimal sharing between metadata-service and frontend Does not use the factories module to avoid + * transitive dependencies. */ @EnableConfigurationProperties @PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class) @ConfigurationProperties @Data public class ConfigurationProvider { - /** - * Kafka related configs. - */ - private KafkaConfiguration kafka; + /** Kafka related configs. */ + private KafkaConfiguration kafka; - /** - * Configuration for caching - */ - private CacheConfiguration cache; + /** Configuration for caching */ + private CacheConfiguration cache; } diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java index 5c76f2572a936..60971bf06e27b 100644 --- a/datahub-frontend/app/controllers/Application.java +++ b/datahub-frontend/app/controllers/Application.java @@ -1,5 +1,8 @@ package controllers; +import static auth.AuthUtils.ACTOR; +import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME; + import akka.actor.ActorSystem; import akka.stream.ActorMaterializer; import akka.stream.Materializer; @@ -9,41 +12,35 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.util.Pair; import com.typesafe.config.Config; - +import java.io.InputStream; +import java.time.Duration; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.inject.Inject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.Environment; import play.http.HttpEntity; +import play.libs.Json; import play.libs.ws.InMemoryBodyWritable; import play.libs.ws.StandaloneWSClient; -import play.libs.Json; import play.libs.ws.ahc.StandaloneAhcWSClient; import play.mvc.Controller; import play.mvc.Http; import play.mvc.ResponseHeader; import play.mvc.Result; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.inject.Inject; -import java.io.InputStream; import play.mvc.Security; import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient; import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig; import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient; import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig; import utils.ConfigUtil; -import java.time.Duration; - -import static auth.AuthUtils.ACTOR; -import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME; - public class Application extends Controller { private final Logger _logger = LoggerFactory.getLogger(Application.class.getName()); @@ -61,22 +58,17 @@ public Application(Environment environment, @Nonnull Config config) { /** * Serves the build output index.html for any given path * - * @param path takes a path string, which essentially is ignored - * routing is managed client side + * @param path takes a path string, which essentially is ignored routing is managed client side * @return {Result} build output index.html resource */ @Nonnull private Result serveAsset(@Nullable String path) { try { InputStream indexHtml = _environment.resourceAsStream("public/index.html"); - return ok(indexHtml) - .withHeader("Cache-Control", "no-cache") - .as("text/html"); + return ok(indexHtml).withHeader("Cache-Control", "no-cache").as("text/html"); } catch (Exception 
e) { _logger.warn("Cannot load public/index.html resource. Static assets or assets jar missing?"); - return notFound() - .withHeader("Cache-Control", "no-cache") - .as("text/html"); + return notFound().withHeader("Cache-Control", "no-cache").as("text/html"); } } @@ -99,66 +91,87 @@ public Result index(@Nullable String path) { /** * Proxies requests to the Metadata Service * - * TODO: Investigate using mutual SSL authentication to call Metadata Service. + *
TODO: Investigate using mutual SSL authentication to call Metadata Service. */ @Security.Authenticated(Authenticator.class) - public CompletableFuture proxy(String path, Http.Request request) throws ExecutionException, InterruptedException { + public CompletableFuture proxy(String path, Http.Request request) + throws ExecutionException, InterruptedException { final String authorizationHeaderValue = getAuthorizationHeaderValueToProxy(request); final String resolvedUri = mapPath(request.uri()); - final String metadataServiceHost = ConfigUtil.getString( - _config, - ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = ConfigUtil.getInt( - _config, - ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = ConfigUtil.getBoolean( - _config, - ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); + final String metadataServiceHost = + ConfigUtil.getString( + _config, + ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); + final int metadataServicePort = + ConfigUtil.getInt( + _config, + ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); + final boolean metadataServiceUseSsl = + ConfigUtil.getBoolean( + _config, + ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); // TODO: Fully support custom internal SSL. final String protocol = metadataServiceUseSsl ? "https" : "http"; final Map> headers = request.getHeaders().toMap(); - if (headers.containsKey(Http.HeaderNames.HOST) && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { - headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); + if (headers.containsKey(Http.HeaderNames.HOST) + && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { + headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); } - return _ws.url(String.format("%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) + return _ws.url( + String.format( + "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) .setMethod(request.method()) - .setHeaders(headers - .entrySet() - .stream() - // Remove X-DataHub-Actor to prevent malicious delegation. - .filter(entry -> !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) - // Remove Host s.th. service meshes do not route to wrong host - .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ) + .setHeaders( + headers.entrySet().stream() + // Remove X-DataHub-Actor to prevent malicious delegation. + .filter( + entry -> + !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase( + entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) + // Remove Host s.th. 
service meshes do not route to wrong host + .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) .addHeader(Http.HeaderNames.AUTHORIZATION, authorizationHeaderValue) - .addHeader(AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) - .setBody(new InMemoryBodyWritable(ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), "application/json")) + .addHeader( + AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) + .setBody( + new InMemoryBodyWritable( + ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), + "application/json")) .setRequestTimeout(Duration.ofSeconds(120)) .execute() - .thenApply(apiResponse -> { - final ResponseHeader header = new ResponseHeader(apiResponse.getStatus(), apiResponse.getHeaders() - .entrySet() - .stream() - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); - final HttpEntity body = new HttpEntity.Strict(apiResponse.getBodyAsBytes(), Optional.ofNullable(apiResponse.getContentType())); - return new Result(header, body); - }).toCompletableFuture(); + .thenApply( + apiResponse -> { + final ResponseHeader header = + new ResponseHeader( + apiResponse.getStatus(), + apiResponse.getHeaders().entrySet().stream() + .filter( + entry -> + !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter( + entry -> + !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); + final HttpEntity body = + new HttpEntity.Strict( + apiResponse.getBodyAsBytes(), + Optional.ofNullable(apiResponse.getContentType())); + return new Result(header, body); + }) + .toCompletableFuture(); } /** @@ -173,11 +186,13 @@ public Result appConfig() { config.put("appVersion", _config.getString("app.version")); config.put("isInternal", _config.getBoolean("linkedin.internal")); config.put("shouldShowDatasetLineage", _config.getBoolean("linkedin.show.dataset.lineage")); - config.put("suggestionConfidenceThreshold", + config.put( + "suggestionConfidenceThreshold", Integer.valueOf(_config.getString("linkedin.suggestion.confidence.threshold"))); config.set("wikiLinks", wikiLinks()); config.set("tracking", trackingInfo()); - // In a staging environment, we can trigger this flag to be true so that the UI can handle based on + // In a staging environment, we can trigger this flag to be true so that the UI can handle based + // on // such config and alert users that their changes will not affect production data config.put("isStagingBanner", _config.getBoolean("ui.show.staging.banner")); config.put("isLiveDataWarning", _config.getBoolean("ui.show.live.data.banner")); @@ -206,6 +221,7 @@ public Result appConfig() { /** * Creates a JSON object of profile / avatar properties + * * @return Json avatar / profile image properties */ @Nonnull @@ -273,23 +289,26 @@ private StandaloneWSClient createWsClient() { } /** - * Returns the value of the Authorization Header to be provided when proxying requests to the downstream Metadata Service. 
+ * Returns the value of the Authorization Header to be provided when proxying requests to the + * downstream Metadata Service. * - * Currently, the Authorization header value may be derived from + *
Currently, the Authorization header value may be derived from * - * a) The value of the "token" attribute of the Session Cookie provided by the client. This value is set - * when creating the session token initially from a token granted by the Metadata Service. + *
a) The value of the "token" attribute of the Session Cookie provided by the client. This + * value is set when creating the session token initially from a token granted by the Metadata + * Service. * - * Or if the "token" attribute cannot be found in a session cookie, then we fallback to + *
Or if the "token" attribute cannot be found in a session cookie, then we fallback to * - * b) The value of the Authorization - * header provided in the original request. This will be used in cases where clients are making programmatic requests - * to Metadata Service APIs directly, without providing a session cookie (ui only). + *
b) The value of the Authorization header provided in the original request. This will be used + * in cases where clients are making programmatic requests to Metadata Service APIs directly, + * without providing a session cookie (ui only). * - * If neither are found, an empty string is returned. + *
If neither are found, an empty string is returned. */ private String getAuthorizationHeaderValueToProxy(Http.Request request) { - // If the session cookie has an authorization token, use that. If there's an authorization header provided, simply + // If the session cookie has an authorization token, use that. If there's an authorization + // header provided, simply // use that. String value = ""; if (request.session().data().containsKey(SESSION_COOKIE_GMS_TOKEN_NAME)) { @@ -301,11 +320,13 @@ private String getAuthorizationHeaderValueToProxy(Http.Request request) { } /** - * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This is sent along - * with any requests that have a valid frontend session cookie to identify the calling actor, for backwards compatibility. + * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This + * is sent along with any requests that have a valid frontend session cookie to identify the + * calling actor, for backwards compatibility. * - * If Metadata Service authentication is enabled, this value is not required because Actor context will most often come - * from the authentication credentials provided in the Authorization header. + *
If Metadata Service authentication is enabled, this value is not required because Actor + * context will most often come from the authentication credentials provided in the Authorization + * header. */ private String getDataHubActorHeader(Http.Request request) { String actor = request.session().data().get(ACTOR); diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index e28d4ba2ee37e..9c232e965a003 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -1,5 +1,9 @@ package controllers; +import static auth.AuthUtils.*; +import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; +import static org.pac4j.play.store.PlayCookieSessionStore.*; + import auth.AuthUtils; import auth.CookieConfigs; import auth.JAASConfigs; @@ -35,325 +39,337 @@ import play.mvc.Results; import security.AuthenticationManager; -import static auth.AuthUtils.*; -import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; -import static org.pac4j.play.store.PlayCookieSessionStore.*; - - // TODO add logging. public class AuthenticationController extends Controller { - public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; - private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; - private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; - private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; - - private static final String SSO_NO_REDIRECT_MESSAGE = "SSO is configured, however missing redirect from idp"; - - private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); - private final CookieConfigs _cookieConfigs; - private final JAASConfigs _jaasConfigs; - private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; - private final boolean _verbose; - - @Inject - private org.pac4j.core.config.Config _ssoConfig; - - @Inject - private PlaySessionStore _playSessionStore; - - @Inject - private SsoManager _ssoManager; - - @Inject - AuthServiceClient _authClient; - - @Inject - public AuthenticationController(@Nonnull Config configs) { - _cookieConfigs = new CookieConfigs(configs); - _jaasConfigs = new JAASConfigs(configs); - _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); - _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; + private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; + private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; + private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; + + private static final String SSO_NO_REDIRECT_MESSAGE = + "SSO is configured, however missing redirect from idp"; + + private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); + private final CookieConfigs _cookieConfigs; + private final JAASConfigs _jaasConfigs; + private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; + private final boolean _verbose; + + @Inject private org.pac4j.core.config.Config _ssoConfig; + + @Inject private PlaySessionStore _playSessionStore; + + @Inject private SsoManager _ssoManager; + + @Inject AuthServiceClient _authClient; + + @Inject + public AuthenticationController(@Nonnull Config configs) { + _cookieConfigs = new 
CookieConfigs(configs); + _jaasConfigs = new JAASConfigs(configs); + _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); + _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + } + + /** + * Route used to perform authentication, or redirect to log in if authentication fails. + * + *
If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider + * (Indirect auth). If not, we will fall back to the default username / password login experience + * (Direct auth). + */ + @Nonnull + public Result authenticate(Http.Request request) { + + // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is + // authenticated. + + final Optional maybeRedirectPath = + Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); + final String redirectPath = maybeRedirectPath.orElse("/"); + + if (AuthUtils.hasValidSessionCookie(request)) { + return Results.redirect(redirectPath); } - /** - * Route used to perform authentication, or redirect to log in if authentication fails. - * - * If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider (Indirect auth). - * If not, we will fall back to the default username / password login experience (Direct auth). - */ - @Nonnull - public Result authenticate(Http.Request request) { - - // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is authenticated. - - final Optional maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); - final String redirectPath = maybeRedirectPath.orElse("/"); - - if (AuthUtils.hasValidSessionCookie(request)) { - return Results.redirect(redirectPath); - } - - // 1. If SSO is enabled, redirect to IdP if not authenticated. - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, redirectPath).orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - - // 2. If either JAAS auth or Native auth is enabled, fallback to it - if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { - return Results.redirect( - LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); - } - - // 3. If no auth enabled, fallback to using default user account & redirect. - // Generate GMS session token, TODO: - final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); - return Results.redirect(redirectPath).withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) - .withCookies( - createActorCookie( - DEFAULT_ACTOR_URN.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + // 1. If SSO is enabled, redirect to IdP if not authenticated. + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, redirectPath) + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); } - /** - * Redirect to the identity provider for authentication. - */ - @Nonnull - public Result sso(Http.Request request) { - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, "/").orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + // 2. 
If either JAAS auth or Native auth is enabled, fallback to it + if (_jaasConfigs.isJAASEnabled() + || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { + return Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); } - /** - * Log in a user based on a username + password. - * - * TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the default. - */ - @Nonnull - public Result logIn(Http.Request request) { - boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); - _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; - if (noAuthEnabled) { - String message = "Neither JAAS nor native authentication is enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } - - final JsonNode json = request.body().asJson(); - final String username = json.findPath(USER_NAME).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - - if (StringUtils.isBlank(username)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); - return Results.badRequest(invalidCredsJson); - } - - JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); - boolean loginSucceeded = tryLogin(username, password); - - if (!loginSucceeded) { - return Results.badRequest(invalidCredsJson); - } - - final Urn actorUrn = new CorpuserUrn(username); - final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); - return createSession(actorUrn.toString(), accessToken); + // 3. If no auth enabled, fallback to using default user account & redirect. + // Generate GMS session token, TODO: + final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); + return Results.redirect(redirectPath) + .withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) + .withCookies( + createActorCookie( + DEFAULT_ACTOR_URN.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } + + /** Redirect to the identity provider for authentication. */ + @Nonnull + public Result sso(Http.Request request) { + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, "/") + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); + } + return Results.redirect( + LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + } + + /** + * Log in a user based on a username + password. + * + *
TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the + * default. + */ + @Nonnull + public Result logIn(Http.Request request) { + boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); + _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; + if (noAuthEnabled) { + String message = "Neither JAAS nor native authentication is enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); } - /** - * Sign up a native user based on a name, email, title, and password. The invite token must match an existing invite token. - * - */ - @Nonnull - public Result signUp(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } + final JsonNode json = request.body().asJson(); + final String username = json.findPath(USER_NAME).textValue(); + final String password = json.findPath(PASSWORD).textValue(); - final JsonNode json = request.body().asJson(); - final String fullName = json.findPath(FULL_NAME).textValue(); - final String email = json.findPath(EMAIL).textValue(); - final String title = json.findPath(TITLE).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); + if (StringUtils.isBlank(username)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(fullName)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); - return Results.badRequest(invalidCredsJson); - } + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = tryLogin(username, password); - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { - Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); - if (!emailValidator.isValid(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - } + if (!loginSucceeded) { + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn actorUrn = new CorpuserUrn(username); + final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); + return createSession(actorUrn.toString(), accessToken); + } + + /** + * Sign up a native user based on a name, email, title, and password. 
The invite token must match + * an existing invite token. + */ + @Nonnull + public Result signUp(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); + } - if (StringUtils.isBlank(title)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String fullName = json.findPath(FULL_NAME).textValue(); + final String email = json.findPath(EMAIL).textValue(); + final String title = json.findPath(TITLE).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); - if (StringUtils.isBlank(inviteToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(fullName)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } + if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { + Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); + if (!emailValidator.isValid(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } } - /** - * Reset a native user's credentials based on a username, old password, and new password. 
- * - */ - @Nonnull - public Result resetNativeUserCredentials(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return badRequest(error); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final JsonNode json = request.body().asJson(); - final String email = json.findPath(EMAIL).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String resetToken = json.findPath(RESET_TOKEN).textValue(); + if (StringUtils.isBlank(title)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(inviteToken)) { + JsonNode invalidCredsJson = + Json.newObject().put("message", "Invite token must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + /** Reset a native user's credentials based on a username, old password, and new password. 
*/ + @Nonnull + public Result resetNativeUserCredentials(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } - if (StringUtils.isBlank(resetToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String email = json.findPath(EMAIL).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String resetToken = json.findPath(RESET_TOKEN).textValue(); - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); } - private Optional redirectToIdentityProvider(Http.RequestHeader request, String redirectPath) { - final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); - final Client client = _ssoManager.getSsoProvider().client(); - configurePac4jSessionStore(playWebContext, client, redirectPath); - try { - final Optional action = client.getRedirectionAction(playWebContext); - return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); - } catch (Exception e) { - if (_verbose) { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", e); - } else { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); - } - return Optional.of(Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8)))); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); } - private void configurePac4jSessionStore(PlayWebContext context, Client client, String redirectPath) { - // Set the originally requested path for post-auth redirection. We split off into a separate cookie from the session - // to reduce size of the session cookie - FoundAction foundAction = new FoundAction(redirectPath); - byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); - String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); - context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); - // This is to prevent previous login attempts from being cached. - // We replicate the logic here, which is buried in the Pac4j client. - if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) != null) { - _logger.debug("Found previous login attempt. 
Removing it manually to prevent unexpected errors."); - _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); - } + if (StringUtils.isBlank(resetToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); + return Results.badRequest(invalidCredsJson); } - private String encodeRedirectUri(final String redirectUri) { - return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + private Optional redirectToIdentityProvider( + Http.RequestHeader request, String redirectPath) { + final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); + final Client client = _ssoManager.getSsoProvider().client(); + configurePac4jSessionStore(playWebContext, client, redirectPath); + try { + final Optional action = client.getRedirectionAction(playWebContext); + return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); + } catch (Exception e) { + if (_verbose) { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", + e); + } else { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); + } + return Optional.of( + Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8)))); } - - private boolean tryLogin(String username, String password) { - boolean loginSucceeded = false; - - // First try jaas login, if enabled - if (_jaasConfigs.isJAASEnabled()) { - try { - _logger.debug("Attempting jaas authentication"); - AuthenticationManager.authenticateJaasUser(username, password); - _logger.debug("Jaas authentication successful. Login succeeded"); - loginSucceeded = true; - } catch (Exception e) { - if (_verbose) { - _logger.debug("Jaas authentication error. Login failed", e); - } else { - _logger.debug("Jaas authentication error. Login failed"); - } - } - } - - // If jaas login fails or is disabled, try native auth login - if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { - final Urn userUrn = new CorpuserUrn(username); - final String userUrnString = userUrn.toString(); - loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); + } + + private void configurePac4jSessionStore( + PlayWebContext context, Client client, String redirectPath) { + // Set the originally requested path for post-auth redirection. We split off into a separate + // cookie from the session + // to reduce size of the session cookie + FoundAction foundAction = new FoundAction(redirectPath); + byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); + String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); + context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); + // This is to prevent previous login attempts from being cached. 
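    // (pac4j keys this marker in the Play session store under
    // client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, which is what the check
    // below clears.)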
+ // We replicate the logic here, which is buried in the Pac4j client. + if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) + != null) { + _logger.debug( + "Found previous login attempt. Removing it manually to prevent unexpected errors."); + _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); + } + } + + private String encodeRedirectUri(final String redirectUri) { + return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + } + + private boolean tryLogin(String username, String password) { + boolean loginSucceeded = false; + + // First try jaas login, if enabled + if (_jaasConfigs.isJAASEnabled()) { + try { + _logger.debug("Attempting jaas authentication"); + AuthenticationManager.authenticateJaasUser(username, password); + _logger.debug("Jaas authentication successful. Login succeeded"); + loginSucceeded = true; + } catch (Exception e) { + if (_verbose) { + _logger.debug("Jaas authentication error. Login failed", e); + } else { + _logger.debug("Jaas authentication error. Login failed"); } - - return loginSucceeded; + } } - private Result createSession(String userUrnString, String accessToken) { - return Results.ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies( - createActorCookie( - userUrnString, - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); - + // If jaas login fails or is disabled, try native auth login + if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { + final Urn userUrn = new CorpuserUrn(username); + final String userUrnString = userUrn.toString(); + loginSucceeded = + loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); } -} \ No newline at end of file + + return loginSucceeded; + } + + private Result createSession(String userUrnString, String accessToken) { + return Results.ok() + .withSession(createSessionMap(userUrnString, accessToken)) + .withCookies( + createActorCookie( + userUrnString, + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } +} diff --git a/datahub-frontend/app/controllers/CentralLogoutController.java b/datahub-frontend/app/controllers/CentralLogoutController.java index 5e24fe9f8220c..eea1c662ebf89 100644 --- a/datahub-frontend/app/controllers/CentralLogoutController.java +++ b/datahub-frontend/app/controllers/CentralLogoutController.java @@ -2,18 +2,15 @@ import com.typesafe.config.Config; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import javax.inject.Inject; import lombok.extern.slf4j.Slf4j; import org.pac4j.play.LogoutController; import play.mvc.Http; import play.mvc.Result; import play.mvc.Results; -import javax.inject.Inject; -import java.nio.charset.StandardCharsets; - -/** - * Responsible for handling logout logic with oidc providers - */ +/** Responsible for handling logout logic with oidc providers */ @Slf4j public class CentralLogoutController extends LogoutController { private static final String AUTH_URL_CONFIG_PATH = "/login"; @@ -28,26 +25,27 @@ public CentralLogoutController(Config config) { setLogoutUrlPattern(DEFAULT_BASE_URL_PATH + ".*"); setLocalLogout(true); setCentralLogout(true); - } - /** - * logout() method should not be called if oidc is not enabled - */ + /** logout() method should not be called if oidc is not enabled */ public Result executeLogout(Http.Request request) { if 
(_isOidcEnabled) { try { return logout(request).toCompletableFuture().get().withNewSession(); } catch (Exception e) { - log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e); + log.error( + "Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", + e); return redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8))) - .withNewSession(); + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8))) + .withNewSession(); } } - return Results.redirect(AUTH_URL_CONFIG_PATH) - .withNewSession(); + return Results.redirect(AUTH_URL_CONFIG_PATH).withNewSession(); } } diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java index 7a4b5585cc21a..9f4445b1aa5c7 100644 --- a/datahub-frontend/app/controllers/SsoCallbackController.java +++ b/datahub-frontend/app/controllers/SsoCallbackController.java @@ -1,6 +1,9 @@ package controllers; import auth.CookieConfigs; +import auth.sso.SsoManager; +import auth.sso.SsoProvider; +import auth.sso.oidc.OidcCallbackLogic; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemEntityClient; @@ -18,17 +21,13 @@ import org.pac4j.play.PlayWebContext; import play.mvc.Http; import play.mvc.Result; -import auth.sso.oidc.OidcCallbackLogic; -import auth.sso.SsoManager; -import auth.sso.SsoProvider; import play.mvc.Results; - /** * A dedicated Controller for handling redirects to DataHub by 3rd-party Identity Providers after * off-platform authentication. * - * Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines + *
<p>
Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines * the handling logic to invoke. */ @Slf4j @@ -46,56 +45,88 @@ public SsoCallbackController( _ssoManager = ssoManager; setDefaultUrl("/"); // By default, redirects to Home Page on log in. setSaveInSession(false); - setCallbackLogic(new SsoCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, new CookieConfigs(configs))); + setCallbackLogic( + new SsoCallbackLogic( + ssoManager, + systemAuthentication, + entityClient, + authClient, + new CookieConfigs(configs))); } public CompletionStage handleCallback(String protocol, Http.Request request) { if (shouldHandleCallback(protocol)) { log.debug(String.format("Handling SSO callback. Protocol: %s", protocol)); - return callback(request).handle((res, e) -> { - if (e != null) { - log.error("Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", e); - return Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode( - "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", - StandardCharsets.UTF_8))) - .discardingCookie("actor") - .withNewSession(); - } - return res; - }); + return callback(request) + .handle( + (res, e) -> { + if (e != null) { + log.error( + "Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", + e); + return Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", + StandardCharsets.UTF_8))) + .discardingCookie("actor") + .withNewSession(); + } + return res; + }); } - return CompletableFuture.completedFuture(Results.internalServerError( - String.format("Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); + return CompletableFuture.completedFuture( + Results.internalServerError( + String.format( + "Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); } - - /** - * Logic responsible for delegating to protocol-specific callback logic. - */ + /** Logic responsible for delegating to protocol-specific callback logic. 
*/ public class SsoCallbackLogic implements CallbackLogic { private final OidcCallbackLogic _oidcCallbackLogic; - SsoCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, final CookieConfigs cookieConfigs) { - _oidcCallbackLogic = new OidcCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); + SsoCallbackLogic( + final SsoManager ssoManager, + final Authentication systemAuthentication, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { + _oidcCallbackLogic = + new OidcCallbackLogic( + ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { if (SsoProvider.SsoProtocol.OIDC.equals(_ssoManager.getSsoProvider().protocol())) { - return _oidcCallbackLogic.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, defaultClient); + return _oidcCallbackLogic.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, + defaultClient); } // Should never occur. - throw new UnsupportedOperationException("Failed to find matching SSO Provider. Only one supported is OIDC."); + throw new UnsupportedOperationException( + "Failed to find matching SSO Provider. Only one supported is OIDC."); } } private boolean shouldHandleCallback(final String protocol) { - return _ssoManager.isSsoEnabled() && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + return _ssoManager.isSsoEnabled() + && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); } } diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java index 776ab5cad58ff..254a8cc640d0c 100644 --- a/datahub-frontend/app/controllers/TrackingController.java +++ b/datahub-frontend/app/controllers/TrackingController.java @@ -1,14 +1,15 @@ package controllers; +import static auth.AuthUtils.ACTOR; + import auth.Authenticator; import client.AuthServiceClient; +import client.KafkaTrackingProducer; import com.fasterxml.jackson.databind.JsonNode; import com.typesafe.config.Config; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Singleton; - - import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,57 +17,52 @@ import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import client.KafkaTrackingProducer; - -import static auth.AuthUtils.ACTOR; - // TODO: Migrate this to metadata-service. 
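The track() endpoint in the hunk below forwards each analytics event to Kafka as a ProducerRecord keyed by the session actor, so one user's events preserve ordering within a partition. The following standalone producer mirrors that record shape; the broker address and topic name are illustrative assumptions (the real topic is read from the analytics.tracking.topic config), not values taken from this patch.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public final class TrackingProducerSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

    // Record shape: <topic, actor urn as key, raw event JSON as value>.
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
      producer.send(
          new ProducerRecord<>(
              "DataHubUsageEvent_v1", // assumed topic name
              "urn:li:corpuser:datahub",
              "{\"type\":\"PageViewEvent\"}"));
    }
  }
}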
@Singleton public class TrackingController extends Controller { - private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); + private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); - private final String _topic; + private final String _topic; - @Inject - KafkaTrackingProducer _producer; + @Inject KafkaTrackingProducer _producer; - @Inject - AuthServiceClient _authClient; + @Inject AuthServiceClient _authClient; - @Inject - public TrackingController(@Nonnull Config config) { - _topic = config.getString("analytics.tracking.topic"); - } + @Inject + public TrackingController(@Nonnull Config config) { + _topic = config.getString("analytics.tracking.topic"); + } - @Security.Authenticated(Authenticator.class) - @Nonnull - public Result track(Http.Request request) throws Exception { - if (!_producer.isEnabled()) { - // If tracking is disabled, simply return a 200. - return status(200); - } + @Security.Authenticated(Authenticator.class) + @Nonnull + public Result track(Http.Request request) throws Exception { + if (!_producer.isEnabled()) { + // If tracking is disabled, simply return a 200. + return status(200); + } - JsonNode event; - try { - event = request.body().asJson(); - } catch (Exception e) { - return badRequest(); - } - final String actor = request.session().data().get(ACTOR); - try { - _logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); - final ProducerRecord record = new ProducerRecord<>( - _topic, - actor, - event.toString()); - _producer.send(record); - _authClient.track(event.toString()); - return ok(); - } catch (Exception e) { - _logger.error(String.format("Failed to emit product analytics event. actor: %s, event: %s", actor, event)); - return internalServerError(e.getMessage()); - } + JsonNode event; + try { + event = request.body().asJson(); + } catch (Exception e) { + return badRequest(); + } + final String actor = request.session().data().get(ACTOR); + try { + _logger.debug( + String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); + final ProducerRecord record = + new ProducerRecord<>(_topic, actor, event.toString()); + _producer.send(record); + _authClient.track(event.toString()); + return ok(); + } catch (Exception e) { + _logger.error( + String.format( + "Failed to emit product analytics event. 
actor: %s, event: %s", actor, event)); + return internalServerError(e.getMessage()); } + } } diff --git a/datahub-frontend/app/security/AuthUtil.java b/datahub-frontend/app/security/AuthUtil.java index 8af90b37a6f31..55752644ada70 100644 --- a/datahub-frontend/app/security/AuthUtil.java +++ b/datahub-frontend/app/security/AuthUtil.java @@ -8,52 +8,53 @@ import javax.crypto.spec.SecretKeySpec; import org.apache.commons.codec.digest.HmacAlgorithms; - -/** - * Auth Utils - * Adheres to HSEC requirement for creating application tokens - */ +/** Auth Utils Adheres to HSEC requirement for creating application tokens */ public final class AuthUtil { private static final String HMAC_SHA256_ALGORITHM = HmacAlgorithms.HMAC_SHA_256.toString(); private static final String DELIIMITER = ":"; private static final String HEX_CHARS = "0123456789ABCDEF"; - private AuthUtil() { } + private AuthUtil() {} /** * Generate hash string using the secret HMAC Key + * * @param value value to be hashed * @param hmacKey secret HMAC key * @return Hashed string using the secret key * @throws NoSuchAlgorithmException * @throws InvalidKeyException */ - public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlgorithmException, InvalidKeyException { - //Time-stamp at Encryption time + public static String generateHash(String value, byte[] hmacKey) + throws NoSuchAlgorithmException, InvalidKeyException { + // Time-stamp at Encryption time long tStamp = System.currentTimeMillis(); String uTValue = new String(); String cValue; String finalEncValue; - //Concatenated Values + // Concatenated Values uTValue = uTValue.concat(value).concat(":").concat(Long.toString(tStamp)); cValue = uTValue; - //Digest - HMAC-SHA256 + // Digest - HMAC-SHA256 SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); byte[] rawHmac = mac.doFinal(uTValue.getBytes()); String hmacString = getHex(rawHmac); - finalEncValue = Base64.getEncoder().encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); + finalEncValue = + Base64.getEncoder() + .encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); return finalEncValue; } /** * Validate the one-way hash string + * * @param hashedValue Hashed value to be validated * @param hmacKey HMAC Key used to create the hash * @param sessionWindow previously defined session window to validate if the hash is expired @@ -62,7 +63,7 @@ public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlg */ public static String verifyHash(String hashedValue, byte[] hmacKey, long sessionWindow) throws GeneralSecurityException { - //Username:Timestamp:SignedHMAC(Username:Timestamp) + // Username:Timestamp:SignedHMAC(Username:Timestamp) String[] decryptedHash = decryptBase64Hash(hashedValue); String username = decryptedHash[0]; String timestamp = decryptedHash[1]; @@ -70,7 +71,7 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session long newTStamp = System.currentTimeMillis(); String newUTValue = username.concat(DELIIMITER).concat(timestamp); - //Digest - HMAC-SHA1 Verify + // Digest - HMAC-SHA1 Verify SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); @@ -87,8 +88,10 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session return decryptedHash[0]; } + /** * Decrypt base64 hash + * * @param value base 64 hash 
string * @return Decrypted base 64 string */ @@ -96,8 +99,10 @@ private static String[] decryptBase64Hash(String value) { String decodedBase64 = new String(Base64.getDecoder().decode(value)); return decodedBase64.split(DELIIMITER); } + /** * Get Hex string from byte array + * * @param raw byte array * @return Hex representation of the byte array */ @@ -114,14 +119,16 @@ private static String getHex(byte[] raw) { return hex.toString(); } + /** * Compares two HMAC byte arrays + * * @param a HMAC byte array 1 * @param b HMAC byte array 2 * @return true if the two HMAC are identical */ private static boolean isEqual(byte[] a, byte[] b) { - if (a == null || b == null || a.length != b.length) { + if (a == null || b == null || a.length != b.length) { return false; } @@ -133,4 +140,4 @@ private static boolean isEqual(byte[] a, byte[] b) { return result == 0; } -} \ No newline at end of file +} diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index 67bcf7e404335..f46dc57c232bd 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ b/datahub-frontend/app/security/AuthenticationManager.java @@ -15,13 +15,12 @@ import org.eclipse.jetty.jaas.PropertyUserStoreManager; import play.Logger; - public class AuthenticationManager { - private AuthenticationManager(boolean verbose) { - } + private AuthenticationManager(boolean verbose) {} - public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception { + public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) + throws Exception { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication"); PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager(); @@ -29,10 +28,12 @@ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull Strin jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager)); JAASLoginService.INSTANCE.set(jaasLoginService); try { - LoginContext lc = new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); + LoginContext lc = + new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); lc.login(); } catch (LoginException le) { - AuthenticationException authenticationException = new AuthenticationException(le.getMessage()); + AuthenticationException authenticationException = + new AuthenticationException(le.getMessage()); authenticationException.setRootCause(le); throw authenticationException; } @@ -52,7 +53,8 @@ public void handle(@Nonnull Callback[] callbacks) { NameCallback nc = null; PasswordCallback pc = null; for (Callback callback : callbacks) { - Logger.debug("The submitted callback is of type: " + callback.getClass() + " : " + callback); + Logger.debug( + "The submitted callback is of type: " + callback.getClass() + " : " + callback); if (callback instanceof NameCallback) { nc = (NameCallback) callback; nc.setName(this.username); diff --git a/datahub-frontend/app/security/DummyLoginModule.java b/datahub-frontend/app/security/DummyLoginModule.java index 56822f0805be4..c46fa29e1599a 100644 --- a/datahub-frontend/app/security/DummyLoginModule.java +++ b/datahub-frontend/app/security/DummyLoginModule.java @@ -1,21 +1,22 @@ package security; +import java.util.Map; import javax.security.auth.Subject; import 
javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.LoginException; import javax.security.auth.spi.LoginModule; -import java.util.Map; - /** - * This LoginModule performs dummy authentication. - * Any username and password can work for authentication + * This LoginModule performs dummy authentication. Any username and password can work for + * authentication */ public class DummyLoginModule implements LoginModule { - public void initialize(final Subject subject, final CallbackHandler callbackHandler, - final Map sharedState, final Map options) { - } + public void initialize( + final Subject subject, + final CallbackHandler callbackHandler, + final Map sharedState, + final Map options) {} public boolean login() throws LoginException { return true; @@ -32,5 +33,4 @@ public boolean abort() throws LoginException { public boolean logout() throws LoginException { return true; } - -} \ No newline at end of file +} diff --git a/datahub-frontend/app/utils/ConfigUtil.java b/datahub-frontend/app/utils/ConfigUtil.java index b99a5e123b9eb..5c80389c96da4 100644 --- a/datahub-frontend/app/utils/ConfigUtil.java +++ b/datahub-frontend/app/utils/ConfigUtil.java @@ -3,18 +3,16 @@ import com.linkedin.util.Configuration; import com.typesafe.config.Config; - public class ConfigUtil { - private ConfigUtil() { - - } + private ConfigUtil() {} // New configurations, provided via application.conf file. public static final String METADATA_SERVICE_HOST_CONFIG_PATH = "metadataService.host"; public static final String METADATA_SERVICE_PORT_CONFIG_PATH = "metadataService.port"; public static final String METADATA_SERVICE_USE_SSL_CONFIG_PATH = "metadataService.useSsl"; - public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = "metadataService.sslProtocol"; + public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = + "metadataService.sslProtocol"; // Legacy env-var based config values, for backwards compatibility: public static final String GMS_HOST_ENV_VAR = "DATAHUB_GMS_HOST"; @@ -27,10 +25,14 @@ private ConfigUtil() { public static final String DEFAULT_GMS_PORT = "8080"; public static final String DEFAULT_GMS_USE_SSL = "False"; - public static final String DEFAULT_METADATA_SERVICE_HOST = Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); - public static final Integer DEFAULT_METADATA_SERVICE_PORT = Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); - public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); - public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); + public static final String DEFAULT_METADATA_SERVICE_HOST = + Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); + public static final Integer DEFAULT_METADATA_SERVICE_PORT = + Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); + public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = + Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); + public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = + Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); public static boolean getBoolean(Config config, String key) { return config.hasPath(key) && config.getBoolean(key); diff --git a/datahub-frontend/app/utils/SearchUtil.java b/datahub-frontend/app/utils/SearchUtil.java index 
2c52ff5b40156..803c70a63646a 100644 --- a/datahub-frontend/app/utils/SearchUtil.java +++ b/datahub-frontend/app/utils/SearchUtil.java @@ -2,29 +2,26 @@ import javax.annotation.Nonnull; - -/** - * Utility functions for Search - */ +/** Utility functions for Search */ public class SearchUtil { - private SearchUtil() { - //utility class - } + private SearchUtil() { + // utility class + } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - * - * @param input - * @return - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + * + * @param input + * @return + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } + return input; + } } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 9a5fb3210a311..a1b97701dbf88 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -55,8 +55,6 @@ tasks.withType(Checkstyle) { exclude "**/generated/**" } -checkstyleMain.source = "app/" - /* PLAY UPGRADE NOTE diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java index f27fefdb79669..a5da0951d1632 100644 --- a/datahub-frontend/test/app/ApplicationTest.java +++ b/datahub-frontend/test/app/ApplicationTest.java @@ -1,11 +1,22 @@ package app; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static play.mvc.Http.Status.NOT_FOUND; +import static play.mvc.Http.Status.OK; +import static play.test.Helpers.fakeRequest; +import static play.test.Helpers.route; + import com.nimbusds.jwt.JWT; import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.jwt.JWTParser; import controllers.routes; +import java.io.IOException; +import java.net.InetAddress; import java.text.ParseException; import java.util.Date; +import java.util.List; +import java.util.Map; import no.nav.security.mock.oauth2.MockOAuth2Server; import no.nav.security.mock.oauth2.token.DefaultOAuth2TokenCallback; import okhttp3.mockwebserver.MockResponse; @@ -26,22 +37,9 @@ import play.mvc.Http; import play.mvc.Result; import play.test.Helpers; - import play.test.TestBrowser; import play.test.WithBrowser; -import java.io.IOException; -import java.net.InetAddress; -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static play.mvc.Http.Status.NOT_FOUND; -import static play.mvc.Http.Status.OK; -import static play.test.Helpers.fakeRequest; -import static play.test.Helpers.route; - @TestInstance(TestInstance.Lifecycle.PER_CLASS) @SetEnvironmentVariable(key = "DATAHUB_SECRET", value = "test") @SetEnvironmentVariable(key = "KAFKA_BOOTSTRAP_SERVER", value = "") @@ -56,11 +54,15 @@ public class ApplicationTest extends WithBrowser { @Override protected Application provideApplication() { return new 
GuiceApplicationBuilder() - .configure("metadataService.port", String.valueOf(gmsServerPort())) - .configure("auth.baseUrl", "http://localhost:" + providePort()) - .configure("auth.oidc.discoveryUri", "http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration") - .in(new Environment(Mode.TEST)).build(); + .configure("metadataService.port", String.valueOf(gmsServerPort())) + .configure("auth.baseUrl", "http://localhost:" + providePort()) + .configure( + "auth.oidc.discoveryUri", + "http://localhost:" + + oauthServerPort() + + "/testIssuer/.well-known/openid-configuration") + .in(new Environment(Mode.TEST)) + .build(); } @Override @@ -90,16 +92,20 @@ public int gmsServerPort() { public void init() throws IOException { _gmsServer = new MockWebServer(); _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER))); - _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); + _gmsServer.enqueue( + new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); _gmsServer.start(gmsServerPort()); _oauthServer = new MockOAuth2Server(); _oauthServer.enqueueCallback( - new DefaultOAuth2TokenCallback(ISSUER_ID, "testUser", List.of(), Map.of( - "email", "testUser@myCompany.com", - "groups", "myGroup" - ), 600) - ); + new DefaultOAuth2TokenCallback( + ISSUER_ID, + "testUser", + List.of(), + Map.of( + "email", "testUser@myCompany.com", + "groups", "myGroup"), + 600)); _oauthServer.start(InetAddress.getByName("localhost"), oauthServerPort()); // Discovery url to authorization server metadata @@ -147,8 +153,9 @@ public void testIndexNotFound() { @Test public void testOpenIdConfig() { - assertEquals("http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration", _wellKnownUrl); + assertEquals( + "http://localhost:" + oauthServerPort() + "/testIssuer/.well-known/openid-configuration", + _wellKnownUrl); } @Test @@ -166,8 +173,13 @@ public void testHappyPathOidc() throws ParseException { Map data = (Map) claims.getClaim("data"); assertEquals(TEST_TOKEN, data.get("token")); assertEquals(TEST_USER, data.get("actor")); - // Default expiration is 24h, so should always be less than current time + 1 day since it stamps the time before this executes - assertTrue(claims.getExpirationTime().compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) < 0); + // Default expiration is 24h, so should always be less than current time + 1 day since it stamps + // the time before this executes + assertTrue( + claims + .getExpirationTime() + .compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) + < 0); } @Test diff --git a/datahub-frontend/test/security/DummyLoginModuleTest.java b/datahub-frontend/test/security/DummyLoginModuleTest.java index 6727513d884af..9bf2b5dd4d11c 100644 --- a/datahub-frontend/test/security/DummyLoginModuleTest.java +++ b/datahub-frontend/test/security/DummyLoginModuleTest.java @@ -1,14 +1,12 @@ package security; -import com.sun.security.auth.callback.TextCallbackHandler; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import com.sun.security.auth.callback.TextCallbackHandler; import java.util.HashMap; import javax.security.auth.Subject; import javax.security.auth.login.LoginException; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class DummyLoginModuleTest { diff --git 
a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index ed16014b58e59..a27a1462a8a27 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,8 @@ package security; +import static auth.sso.oidc.OidcConfigs.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import auth.sso.oidc.OidcConfigs; import auth.sso.oidc.OidcProvider; import com.typesafe.config.Config; @@ -19,296 +22,290 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; -import static auth.sso.oidc.OidcConfigs.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - - public class OidcConfigurationTest { - private static final com.typesafe.config.Config CONFIG = new Config() { - - private final Map _map = new HashMap<>(); - - @Override - public ConfigObject root() { - return null; - } - - @Override - public ConfigOrigin origin() { - return null; - } - - @Override - public Config withFallback(ConfigMergeable other) { - return null; - } - - @Override - public Config resolve() { - return null; - } - - @Override - public Config resolve(ConfigResolveOptions options) { - return null; - } - - @Override - public boolean isResolved() { - return false; - } - - @Override - public Config resolveWith(Config source) { - return null; - } - - @Override - public Config resolveWith(Config source, ConfigResolveOptions options) { - return null; - } - - @Override - public void checkValid(Config reference, String... restrictToPaths) { - - } - - @Override - public boolean hasPath(String path) { - return true; - } - - @Override - public boolean hasPathOrNull(String path) { - return false; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public Set> entrySet() { - return null; - } - - @Override - public boolean getIsNull(String path) { - return false; - } - - @Override - public boolean getBoolean(String path) { - return false; - } - - @Override - public Number getNumber(String path) { - return null; - } - - @Override - public int getInt(String path) { - return 0; - } - - @Override - public long getLong(String path) { - return 0; - } - - @Override - public double getDouble(String path) { - return 0; - } - - @Override - public String getString(String path) { - return (String) _map.getOrDefault(path, "1"); - } - - @Override - public > T getEnum(Class enumClass, String path) { - return null; - } - - @Override - public ConfigObject getObject(String path) { - return null; - } - - @Override - public Config getConfig(String path) { - return null; - } - - @Override - public Object getAnyRef(String path) { - return null; - } - - @Override - public ConfigValue getValue(String path) { - return null; - } - - @Override - public Long getBytes(String path) { - return null; - } - - @Override - public ConfigMemorySize getMemorySize(String path) { - return null; - } - - @Override - public Long getMilliseconds(String path) { - return null; - } - - @Override - public Long getNanoseconds(String path) { - return null; - } - - @Override - public long getDuration(String path, TimeUnit unit) { - return 0; - } - - @Override - public Duration getDuration(String path) { - return null; - } - - @Override - public Period getPeriod(String path) { - return null; - } - - @Override - public TemporalAmount getTemporal(String path) { - return null; - } - - @Override - 
public ConfigList getList(String path) { - return null; - } - - @Override - public List getBooleanList(String path) { - return null; - } - - @Override - public List getNumberList(String path) { - return null; - } - - @Override - public List getIntList(String path) { - return null; - } - - @Override - public List getLongList(String path) { - return null; - } - - @Override - public List getDoubleList(String path) { - return null; - } - - @Override - public List getStringList(String path) { - return null; - } - - @Override - public > List getEnumList(Class enumClass, String path) { - return null; - } - - @Override - public List getObjectList(String path) { - return null; - } - - @Override - public List getConfigList(String path) { - return null; - } - - @Override - public List getAnyRefList(String path) { - return null; - } - - @Override - public List getBytesList(String path) { - return null; - } - - @Override - public List getMemorySizeList(String path) { - return null; - } - - @Override - public List getMillisecondsList(String path) { - return null; - } - - @Override - public List getNanosecondsList(String path) { - return null; - } - - @Override - public List getDurationList(String path, TimeUnit unit) { - return null; - } - - @Override - public List getDurationList(String path) { - return null; - } - - @Override - public Config withOnlyPath(String path) { - return null; - } - - @Override - public Config withoutPath(String path) { - return null; - } - - @Override - public Config atPath(String path) { - return null; - } - - @Override - public Config atKey(String key) { - return null; - } - - @Override - public Config withValue(String path, ConfigValue value) { - _map.put(path, value.unwrapped()); - return this; - } - }; + private static final com.typesafe.config.Config CONFIG = + new Config() { + + private final Map _map = new HashMap<>(); + + @Override + public ConfigObject root() { + return null; + } + + @Override + public ConfigOrigin origin() { + return null; + } + + @Override + public Config withFallback(ConfigMergeable other) { + return null; + } + + @Override + public Config resolve() { + return null; + } + + @Override + public Config resolve(ConfigResolveOptions options) { + return null; + } + + @Override + public boolean isResolved() { + return false; + } + + @Override + public Config resolveWith(Config source) { + return null; + } + + @Override + public Config resolveWith(Config source, ConfigResolveOptions options) { + return null; + } + + @Override + public void checkValid(Config reference, String... 
restrictToPaths) {} + + @Override + public boolean hasPath(String path) { + return true; + } + + @Override + public boolean hasPathOrNull(String path) { + return false; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public Set> entrySet() { + return null; + } + + @Override + public boolean getIsNull(String path) { + return false; + } + + @Override + public boolean getBoolean(String path) { + return false; + } + + @Override + public Number getNumber(String path) { + return null; + } + + @Override + public int getInt(String path) { + return 0; + } + + @Override + public long getLong(String path) { + return 0; + } + + @Override + public double getDouble(String path) { + return 0; + } + + @Override + public String getString(String path) { + return (String) _map.getOrDefault(path, "1"); + } + + @Override + public > T getEnum(Class enumClass, String path) { + return null; + } + + @Override + public ConfigObject getObject(String path) { + return null; + } + + @Override + public Config getConfig(String path) { + return null; + } + + @Override + public Object getAnyRef(String path) { + return null; + } + + @Override + public ConfigValue getValue(String path) { + return null; + } + + @Override + public Long getBytes(String path) { + return null; + } + + @Override + public ConfigMemorySize getMemorySize(String path) { + return null; + } + + @Override + public Long getMilliseconds(String path) { + return null; + } + + @Override + public Long getNanoseconds(String path) { + return null; + } + + @Override + public long getDuration(String path, TimeUnit unit) { + return 0; + } + + @Override + public Duration getDuration(String path) { + return null; + } + + @Override + public Period getPeriod(String path) { + return null; + } + + @Override + public TemporalAmount getTemporal(String path) { + return null; + } + + @Override + public ConfigList getList(String path) { + return null; + } + + @Override + public List getBooleanList(String path) { + return null; + } + + @Override + public List getNumberList(String path) { + return null; + } + + @Override + public List getIntList(String path) { + return null; + } + + @Override + public List getLongList(String path) { + return null; + } + + @Override + public List getDoubleList(String path) { + return null; + } + + @Override + public List getStringList(String path) { + return null; + } + + @Override + public > List getEnumList(Class enumClass, String path) { + return null; + } + + @Override + public List getObjectList(String path) { + return null; + } + + @Override + public List getConfigList(String path) { + return null; + } + + @Override + public List getAnyRefList(String path) { + return null; + } + + @Override + public List getBytesList(String path) { + return null; + } + + @Override + public List getMemorySizeList(String path) { + return null; + } + + @Override + public List getMillisecondsList(String path) { + return null; + } + + @Override + public List getNanosecondsList(String path) { + return null; + } + + @Override + public List getDurationList(String path, TimeUnit unit) { + return null; + } + + @Override + public List getDurationList(String path) { + return null; + } + + @Override + public Config withOnlyPath(String path) { + return null; + } + + @Override + public Config withoutPath(String path) { + return null; + } + + @Override + public Config atPath(String path) { + return null; + } + + @Override + public Config atKey(String key) { + return null; + } + + @Override + public Config withValue(String path, 
ConfigValue value) { + _map.put(path, value.unwrapped()); + return this; + } + }; @Test public void readTimeoutPropagation() { diff --git a/datahub-frontend/test/utils/SearchUtilTest.java b/datahub-frontend/test/utils/SearchUtilTest.java index 428566ae3f424..6767fa5637469 100644 --- a/datahub-frontend/test/utils/SearchUtilTest.java +++ b/datahub-frontend/test/utils/SearchUtilTest.java @@ -1,17 +1,18 @@ package utils; -import org.junit.jupiter.api.Test; - import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; + public class SearchUtilTest { - @Test - public void testEscapeForwardSlash() { - // escape "/" - assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); - // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to retain the regex behaviour with "*" - assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); - assertEquals("", ""); - assertEquals("foo", "foo"); - } + @Test + public void testEscapeForwardSlash() { + // escape "/" + assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); + // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to + // retain the regex behaviour with "*" + assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); + assertEquals("", ""); + assertEquals("foo", "foo"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 4488f27c19d80..e45bed33eb023 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql; -/** - * Constants relating to GraphQL type system & execution. - */ +/** Constants relating to GraphQL type system & execution. 
*/ public class Constants { - private Constants() { }; + private Constants() {} + ; - public static final String URN_FIELD_NAME = "urn"; - public static final String URNS_FIELD_NAME = "urns"; - public static final String GMS_SCHEMA_FILE = "entity.graphql"; - public static final String SEARCH_SCHEMA_FILE = "search.graphql"; - public static final String APP_SCHEMA_FILE = "app.graphql"; - public static final String AUTH_SCHEMA_FILE = "auth.graphql"; - public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; - public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; - public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; - public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; - public static final String TESTS_SCHEMA_FILE = "tests.graphql"; - public static final String STEPS_SCHEMA_FILE = "step.graphql"; - public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; - public static final String BROWSE_PATH_DELIMITER = "/"; - public static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; - - public static final String ENTITY_FILTER_NAME = "_entityType"; + public static final String URN_FIELD_NAME = "urn"; + public static final String URNS_FIELD_NAME = "urns"; + public static final String GMS_SCHEMA_FILE = "entity.graphql"; + public static final String SEARCH_SCHEMA_FILE = "search.graphql"; + public static final String APP_SCHEMA_FILE = "app.graphql"; + public static final String AUTH_SCHEMA_FILE = "auth.graphql"; + public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; + public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; + public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; + public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; + public static final String TESTS_SCHEMA_FILE = "tests.graphql"; + public static final String STEPS_SCHEMA_FILE = "step.graphql"; + public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String BROWSE_PATH_DELIMITER = "/"; + public static final String BROWSE_PATH_V2_DELIMITER = "␟"; + public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; + public static final String ENTITY_FILTER_NAME = "_entityType"; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 9ea8126a07ab2..f0cb56b1a99ce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; +import static graphql.scalars.ExtendedScalars.*; + import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; @@ -68,7 +72,6 @@ import com.linkedin.datahub.graphql.generated.ListQueriesResult; import com.linkedin.datahub.graphql.generated.ListTestsResult; import com.linkedin.datahub.graphql.generated.ListViewsResult; -import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; import 
com.linkedin.datahub.graphql.generated.MLFeatureTable; @@ -78,6 +81,7 @@ import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; +import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.Owner; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; @@ -284,7 +288,6 @@ import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType; import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper; import com.linkedin.datahub.graphql.types.domain.DomainType; -import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; @@ -297,6 +300,7 @@ import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType; import com.linkedin.datahub.graphql.types.query.QueryType; import com.linkedin.datahub.graphql.types.role.DataHubRoleType; +import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; import com.linkedin.datahub.graphql.types.tag.TagType; import com.linkedin.datahub.graphql.types.test.TestType; @@ -352,205 +356,191 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; -import static graphql.scalars.ExtendedScalars.*; - - /** - * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph. + * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS + * graph. 
*/ @Slf4j @Getter public class GmsGraphQLEngine { - private final EntityClient entityClient; - private final SystemEntityClient systemEntityClient; - private final GraphClient graphClient; - private final UsageClient usageClient; - private final SiblingGraphService siblingGraphService; - - private final EntityService entityService; - private final AnalyticsService analyticsService; - private final RecommendationsService recommendationsService; - private final EntityRegistry entityRegistry; - private final StatefulTokenService statefulTokenService; - private final SecretService secretService; - private final GitVersion gitVersion; - private final boolean supportsImpactAnalysis; - private final TimeseriesAspectService timeseriesAspectService; - private final TimelineService timelineService; - private final NativeUserService nativeUserService; - private final GroupService groupService; - private final RoleService roleService; - private final InviteTokenService inviteTokenService; - private final PostService postService; - private final SettingsService settingsService; - private final ViewService viewService; - private final OwnershipTypeService ownershipTypeService; - private final LineageService lineageService; - private final QueryService queryService; - private final DataProductService dataProductService; - - private final FeatureFlags featureFlags; - - private final IngestionConfiguration ingestionConfiguration; - private final AuthenticationConfiguration authenticationConfiguration; - private final AuthorizationConfiguration authorizationConfiguration; - private final VisualConfiguration visualConfiguration; - private final TelemetryConfiguration telemetryConfiguration; - private final TestsConfiguration testsConfiguration; - private final DataHubConfiguration datahubConfiguration; - private final ViewsConfiguration viewsConfiguration; - - private final DatasetType datasetType; - - private final RoleType roleType; - - private final CorpUserType corpUserType; - private final CorpGroupType corpGroupType; - private final ChartType chartType; - private final DashboardType dashboardType; - private final DataPlatformType dataPlatformType; - private final TagType tagType; - private final MLModelType mlModelType; - private final MLModelGroupType mlModelGroupType; - private final MLFeatureType mlFeatureType; - private final MLFeatureTableType mlFeatureTableType; - private final MLPrimaryKeyType mlPrimaryKeyType; - private final DataFlowType dataFlowType; - private final DataJobType dataJobType; - private final GlossaryTermType glossaryTermType; - private final GlossaryNodeType glossaryNodeType; - private final AspectType aspectType; - private final ContainerType containerType; - private final DomainType domainType; - private final NotebookType notebookType; - private final AssertionType assertionType; - private final VersionedDatasetType versionedDatasetType; - private final DataPlatformInstanceType dataPlatformInstanceType; - private final AccessTokenMetadataType accessTokenMetadataType; - private final TestType testType; - private final DataHubPolicyType dataHubPolicyType; - private final DataHubRoleType dataHubRoleType; - private final SchemaFieldType schemaFieldType; - private final DataHubViewType dataHubViewType; - private final QueryType queryType; - private final DataProductType dataProductType; - private final OwnershipType ownershipType; - - /** - * A list of GraphQL Plugins that extend the core engine - */ - private final List graphQLPlugins; - - /** - * Configures the graph objects 
that can be fetched primary key.
-     */
-    public final List<EntityType<?, ?>> entityTypes;
-
-    /**
-     * Configures all graph objects
-     */
-    public final List<LoadableType<?, ?>> loadableTypes;
-
-    /**
-     * Configures the graph objects for owner
-     */
-    public final List<LoadableType<?, ?>> ownerTypes;
-
-    /**
-     * Configures the graph objects that can be searched.
-     */
-    public final List<SearchableEntityType<?, ?>> searchableTypes;
-
-    /**
-     * Configures the graph objects that can be browsed.
-     */
-    public final List<BrowsableEntityType<?, ?>> browsableTypes;
-
-    public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
-
-        this.graphQLPlugins = List.of(
+  private final EntityClient entityClient;
+  private final SystemEntityClient systemEntityClient;
+  private final GraphClient graphClient;
+  private final UsageClient usageClient;
+  private final SiblingGraphService siblingGraphService;
+
+  private final EntityService entityService;
+  private final AnalyticsService analyticsService;
+  private final RecommendationsService recommendationsService;
+  private final EntityRegistry entityRegistry;
+  private final StatefulTokenService statefulTokenService;
+  private final SecretService secretService;
+  private final GitVersion gitVersion;
+  private final boolean supportsImpactAnalysis;
+  private final TimeseriesAspectService timeseriesAspectService;
+  private final TimelineService timelineService;
+  private final NativeUserService nativeUserService;
+  private final GroupService groupService;
+  private final RoleService roleService;
+  private final InviteTokenService inviteTokenService;
+  private final PostService postService;
+  private final SettingsService settingsService;
+  private final ViewService viewService;
+  private final OwnershipTypeService ownershipTypeService;
+  private final LineageService lineageService;
+  private final QueryService queryService;
+  private final DataProductService dataProductService;
+
+  private final FeatureFlags featureFlags;
+
+  private final IngestionConfiguration ingestionConfiguration;
+  private final AuthenticationConfiguration authenticationConfiguration;
+  private final AuthorizationConfiguration authorizationConfiguration;
+  private final VisualConfiguration visualConfiguration;
+  private final TelemetryConfiguration telemetryConfiguration;
+  private final TestsConfiguration testsConfiguration;
+  private final DataHubConfiguration datahubConfiguration;
+  private final ViewsConfiguration viewsConfiguration;
+
+  private final DatasetType datasetType;
+
+  private final RoleType roleType;
+
+  private final CorpUserType corpUserType;
+  private final CorpGroupType corpGroupType;
+  private final ChartType chartType;
+  private final DashboardType dashboardType;
+  private final DataPlatformType dataPlatformType;
+  private final TagType tagType;
+  private final MLModelType mlModelType;
+  private final MLModelGroupType mlModelGroupType;
+  private final MLFeatureType mlFeatureType;
+  private final MLFeatureTableType mlFeatureTableType;
+  private final MLPrimaryKeyType mlPrimaryKeyType;
+  private final DataFlowType dataFlowType;
+  private final DataJobType dataJobType;
+  private final GlossaryTermType glossaryTermType;
+  private final GlossaryNodeType glossaryNodeType;
+  private final AspectType aspectType;
+  private final ContainerType containerType;
+  private final DomainType domainType;
+  private final NotebookType notebookType;
+  private final AssertionType assertionType;
+  private final VersionedDatasetType versionedDatasetType;
+  private final DataPlatformInstanceType dataPlatformInstanceType;
+  private final AccessTokenMetadataType accessTokenMetadataType;
+  private final TestType testType;
+  private final DataHubPolicyType dataHubPolicyType;
+  private final DataHubRoleType dataHubRoleType;
+  private final SchemaFieldType schemaFieldType;
+  private final DataHubViewType dataHubViewType;
+  private final QueryType queryType;
+  private final DataProductType dataProductType;
+  private final OwnershipType ownershipType;
+
+  /** A list of GraphQL Plugins that extend the core engine */
+  private final List<GmsGraphQLPlugin> graphQLPlugins;
+
+  /** Configures the graph objects that can be fetched by primary key. */
+  public final List<EntityType<?, ?>> entityTypes;
+
+  /** Configures all graph objects */
+  public final List<LoadableType<?, ?>> loadableTypes;
+
+  /** Configures the graph objects for owner */
+  public final List<LoadableType<?, ?>> ownerTypes;
+
+  /** Configures the graph objects that can be searched. */
+  public final List<SearchableEntityType<?, ?>> searchableTypes;
+
+  /** Configures the graph objects that can be browsed. */
+  public final List<BrowsableEntityType<?, ?>> browsableTypes;
+
+  public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
+
+    this.graphQLPlugins =
+        List.of(
            // Add new plugins here
-        );
-
-        this.graphQLPlugins.forEach(plugin -> plugin.init(args));
-
-        this.entityClient = args.entityClient;
-        this.systemEntityClient = args.systemEntityClient;
-        this.graphClient = args.graphClient;
-        this.usageClient = args.usageClient;
-        this.siblingGraphService = args.siblingGraphService;
-
-        this.analyticsService = args.analyticsService;
-        this.entityService = args.entityService;
-        this.recommendationsService = args.recommendationsService;
-        this.statefulTokenService = args.statefulTokenService;
-        this.secretService = args.secretService;
-        this.entityRegistry = args.entityRegistry;
-        this.gitVersion = args.gitVersion;
-        this.supportsImpactAnalysis = args.supportsImpactAnalysis;
-        this.timeseriesAspectService = args.timeseriesAspectService;
-        this.timelineService = args.timelineService;
-        this.nativeUserService = args.nativeUserService;
-        this.groupService = args.groupService;
-        this.roleService = args.roleService;
-        this.inviteTokenService = args.inviteTokenService;
-        this.postService = args.postService;
-        this.viewService = args.viewService;
-        this.ownershipTypeService = args.ownershipTypeService;
-        this.settingsService = args.settingsService;
-        this.lineageService = args.lineageService;
-        this.queryService = args.queryService;
-        this.dataProductService = args.dataProductService;
-
-        this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
-        this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration);
-        this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration);
-        this.visualConfiguration = args.visualConfiguration;
-        this.telemetryConfiguration = args.telemetryConfiguration;
-        this.testsConfiguration = args.testsConfiguration;
-        this.datahubConfiguration = args.datahubConfiguration;
-        this.viewsConfiguration = args.viewsConfiguration;
-        this.featureFlags = args.featureFlags;
-
-        this.datasetType = new DatasetType(entityClient);
-        this.roleType = new RoleType(entityClient);
-        this.corpUserType = new CorpUserType(entityClient, featureFlags);
-        this.corpGroupType = new CorpGroupType(entityClient);
-        this.chartType = new ChartType(entityClient);
-        this.dashboardType = new DashboardType(entityClient);
-        this.dataPlatformType = new DataPlatformType(entityClient);
-        this.tagType = new TagType(entityClient);
-        this.mlModelType = new MLModelType(entityClient);
-        this.mlModelGroupType = new MLModelGroupType(entityClient);
-        this.mlFeatureType = new MLFeatureType(entityClient);
-        this.mlFeatureTableType = new MLFeatureTableType(entityClient);
-        this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient);
-        this.dataFlowType = new DataFlowType(entityClient);
-        this.dataJobType = new DataJobType(entityClient);
-        this.glossaryTermType = new GlossaryTermType(entityClient);
-        this.glossaryNodeType = new GlossaryNodeType(entityClient);
-        this.aspectType = new AspectType(entityClient);
-        this.containerType = new ContainerType(entityClient);
-        this.domainType = new DomainType(entityClient);
-        this.notebookType = new NotebookType(entityClient);
-        this.assertionType = new AssertionType(entityClient);
-        this.versionedDatasetType = new VersionedDatasetType(entityClient);
-        this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient);
-        this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient);
-        this.testType = new TestType(entityClient);
-        this.dataHubPolicyType = new DataHubPolicyType(entityClient);
-        this.dataHubRoleType = new DataHubRoleType(entityClient);
-        this.schemaFieldType = new SchemaFieldType();
-        this.dataHubViewType = new DataHubViewType(entityClient);
-        this.queryType = new QueryType(entityClient);
-        this.dataProductType = new DataProductType(entityClient);
-        this.ownershipType = new OwnershipType(entityClient);
-
-        // Init Lists
-        this.entityTypes = ImmutableList.of(
+            );
+
+    this.graphQLPlugins.forEach(plugin -> plugin.init(args));
+
+    this.entityClient = args.entityClient;
+    this.systemEntityClient = args.systemEntityClient;
+    this.graphClient = args.graphClient;
+    this.usageClient = args.usageClient;
+    this.siblingGraphService = args.siblingGraphService;
+
+    this.analyticsService = args.analyticsService;
+    this.entityService = args.entityService;
+    this.recommendationsService = args.recommendationsService;
+    this.statefulTokenService = args.statefulTokenService;
+    this.secretService = args.secretService;
+    this.entityRegistry = args.entityRegistry;
+    this.gitVersion = args.gitVersion;
+    this.supportsImpactAnalysis = args.supportsImpactAnalysis;
+    this.timeseriesAspectService = args.timeseriesAspectService;
+    this.timelineService = args.timelineService;
+    this.nativeUserService = args.nativeUserService;
+    this.groupService = args.groupService;
+    this.roleService = args.roleService;
+    this.inviteTokenService = args.inviteTokenService;
+    this.postService = args.postService;
+    this.viewService = args.viewService;
+    this.ownershipTypeService = args.ownershipTypeService;
+    this.settingsService = args.settingsService;
+    this.lineageService = args.lineageService;
+    this.queryService = args.queryService;
+    this.dataProductService = args.dataProductService;
+
+    this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
+    this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration);
+    this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration);
+    this.visualConfiguration = args.visualConfiguration;
+    this.telemetryConfiguration = args.telemetryConfiguration;
+    this.testsConfiguration = args.testsConfiguration;
+    this.datahubConfiguration = args.datahubConfiguration;
+    this.viewsConfiguration = args.viewsConfiguration;
+    this.featureFlags = args.featureFlags;
+
+    this.datasetType = new DatasetType(entityClient);
+    this.roleType = new RoleType(entityClient);
+    this.corpUserType = new CorpUserType(entityClient, featureFlags);
+    this.corpGroupType = new CorpGroupType(entityClient);
+    this.chartType = new ChartType(entityClient);
+    this.dashboardType = new DashboardType(entityClient);
+    this.dataPlatformType = new DataPlatformType(entityClient);
+    this.tagType = new TagType(entityClient);
+    this.mlModelType = new MLModelType(entityClient);
+    this.mlModelGroupType = new MLModelGroupType(entityClient);
+    this.mlFeatureType = new MLFeatureType(entityClient);
+    this.mlFeatureTableType = new MLFeatureTableType(entityClient);
+    this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient);
+    this.dataFlowType = new DataFlowType(entityClient);
+    this.dataJobType = new DataJobType(entityClient);
+    this.glossaryTermType = new GlossaryTermType(entityClient);
+    this.glossaryNodeType = new GlossaryNodeType(entityClient);
+    this.aspectType = new AspectType(entityClient);
+    this.containerType = new ContainerType(entityClient);
+    this.domainType = new DomainType(entityClient);
+    this.notebookType = new NotebookType(entityClient);
+    this.assertionType = new AssertionType(entityClient);
+    this.versionedDatasetType = new VersionedDatasetType(entityClient);
+    this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient);
+    this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient);
+    this.testType = new TestType(entityClient);
+    this.dataHubPolicyType = new DataHubPolicyType(entityClient);
+    this.dataHubRoleType = new DataHubRoleType(entityClient);
+    this.schemaFieldType = new SchemaFieldType();
+    this.dataHubViewType = new DataHubViewType(entityClient);
+    this.queryType = new QueryType(entityClient);
+    this.dataProductType = new DataProductType(entityClient);
+    this.ownershipType = new OwnershipType(entityClient);
+
+    // Init Lists
+    this.entityTypes =
+        ImmutableList.of(
            datasetType,
            roleType,
            corpUserType,
@@ -582,1262 +572,1867 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
            dataHubViewType,
            queryType,
            dataProductType,
-            ownershipType
-        );
-        this.loadableTypes = new ArrayList<>(entityTypes);
-        // Extend loadable types with types from the plugins
-        // This allows us to offer search and browse capabilities out of the box for those types
-        for (GmsGraphQLPlugin plugin: this.graphQLPlugins) {
-            Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes();
-            if (pluginLoadableTypes != null) {
-                this.loadableTypes.addAll(pluginLoadableTypes);
-            }
-        }
-        this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType);
-        this.searchableTypes = loadableTypes.stream()
+            ownershipType);
+    this.loadableTypes = new ArrayList<>(entityTypes);
+    // Extend loadable types with types from the plugins
+    // This allows us to offer search and browse capabilities out of the box for those types
+    for (GmsGraphQLPlugin plugin : this.graphQLPlugins) {
+      Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes();
+      if (pluginLoadableTypes != null) {
+        this.loadableTypes.addAll(pluginLoadableTypes);
+      }
+    }
+    this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType);
+    this.searchableTypes =
+        loadableTypes.stream()
            .filter(type -> (type instanceof SearchableEntityType))
            .map(type -> (SearchableEntityType<?, ?>) type)
            .collect(Collectors.toList());
-        this.browsableTypes = loadableTypes.stream()
+    this.browsableTypes =
+        loadableTypes.stream()
            .filter(type -> (type instanceof BrowsableEntityType))
            .map(type -> (BrowsableEntityType<?, ?>) type)
            .collect(Collectors.toList());
-    }
+  }

-    /**
-     * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from
-     * a {@link LoadableType}.
-     */
-    public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(final Collection<? extends LoadableType<?, ?>> loadableTypes) {
-        return loadableTypes
-            .stream()
-            .collect(Collectors.toMap(
+  /**
+   * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link
+   * LoadableType}.
+   */
+  public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(
+      final Collection<? extends LoadableType<?, ?>> loadableTypes) {
+    return loadableTypes.stream()
+        .collect(
+            Collectors.toMap(
                LoadableType::name,
-                (graphType) -> (context) -> createDataLoader(graphType, context)
-            ));
-    }
+                (graphType) -> (context) -> createDataLoader(graphType, context)));
+  }

-    /**
-     * Final call to wire up any extra resolvers the plugin might want to add on
-     * @param builder
-     */
-    private void configurePluginResolvers(final RuntimeWiring.Builder builder) {
-        this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this));
-    }
-
-
-    public void configureRuntimeWiring(final RuntimeWiring.Builder builder) {
-        configureQueryResolvers(builder);
-        configureMutationResolvers(builder);
-        configureGenericEntityResolvers(builder);
-        configureDatasetResolvers(builder);
-        configureCorpUserResolvers(builder);
-        configureCorpGroupResolvers(builder);
-        configureDashboardResolvers(builder);
-        configureNotebookResolvers(builder);
-        configureChartResolvers(builder);
-        configureTypeResolvers(builder);
-        configureTypeExtensions(builder);
-        configureTagAssociationResolver(builder);
-        configureGlossaryTermAssociationResolver(builder);
-        configureDataJobResolvers(builder);
-        configureDataFlowResolvers(builder);
-        configureMLFeatureTableResolvers(builder);
-        configureGlossaryRelationshipResolvers(builder);
-        configureIngestionSourceResolvers(builder);
-        configureAnalyticsResolvers(builder);
-        configureContainerResolvers(builder);
-        configureDataPlatformInstanceResolvers(builder);
-        configureGlossaryTermResolvers(builder);
-        configureOrganisationRoleResolvers(builder);
-        configureGlossaryNodeResolvers(builder);
-        configureDomainResolvers(builder);
-        configureDataProductResolvers(builder);
-        configureAssertionResolvers(builder);
-        configurePolicyResolvers(builder);
-        configureDataProcessInstanceResolvers(builder);
-        configureVersionedDatasetResolvers(builder);
-        configureAccessAccessTokenMetadataResolvers(builder);
-        configureTestResultResolvers(builder);
-        configureRoleResolvers(builder);
-        configureSchemaFieldResolvers(builder);
-        configureEntityPathResolvers(builder);
-        configureViewResolvers(builder);
-        configureQueryEntityResolvers(builder);
-        configureOwnershipTypeResolver(builder);
-        configurePluginResolvers(builder);
-    }
-
-    private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) {
-        builder.type("Role", typeWiring -> typeWiring
-            .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
-        );
-        builder.type("RoleAssociation", typeWiring -> typeWiring
-            .dataFetcher("role",
-                new LoadableTypeResolver<>(roleType,
-                    (env) -> ((com.linkedin.datahub.graphql.generated.RoleAssociation)
-                        env.getSource()).getRole().getUrn()))
-        );
-        builder.type("RoleUser", typeWiring -> typeWiring
-            .dataFetcher("user",
-                new LoadableTypeResolver<>(corpUserType,
-                    (env) -> ((com.linkedin.datahub.graphql.generated.RoleUser)
-                        env.getSource()).getUser().getUrn()))
-        );
+  /**
+   * Final call to wire up any extra resolvers the plugin might want to add on
+   *
+   * @param builder
+   */
+  private void configurePluginResolvers(final RuntimeWiring.Builder builder) {
+    this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this));
+  }
+
+  public void
configureRuntimeWiring(final RuntimeWiring.Builder builder) { + configureQueryResolvers(builder); + configureMutationResolvers(builder); + configureGenericEntityResolvers(builder); + configureDatasetResolvers(builder); + configureCorpUserResolvers(builder); + configureCorpGroupResolvers(builder); + configureDashboardResolvers(builder); + configureNotebookResolvers(builder); + configureChartResolvers(builder); + configureTypeResolvers(builder); + configureTypeExtensions(builder); + configureTagAssociationResolver(builder); + configureGlossaryTermAssociationResolver(builder); + configureDataJobResolvers(builder); + configureDataFlowResolvers(builder); + configureMLFeatureTableResolvers(builder); + configureGlossaryRelationshipResolvers(builder); + configureIngestionSourceResolvers(builder); + configureAnalyticsResolvers(builder); + configureContainerResolvers(builder); + configureDataPlatformInstanceResolvers(builder); + configureGlossaryTermResolvers(builder); + configureOrganisationRoleResolvers(builder); + configureGlossaryNodeResolvers(builder); + configureDomainResolvers(builder); + configureDataProductResolvers(builder); + configureAssertionResolvers(builder); + configurePolicyResolvers(builder); + configureDataProcessInstanceResolvers(builder); + configureVersionedDatasetResolvers(builder); + configureAccessAccessTokenMetadataResolvers(builder); + configureTestResultResolvers(builder); + configureRoleResolvers(builder); + configureSchemaFieldResolvers(builder); + configureEntityPathResolvers(builder); + configureViewResolvers(builder); + configureQueryEntityResolvers(builder); + configureOwnershipTypeResolver(builder); + configurePluginResolvers(builder); + } + + private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { + builder.type( + "Role", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "RoleAssociation", + typeWiring -> + typeWiring.dataFetcher( + "role", + new LoadableTypeResolver<>( + roleType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleAssociation) env.getSource()) + .getRole() + .getUrn()))); + builder.type( + "RoleUser", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleUser) env.getSource()) + .getUser() + .getUrn()))); + } + + public GraphQLEngine.Builder builder() { + final GraphQLEngine.Builder builder = GraphQLEngine.builder(); + builder + .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) + .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + List pluginSchemaFiles = plugin.getSchemaFiles(); + if (pluginSchemaFiles != null) { + pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); + } + Collection> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + pluginLoadableTypes.forEach( + loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); + } } - - public 
GraphQLEngine.Builder builder() { - final GraphQLEngine.Builder builder = GraphQLEngine.builder(); - builder - .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) - .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); - - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - List pluginSchemaFiles = plugin.getSchemaFiles(); - if (pluginSchemaFiles != null) { - pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); - } - Collection> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - pluginLoadableTypes.forEach(loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); - } - } - builder - .addDataLoaders(loaderSuppliers(loadableTypes)) - .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) - .configureRuntimeWiring(this::configureRuntimeWiring); - return builder; + builder + .addDataLoaders(loaderSuppliers(loadableTypes)) + .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) + .configureRuntimeWiring(this::configureRuntimeWiring); + return builder; + } + + public static String fileBasedSchema(String fileName) { + String schema; + try { + InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); + schema = IOUtils.toString(is, StandardCharsets.UTF_8); + is.close(); + } catch (IOException e) { + throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); } - - public static String fileBasedSchema(String fileName) { - String schema; - try { - InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); - schema = IOUtils.toString(is, StandardCharsets.UTF_8); - is.close(); - } catch (IOException e) { - throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); - } - return schema; + return schema; + } + + private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { + final boolean isAnalyticsEnabled = analyticsService != null; + builder + .type( + "Query", + typeWiring -> + typeWiring.dataFetcher( + "isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) + .type( + "AnalyticsChart", + typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); + if (isAnalyticsEnabled) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "getAnalyticsCharts", new GetChartsResolver(analyticsService, entityClient)) + .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) + .dataFetcher( + "getMetadataAnalyticsCharts", + new GetMetadataAnalyticsResolver(entityClient))); } + } - private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { - final boolean isAnalyticsEnabled = analyticsService != null; - builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) - .type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); - if (isAnalyticsEnabled) { - builder.type("Query", typeWiring -> 
typeWiring.dataFetcher("getAnalyticsCharts", - new GetChartsResolver(analyticsService, entityClient)) - .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) - .dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient))); - } - } - - private void configureContainerResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Container", typeWiring -> typeWiring + private void configureContainerResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Container", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, (env) -> ((Container) env.getSource()).getPlatform().getUrn())) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Container container = env.getSource(); - return container.getContainer() != null ? container.getContainer().getUrn() : null; - }) - ) + final Container container = env.getSource(); + return container.getContainer() != null + ? container.getContainer().getUrn() + : null; + })) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Container container = env.getSource(); - return container.getDataPlatformInstance() != null ? 
container.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataPlatformInstance", typeWiring -> typeWiring - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn())) - ); - } - - private void configureQueryResolvers(final RuntimeWiring.Builder builder) { - builder.type("Query", typeWiring -> typeWiring - .dataFetcher("appConfig", - new AppConfigResolver(gitVersion, analyticsService != null, - this.ingestionConfiguration, - this.authenticationConfiguration, - this.authorizationConfiguration, - this.supportsImpactAnalysis, - this.visualConfiguration, - this.telemetryConfiguration, - this.testsConfiguration, - this.datahubConfiguration, - this.viewsConfiguration, - this.featureFlags - )) - .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) - .dataFetcher("search", new SearchResolver(this.entityClient)) - .dataFetcher("searchAcrossEntities", new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) - .dataFetcher("scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) - .dataFetcher("aggregateAcrossEntities", new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) - .dataFetcher("autoCompleteForMultiple", new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) - .dataFetcher("browse", new BrowseResolver(browsableTypes)) - .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) - .dataFetcher("dataset", getResolver(datasetType)) - .dataFetcher("role", getResolver(roleType)) - .dataFetcher("versionedDataset", getResolver(versionedDatasetType, - (env) -> new VersionedUrn().setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) - .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) - .dataFetcher("notebook", getResolver(notebookType)) - .dataFetcher("corpUser", getResolver(corpUserType)) - .dataFetcher("corpGroup", getResolver(corpGroupType)) - .dataFetcher("dashboard", getResolver(dashboardType)) - .dataFetcher("chart", getResolver(chartType)) - .dataFetcher("tag", getResolver(tagType)) - .dataFetcher("dataFlow", getResolver(dataFlowType)) - .dataFetcher("dataJob", getResolver(dataJobType)) - .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) - .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) - .dataFetcher("domain", getResolver((domainType))) - .dataFetcher("dataPlatform", getResolver(dataPlatformType)) - .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) - .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) - .dataFetcher("mlFeature", getResolver(mlFeatureType)) - .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) - .dataFetcher("mlModel", getResolver(mlModelType)) - .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) - .dataFetcher("assertion", getResolver(assertionType)) - .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) - .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) - .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) - .dataFetcher("listGroups", new 
ListGroupsResolver(this.entityClient)) - .dataFetcher("listRecommendations", new ListRecommendationsResolver(recommendationsService)) - .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) - .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) - .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) - .dataFetcher("container", getResolver(containerType)) - .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) - .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) - .dataFetcher("getSecretValues", new GetSecretValuesResolver(this.entityClient, this.secretService)) - .dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) - .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) - .dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) - .dataFetcher("getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) - .dataFetcher("test", getResolver(testType)) - .dataFetcher("listTests", new ListTestsResolver(entityClient)) - .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) - .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) - .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) - .dataFetcher("entity", getEntityResolver()) - .dataFetcher("entities", getEntitiesResolver()) - .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) - .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) - .dataFetcher("batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) - .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) - .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) - .dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) - .dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService)) - .dataFetcher("dataProduct", getResolver(dataProductType)) - .dataFetcher("listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) - .dataFetcher("browseV2", new BrowseV2Resolver(this.entityClient, this.viewService)) - ); - } - - private DataFetcher getEntitiesResolver() { - return new BatchGetEntitiesResolver(entityTypes, - (env) -> { - List urns = env.getArgument(URNS_FIELD_NAME); - return urns.stream().map((urn) -> { + final Container container = env.getSource(); + return container.getDataPlatformInstance() != null + ? 
container.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataPlatformInstance", + typeWiring -> + typeWiring.dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn()))); + } + + private void configureQueryResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "appConfig", + new AppConfigResolver( + gitVersion, + analyticsService != null, + this.ingestionConfiguration, + this.authenticationConfiguration, + this.authorizationConfiguration, + this.supportsImpactAnalysis, + this.visualConfiguration, + this.telemetryConfiguration, + this.testsConfiguration, + this.datahubConfiguration, + this.viewsConfiguration, + this.featureFlags)) + .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) + .dataFetcher("search", new SearchResolver(this.entityClient)) + .dataFetcher( + "searchAcrossEntities", + new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "scrollAcrossEntities", + new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "aggregateAcrossEntities", + new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) + .dataFetcher( + "autoCompleteForMultiple", + new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) + .dataFetcher("browse", new BrowseResolver(browsableTypes)) + .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) + .dataFetcher("dataset", getResolver(datasetType)) + .dataFetcher("role", getResolver(roleType)) + .dataFetcher( + "versionedDataset", + getResolver( + versionedDatasetType, + (env) -> + new VersionedUrn() + .setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) + .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) + .dataFetcher("notebook", getResolver(notebookType)) + .dataFetcher("corpUser", getResolver(corpUserType)) + .dataFetcher("corpGroup", getResolver(corpGroupType)) + .dataFetcher("dashboard", getResolver(dashboardType)) + .dataFetcher("chart", getResolver(chartType)) + .dataFetcher("tag", getResolver(tagType)) + .dataFetcher("dataFlow", getResolver(dataFlowType)) + .dataFetcher("dataJob", getResolver(dataJobType)) + .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) + .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) + .dataFetcher("domain", getResolver((domainType))) + .dataFetcher("dataPlatform", getResolver(dataPlatformType)) + .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) + .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) + .dataFetcher("mlFeature", getResolver(mlFeatureType)) + .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) + .dataFetcher("mlModel", getResolver(mlModelType)) + .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) + .dataFetcher("assertion", getResolver(assertionType)) + .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) + .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) + .dataFetcher("listUsers", new 
ListUsersResolver(this.entityClient)) + .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) + .dataFetcher( + "listRecommendations", new ListRecommendationsResolver(recommendationsService)) + .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) + .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) + .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) + .dataFetcher("container", getResolver(containerType)) + .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) + .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) + .dataFetcher( + "getSecretValues", + new GetSecretValuesResolver(this.entityClient, this.secretService)) + .dataFetcher( + "listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) + .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) + .dataFetcher( + "getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) + .dataFetcher("test", getResolver(testType)) + .dataFetcher("listTests", new ListTestsResolver(entityClient)) + .dataFetcher( + "getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) + .dataFetcher( + "getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) + .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) + .dataFetcher("entity", getEntityResolver()) + .dataFetcher("entities", getEntitiesResolver()) + .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) + .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) + .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) + .dataFetcher( + "batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) + .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) + .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) + .dataFetcher( + "globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) + .dataFetcher( + "getQuickFilters", + new GetQuickFiltersResolver(this.entityClient, this.viewService)) + .dataFetcher("dataProduct", getResolver(dataProductType)) + .dataFetcher( + "listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) + .dataFetcher( + "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + } + + private DataFetcher getEntitiesResolver() { + return new BatchGetEntitiesResolver( + entityTypes, + (env) -> { + List urns = env.getArgument(URNS_FIELD_NAME); + return urns.stream() + .map( + (urn) -> { try { - Urn entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + Urn entityUrn = Urn.createFromString(urn); + return UrnToEntityMapper.map(entityUrn); } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); + throw new RuntimeException("Failed to get entity", e); } - }).collect(Collectors.toList()); - }); - } + }) + .collect(Collectors.toList()); + }); + } + + private DataFetcher getEntityResolver() { + return new EntityTypeResolver( + entityTypes, + (env) -> { + try { + Urn urn = 
Urn.createFromString(env.getArgument(URN_FIELD_NAME)); + return UrnToEntityMapper.map(urn); + } catch (Exception e) { + throw new RuntimeException("Failed to get entity", e); + } + }); + } + + private DataFetcher getResolver(LoadableType loadableType) { + return getResolver(loadableType, this::getUrnField); + } + + private DataFetcher getResolver( + LoadableType loadableType, Function keyProvider) { + return new LoadableTypeResolver<>(loadableType, keyProvider); + } + + private String getUrnField(DataFetchingEnvironment env) { + return env.getArgument(URN_FIELD_NAME); + } + + private void configureMutationResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Mutation", + typeWiring -> + typeWiring + .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) + .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) + .dataFetcher( + "createTag", new CreateTagResolver(this.entityClient, this.entityService)) + .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) + .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) + .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) + .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) + .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) + .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) + .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) + .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) + .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) + .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) + .dataFetcher("addTag", new AddTagResolver(entityService)) + .dataFetcher("addTags", new AddTagsResolver(entityService)) + .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) + .dataFetcher("removeTag", new RemoveTagResolver(entityService)) + .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) + .dataFetcher("addTerm", new AddTermResolver(entityService)) + .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) + .dataFetcher("addTerms", new AddTermsResolver(entityService)) + .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) + .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) + .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) + .dataFetcher( + "updateDescription", + new UpdateDescriptionResolver(entityService, this.entityClient)) + .dataFetcher("addOwner", new AddOwnerResolver(entityService)) + .dataFetcher("addOwners", new AddOwnersResolver(entityService)) + .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) + .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) + .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) + .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) + .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) + .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) + .dataFetcher( + "removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) + .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) + .dataFetcher("removeUser", new 
RemoveUserResolver(this.entityClient)) + .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) + .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) + .dataFetcher( + "createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) + .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) + .dataFetcher( + "setDomain", new SetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) + .dataFetcher( + "updateDeprecation", + new UpdateDeprecationResolver(this.entityClient, this.entityService)) + .dataFetcher( + "batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) + .dataFetcher( + "unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) + .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) + .dataFetcher( + "revokeAccessToken", + new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) + .dataFetcher( + "createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "createIngestionExecutionRequest", + new CreateIngestionExecutionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "cancelIngestionExecutionRequest", + new CancelIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher( + "createTestConnectionRequest", + new CreateTestConnectionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "deleteAssertion", + new DeleteAssertionResolver(this.entityClient, this.entityService)) + .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) + .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) + .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) + .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) + .dataFetcher( + "createGlossaryTerm", + new CreateGlossaryTermResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createGlossaryNode", + new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateParentNode", + new UpdateParentNodeResolver(this.entityService, this.entityClient)) + .dataFetcher( + "deleteGlossaryEntity", + new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) + .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) + .dataFetcher( + "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) + .dataFetcher( + "createNativeUserResetToken", + new CreateNativeUserResetTokenResolver(this.nativeUserService)) + .dataFetcher( + "batchUpdateSoftDeleted", + new BatchUpdateSoftDeletedResolver(this.entityService)) + .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) + .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) + 
.dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) + .dataFetcher( + "createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) + .dataFetcher( + "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) + .dataFetcher("createPost", new CreatePostResolver(this.postService)) + .dataFetcher("deletePost", new DeletePostResolver(this.postService)) + .dataFetcher( + "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) + .dataFetcher("createView", new CreateViewResolver(this.viewService)) + .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) + .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) + .dataFetcher( + "updateGlobalViewsSettings", + new UpdateGlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateCorpUserViewsSettings", + new UpdateCorpUserViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateLineage", + new UpdateLineageResolver(this.entityService, this.lineageService)) + .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) + .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) + .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) + .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) + .dataFetcher( + "createDataProduct", new CreateDataProductResolver(this.dataProductService)) + .dataFetcher( + "updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) + .dataFetcher( + "deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) + .dataFetcher( + "batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) + .dataFetcher( + "createOwnershipType", + new CreateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "updateOwnershipType", + new UpdateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "deleteOwnershipType", + new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + } + + private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "SearchResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((SearchResult) env.getSource()).getEntity()))) + .type( + "MatchedField", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((MatchedField) env.getSource()).getEntity()))) + .type( + "SearchAcrossLineageResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity()))) + .type( + "AggregationMetadata", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((AggregationMetadata) env.getSource()).getEntity()))) + .type( + "RecommendationContent", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((RecommendationContent) env.getSource()).getEntity()))) + .type( + "BrowseResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, (env) -> ((BrowseResults) env.getSource()).getEntities()))) + .type( + "ParentDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new EntityTypeBatchResolver( + entityTypes, + (env) -> { + final ParentDomainsResult result = env.getSource(); + return result != null ? 
result.getDomains() : null; + }))) + .type( + "EntityRelationshipLegacy", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity()))) + .type( + "EntityRelationship", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((EntityRelationship) env.getSource()).getEntity()))) + .type( + "BrowseResultGroupV2", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity()))) + .type( + "BrowsePathEntry", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))) + .type( + "LineageRelationship", + typeWiring -> + typeWiring + .dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((LineageRelationship) env.getSource()).getEntity())) + .dataFetcher( + "createdActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getCreatedActor() != null + ? relationship.getCreatedActor() + : null; + })) + .dataFetcher( + "updatedActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getUpdatedActor() != null + ? relationship.getUpdatedActor() + : null; + }))) + .type( + "ListDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new LoadableTypeBatchResolver<>( + domainType, + (env) -> + ((ListDomainsResult) env.getSource()) + .getDomains().stream() + .map(Domain::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryTermsResult", + typeWiring -> + typeWiring.dataFetcher( + "terms", + new LoadableTypeBatchResolver<>( + glossaryTermType, + (env) -> + ((GetRootGlossaryTermsResult) env.getSource()) + .getTerms().stream() + .map(GlossaryTerm::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryNodesResult", + typeWiring -> + typeWiring.dataFetcher( + "nodes", + new LoadableTypeBatchResolver<>( + glossaryNodeType, + (env) -> + ((GetRootGlossaryNodesResult) env.getSource()) + .getNodes().stream() + .map(GlossaryNode::getUrn) + .collect(Collectors.toList())))) + .type( + "AutoCompleteResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResults) env.getSource()).getEntities()))) + .type( + "AutoCompleteResultForEntity", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))) + .type( + "PolicyMatchCriterionValue", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity()))) + .type( + "ListTestsResult", + typeWiring -> + typeWiring.dataFetcher( + "tests", + new LoadableTypeBatchResolver<>( + testType, + (env) -> + ((ListTestsResult) env.getSource()) + .getTests().stream() + .map(Test::getUrn) + .collect(Collectors.toList())))) + .type( + "QuickFilter", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((QuickFilter) env.getSource()).getEntity()))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "ownershipType", + new EntityTypeResolver( + entityTypes, 
(env) -> ((Owner) env.getSource()).getOwnershipType()))); + } - private DataFetcher getEntityResolver() { - return new EntityTypeResolver(entityTypes, - (env) -> { - try { - Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); - } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); - } - }); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dataset} type. + */ + private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Dataset", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dataset) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getContainer() != null + ? dataset.getContainer().getUrn() + : null; + })) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getDataPlatformInstance() != null + ? dataset.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "datasetProfiles", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "datasetProfile", + DatasetProfileMapper::map)) + .dataFetcher( + "operations", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "operation", + OperationMapper::map, + new SortCriterion() + .setField(OPERATION_EVENT_TIME_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))) + .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) + .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) + .dataFetcher( + "health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher( + "assertions", new EntityAssertionsResolver(entityClient, graphClient)) + .dataFetcher("testResults", new TestResultsResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("runs", new EntityRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "UserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn()))) + .type( + "ForeignKeyConstraint", + typeWiring -> + typeWiring.dataFetcher( + "foreignDataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> + ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn()))) + .type( + "SiblingProperties", + typeWiring -> + typeWiring.dataFetcher( + "siblings", + new EntityTypeBatchResolver( + new ArrayList<>(entityTypes), + (env) -> ((SiblingProperties) 
env.getSource()).getSiblings()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))) + .type( + "DatasetStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } - private DataFetcher getResolver(LoadableType loadableType) { - return getResolver(loadableType, this::getUrnField); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.VersionedDataset} type. + */ + private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionedDataset", + typeWiring -> typeWiring.dataFetcher("relationships", new StaticDataFetcher(null))); + } - private DataFetcher getResolver(LoadableType loadableType, - Function keyProvider) { - return new LoadableTypeResolver<>(loadableType, keyProvider); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. + */ + private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "AccessToken", + typeWiring -> + typeWiring.dataFetcher( + "metadata", + new LoadableTypeResolver<>( + accessTokenMetadataType, + (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn()))); + builder.type( + "ListAccessTokenResult", + typeWiring -> + typeWiring.dataFetcher( + "tokens", + new LoadableTypeBatchResolver<>( + accessTokenMetadataType, + (env) -> + ((ListAccessTokenResult) env.getSource()) + .getTokens().stream() + .map(AccessTokenMetadata::getUrn) + .collect(Collectors.toList())))); + } + + private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTerm", + typeWiring -> + typeWiring + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryNode", + typeWiring -> + typeWiring + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "SchemaFieldEntity", + typeWiring -> + typeWiring.dataFetcher( + "parent", + new EntityTypeResolver( + entityTypes, (env) -> ((SchemaFieldEntity) env.getSource()).getParent()))); + } + + private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "EntityPath", + typeWiring -> + typeWiring.dataFetcher( + "path", + new BatchGetEntitiesResolver( + entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); + } - private String getUrnField(DataFetchingEnvironment env) { - return env.getArgument(URN_FIELD_NAME); - } + /** + * Configures 
resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpUser} type. + */ + private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpUser", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "CorpUserInfo", + typeWiring -> + typeWiring.dataFetcher( + "manager", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn()))); + } - private void configureMutationResolvers(final RuntimeWiring.Builder builder) { - builder.type("Mutation", typeWiring -> typeWiring - .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) - .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) - .dataFetcher("createTag", new CreateTagResolver(this.entityClient, this.entityService)) - .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) - .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) - .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) - .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) - .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) - .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) - .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) - .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) - .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) - .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) - .dataFetcher("addTag", new AddTagResolver(entityService)) - .dataFetcher("addTags", new AddTagsResolver(entityService)) - .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) - .dataFetcher("removeTag", new RemoveTagResolver(entityService)) - .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) - .dataFetcher("addTerm", new AddTermResolver(entityService)) - .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) - .dataFetcher("addTerms", new AddTermsResolver(entityService)) - .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) - .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) - .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) - .dataFetcher("updateDescription", new UpdateDescriptionResolver(entityService, this.entityClient)) - .dataFetcher("addOwner", new AddOwnerResolver(entityService)) - .dataFetcher("addOwners", new AddOwnersResolver(entityService)) - .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) - .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) - .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) - .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) - .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) - .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) - .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) - .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) - .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) - .dataFetcher("removeGroup", new 
RemoveGroupResolver(this.entityClient)) - .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) - .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) - .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) - .dataFetcher("updateDeprecation", new UpdateDeprecationResolver(this.entityClient, this.entityService)) - .dataFetcher("batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) - .dataFetcher("unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) - .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) - .dataFetcher("createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) - .dataFetcher("revokeAccessToken", new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) - .dataFetcher("createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) - .dataFetcher("createIngestionExecutionRequest", new CreateIngestionExecutionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("cancelIngestionExecutionRequest", new CancelIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("createTestConnectionRequest", new CreateTestConnectionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("deleteAssertion", new DeleteAssertionResolver(this.entityClient, this.entityService)) - .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) - .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) - .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) - .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) - .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient, this.entityService)) - .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) - .dataFetcher("updateParentNode", new UpdateParentNodeResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteGlossaryEntity", - new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) - .dataFetcher("updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) - .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) - .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) - .dataFetcher("batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService)) - .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) - .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) - .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) - .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) - 
.dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) - .dataFetcher("createPost", new CreatePostResolver(this.postService)) - .dataFetcher("deletePost", new DeletePostResolver(this.postService)) - .dataFetcher("batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) - .dataFetcher("createView", new CreateViewResolver(this.viewService)) - .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) - .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) - .dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateLineage", new UpdateLineageResolver(this.entityService, this.lineageService)) - .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) - .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) - .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) - .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) - .dataFetcher("createDataProduct", new CreateDataProductResolver(this.dataProductService)) - .dataFetcher("updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) - .dataFetcher("deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) - .dataFetcher("batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) - .dataFetcher("createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService)) - ); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpGroup} type. 
+ */ + private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpGroup", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder + .type( + "CorpGroupInfo", + typeWiring -> + typeWiring + .dataFetcher( + "admins", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getAdmins().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()))) + .dataFetcher( + "members", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getMembers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList())))) + .type( + "ListGroupsResult", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> + ((ListGroupsResult) env.getSource()) + .getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList())))); + } + + private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "Tag", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "TagAssociation", + typeWiring -> + typeWiring.dataFetcher( + "tag", + new LoadableTypeResolver<>( + tagType, + (env) -> + ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()) + .getTag() + .getUrn()))); + } + + private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTermAssociation", + typeWiring -> + typeWiring.dataFetcher( + "term", + new LoadableTypeResolver<>( + glossaryTermType, + (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn()))); + } - private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("SearchResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchResult) env.getSource()).getEntity())) - ) - .type("MatchedField", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((MatchedField) env.getSource()).getEntity())) - ) - .type("SearchAcrossLineageResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity())) - ) - .type("AggregationMetadata", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((AggregationMetadata) env.getSource()).getEntity())) - ) - .type("RecommendationContent", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((RecommendationContent) env.getSource()).getEntity())) - ) - .type("BrowseResults", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((BrowseResults) env.getSource()).getEntities())) - ) - .type("ParentDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new EntityTypeBatchResolver(entityTypes, - (env) -> { - final ParentDomainsResult result = env.getSource(); - return result != null ? 
result.getDomains() : null; - })) - ) - .type("EntityRelationshipLegacy", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity())) - ) - .type("EntityRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationship) env.getSource()).getEntity())) - ) - .type("BrowseResultGroupV2", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity())) - ) - .type("BrowsePathEntry", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowsePathEntry) env.getSource()).getEntity())) - ) - .type("LineageRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((LineageRelationship) env.getSource()).getEntity())) - .dataFetcher("createdActor", - new EntityTypeResolver(entityTypes, - (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getCreatedActor() != null ? relationship.getCreatedActor() : null; - }) - ) - .dataFetcher("updatedActor", - new EntityTypeResolver(entityTypes, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Notebook} type. + */ + private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Notebook", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getUpdatedActor() != null ? 
relationship.getUpdatedActor() : null; - }) - ) - ) - .type("ListDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new LoadableTypeBatchResolver<>(domainType, - (env) -> ((ListDomainsResult) env.getSource()).getDomains().stream() - .map(Domain::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryTermsResult", typeWiring -> typeWiring - .dataFetcher("terms", new LoadableTypeBatchResolver<>(glossaryTermType, - (env) -> ((GetRootGlossaryTermsResult) env.getSource()).getTerms().stream() - .map(GlossaryTerm::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryNodesResult", typeWiring -> typeWiring - .dataFetcher("nodes", new LoadableTypeBatchResolver<>(glossaryNodeType, - (env) -> ((GetRootGlossaryNodesResult) env.getSource()).getNodes().stream() - .map(GlossaryNode::getUrn) - .collect(Collectors.toList()))) - ) - .type("AutoCompleteResults", typeWiring -> typeWiring - .dataFetcher("entities", - new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResults) env.getSource()).getEntities())) - ) - .type("AutoCompleteResultForEntity", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities())) - ) - .type("PolicyMatchCriterionValue", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity())) - ) - .type("ListTestsResult", typeWiring -> typeWiring - .dataFetcher("tests", new LoadableTypeBatchResolver<>(testType, - (env) -> ((ListTestsResult) env.getSource()).getTests().stream() - .map(Test::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuickFilter", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((QuickFilter) env.getSource()).getEntity())) - ) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("ownershipType", new EntityTypeResolver(entityTypes, - (env) -> ((Owner) env.getSource()).getOwnershipType())) - ); - } + final Notebook notebook = env.getSource(); + return notebook.getDataPlatformInstance() != null + ? notebook.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dataset} type. - */ - private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Dataset", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dashboard} type. 
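+   * <p>Optional references such as the dashboard's container and data platform instance resolve
+   * to null rather than failing the query when the underlying aspect is absent.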
+ */ + private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Dashboard", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dataset) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getContainer() != null ? dataset.getContainer().getUrn() : null; - }) - ) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Dashboard dashboard = env.getSource(); + return dashboard.getDataPlatformInstance() != null + ? dashboard.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getDataPlatformInstance() != null ? dataset.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("datasetProfiles", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "datasetProfile", - DatasetProfileMapper::map - ) - ) - .dataFetcher("operations", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "operation", - OperationMapper::map, - new SortCriterion().setField(OPERATION_EVENT_TIME_FIELD_NAME).setOrder(SortOrder.DESCENDING) - ) - ) - .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) - .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) - .dataFetcher("health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("assertions", new EntityAssertionsResolver(entityClient, graphClient)) - .dataFetcher("testResults", new TestResultsResolver(entityClient)) - .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("runs", new EntityRunsResolver(entityClient)) + final Dashboard dashboard = env.getSource(); + return dashboard.getContainer() != null + ? 
dashboard.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) + .dataFetcher( + "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes, - (env) -> ((Owner) env.getSource()).getOwner())) - ) - .type("UserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>(corpUserType, - (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn())) - ) - .type("ForeignKeyConstraint", typeWiring -> typeWiring - .dataFetcher("foreignDataset", new LoadableTypeResolver<>(datasetType, - (env) -> ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn())) - ) - .type("SiblingProperties", typeWiring -> typeWiring - .dataFetcher("siblings", - new EntityTypeBatchResolver( - new ArrayList<>(entityTypes), - (env) -> ((SiblingProperties) env.getSource()).getSiblings())) - ) - .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring - .dataFetcher("author", new LoadableTypeResolver<>(corpUserType, - (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())) - ) - .type("DatasetStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "DashboardInfo", + typeWiring -> + typeWiring.dataFetcher( + "charts", + new LoadableTypeBatchResolver<>( + chartType, + (env) -> + ((DashboardInfo) env.getSource()) + .getCharts().stream() + .map(Chart::getUrn) + .collect(Collectors.toList())))); + builder.type( + "DashboardUserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn()))); + builder.type( + "DashboardStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.VersionedDataset} type. - */ - private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("VersionedDataset", typeWiring -> typeWiring - .dataFetcher("relationships", new StaticDataFetcher(null))); - - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. 
- */ - private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { - builder.type("AccessToken", typeWiring -> typeWiring - .dataFetcher("metadata", new LoadableTypeResolver<>(accessTokenMetadataType, - (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn())) - ); - builder.type("ListAccessTokenResult", typeWiring -> typeWiring - .dataFetcher("tokens", new LoadableTypeBatchResolver<>(accessTokenMetadataType, - (env) -> ((ListAccessTokenResult) env.getSource()).getTokens().stream() - .map(AccessTokenMetadata::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryNode", typeWiring -> typeWiring - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { - builder.type("SchemaFieldEntity", typeWiring -> typeWiring - .dataFetcher("parent", new EntityTypeResolver(entityTypes, - (env) -> ((SchemaFieldEntity) env.getSource()).getParent())) - ); - } - - private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { - builder.type("EntityPath", typeWiring -> typeWiring - .dataFetcher("path", new BatchGetEntitiesResolver(entityTypes, - (env) -> ((EntityPath) env.getSource()).getPath())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpUser} type. - */ - private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpUser", typeWiring -> typeWiring - .dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("CorpUserInfo", typeWiring -> typeWiring - .dataFetcher("manager", new LoadableTypeResolver<>(corpUserType, - (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpGroup} type. 
- */ - private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpGroup", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); - builder.type("CorpGroupInfo", typeWiring -> typeWiring - .dataFetcher("admins", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getAdmins().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - .dataFetcher("members", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getMembers().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - ) - .type("ListGroupsResult", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(corpGroupType, - (env) -> ((ListGroupsResult) env.getSource()).getGroups().stream() - .map(CorpGroup::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("Tag", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - builder.type("TagAssociation", typeWiring -> typeWiring - .dataFetcher("tag", - new LoadableTypeResolver<>(tagType, - (env) -> ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()).getTag().getUrn())) - ); - } - - private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTermAssociation", typeWiring -> typeWiring - .dataFetcher("term", - new LoadableTypeResolver<>(glossaryTermType, - (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn())) - ); - } + DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Notebook} type. + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Chart} type. */ - private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { - builder.type("Notebook", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Notebook notebook = env.getSource(); - return notebook.getDataPlatformInstance() != null ? notebook.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dashboard} type. 
- */ - private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { - builder.type("Dashboard", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getDataPlatformInstance() != null ? dashboard.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>(containerType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getContainer() != null ? dashboard.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) - .dataFetcher("statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("DashboardInfo", typeWiring -> typeWiring - .dataFetcher("charts", new LoadableTypeBatchResolver<>(chartType, - (env) -> ((DashboardInfo) env.getSource()).getCharts().stream() - .map(Chart::getUrn) - .collect(Collectors.toList()))) - ); - builder.type("DashboardUserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>( - corpUserType, - (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn())) - ); - builder.type("DashboardStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, - (env) -> { - DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Chart} type. - */ - private void configureChartResolvers(final RuntimeWiring.Builder builder) { - builder.type("Chart", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getDataPlatformInstance() != null ? chart.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>( - containerType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getContainer() != null ? 
chart.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("ChartInfo", typeWiring -> typeWiring - .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((ChartInfo) env.getSource()).getInputs().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - ); - } - - /** - * Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. - */ - private void configureTypeResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Entity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("EntityWithRelationships", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("BrowsableEntity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(browsableTypes.stream() - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("OwnerType", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(ownerTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("PlatformSchema", typeWiring -> typeWiring - .typeResolver(new PlatformSchemaUnionTypeResolver()) - ) - .type("HyperParameterValueType", typeWiring -> typeWiring - .typeResolver(new HyperParameterValueTypeResolver()) - ) - .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) - .type("TimeSeriesAspect", typeWiring -> typeWiring - .typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) - .type("ResultsType", typeWiring -> typeWiring - .typeResolver(new ResultsTypeResolver())); - } - - /** - * Configures custom type extensions leveraged within our GraphQL schema. - */ - private void configureTypeExtensions(final RuntimeWiring.Builder builder) { - builder.scalar(GraphQLLong); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataJob} type. 
- */ - private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataJob", typeWiring -> typeWiring + private void configureChartResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Chart", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("dataFlow", new LoadableTypeResolver<>(dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Chart chart = env.getSource(); + return chart.getDataPlatformInstance() != null + ? chart.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final DataJob dataJob = env.getSource(); - return dataJob.getDataPlatformInstance() != null ? dataJob.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + final Chart chart = env.getSource(); + return chart.getContainer() != null + ? chart.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher( + "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ) - .type("DataJobInputOutput", typeWiring -> typeWiring - .dataFetcher("inputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("outputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getOutputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("inputDatajobs", new LoadableTypeBatchResolver<>(dataJobType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatajobs().stream() - .map(DataJob::getUrn) - .collect(Collectors.toList()))) - ); - } + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "ChartInfo", + typeWiring -> + typeWiring.dataFetcher( + "inputs", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((ChartInfo) env.getSource()) + .getInputs().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList())))); + } + + /** Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. 
*/ + private void configureTypeResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Entity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "EntityWithRelationships", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "BrowsableEntity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + browsableTypes.stream() + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "OwnerType", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + ownerTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "PlatformSchema", + typeWiring -> typeWiring.typeResolver(new PlatformSchemaUnionTypeResolver())) + .type( + "HyperParameterValueType", + typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) + .type( + "TimeSeriesAspect", + typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) + .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())); + } + + /** Configures custom type extensions leveraged within our GraphQL schema. */ + private void configureTypeExtensions(final RuntimeWiring.Builder builder) { + builder.scalar(GraphQLLong); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataFlow} type. - */ - private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataFlow", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataJob} type. + */ + private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataJob", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "dataFlow", + new LoadableTypeResolver<>( + dataFlowType, + (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataPlatformInstance() != null + ? 
dataJob.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))) + .type( + "DataJobInputOutput", + typeWiring -> + typeWiring + .dataFetcher( + "inputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "outputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getOutputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "inputDatajobs", + new LoadableTypeBatchResolver<>( + dataJobType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatajobs().stream() + .map(DataJob::getUrn) + .collect(Collectors.toList())))); + } + + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataFlow} type. + */ + private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataFlow", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final DataFlow dataFlow = env.getSource(); - return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } + final DataFlow dataFlow = env.getSource(); + return dataFlow.getDataPlatformInstance() != null + ? dataFlow.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.MLFeatureTable} type. - */ - private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { - builder - .type("MLFeatureTable", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.MLFeatureTable} type. 
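+   * <p>Feature and primary-key references on MLFeatureTableProperties are batch-loaded by URN
+   * and fall back to an empty list when the table carries no references.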
+ */ + private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "MLFeatureTable", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeatureTable entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeatureTableProperties", + typeWiring -> + typeWiring + .dataFetcher( + "mlFeatures", + new LoadableTypeBatchResolver<>( + mlFeatureType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlFeatures().stream() + .map(MLFeature::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of())) + .dataFetcher( + "mlPrimaryKeys", + new LoadableTypeBatchResolver<>( + mlPrimaryKeyType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() + != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlPrimaryKeys().stream() + .map(MLPrimaryKey::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of()))) + .type( + "MLFeatureProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, (env) -> { - final MLFeatureTable entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeatureTableProperties", typeWiring -> typeWiring - .dataFetcher("mlFeatures", - new LoadableTypeBatchResolver<>(mlFeatureType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null - ? ((MLFeatureTableProperties) env.getSource()).getMlFeatures().stream() - .map(MLFeature::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - .dataFetcher("mlPrimaryKeys", - new LoadableTypeBatchResolver<>(mlPrimaryKeyType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() != null - ? 
((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys().stream() - .map(MLPrimaryKey::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - ) - .type("MLFeatureProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLFeatureProperties) env.getSource()).getSources() == null) { + if (((MLFeatureProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLFeatureProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLPrimaryKeyProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { + } + return ((MLFeatureProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLPrimaryKeyProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> { + if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLPrimaryKeyProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLModel", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + } + return ((MLPrimaryKeyProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLModel", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModel mlModel = env.getSource(); + return mlModel.getDataPlatformInstance() != null + ? mlModel.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLModelProperties", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + mlModelGroupType, (env) -> { - final MLModel mlModel = env.getSource(); - return mlModel.getDataPlatformInstance() != null ? 
mlModel.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLModelProperties", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(mlModelGroupType, - (env) -> { - MLModelProperties properties = env.getSource(); - if (properties.getGroups() != null) { + MLModelProperties properties = env.getSource(); + if (properties.getGroups() != null) { return properties.getGroups().stream() .map(MLModelGroup::getUrn) .collect(Collectors.toList()); - } - return Collections.emptyList(); - }) - ) - ) - .type("MLModelGroup", typeWiring -> typeWiring + } + return Collections.emptyList(); + }))) + .type( + "MLModelGroup", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModelGroup entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeature", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeature entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLPrimaryKey", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLPrimaryKey entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? 
entity.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "GlossaryTerm", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "GlossaryNode", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureDomainResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Domain", + typeWiring -> + typeWiring + .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) + .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "DomainAssociation", + typeWiring -> + typeWiring.dataFetcher( + "domain", + new LoadableTypeResolver<>( + domainType, + (env) -> + ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()) + .getDomain() + .getUrn()))); + } + + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProduct", + typeWiring -> + typeWiring + .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Assertion", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final MLModelGroup entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeature", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Assertion assertion = env.getSource(); + return assertion.getDataPlatformInstance() != null + ? assertion.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); + } + + private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { + // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
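+    // "resolvedRoles" and "resolvedOwnershipTypes" are wired the same way below: each field
+    // batch-loads the URN list stored on the ActorFilter into fully resolved entities.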
+ builder.type( + "ActorFilter", + typeWiring -> + typeWiring + .dataFetcher( + "resolvedUsers", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - final MLFeature entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLPrimaryKey", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final ActorFilter filter = env.getSource(); + return filter.getUsers(); + })) + .dataFetcher( + "resolvedGroups", + new LoadableTypeBatchResolver<>( + corpGroupType, (env) -> { - final MLPrimaryKey entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))) - .type("GlossaryNode", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureDomainResolvers(final RuntimeWiring.Builder builder) { - builder.type("Domain", typeWiring -> typeWiring - .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) - .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("DomainAssociation", typeWiring -> typeWiring - .dataFetcher("domain", - new LoadableTypeResolver<>(domainType, - (env) -> ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()).getDomain().getUrn())) - ); - } - - private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProduct", typeWiring -> typeWiring - .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - } - - private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { - builder.type("Assertion", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Assertion assertion = env.getSource(); - return assertion.getDataPlatformInstance() != null ? assertion.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); - } - - private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { - // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
- builder.type("ActorFilter", typeWiring -> typeWiring.dataFetcher("resolvedUsers", - new LoadableTypeBatchResolver<>(corpUserType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getUsers(); - })).dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getGroups(); - })).dataFetcher("resolvedRoles", new LoadableTypeBatchResolver<>(dataHubRoleType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getRoles(); - })).dataFetcher("resolvedOwnershipTypes", new LoadableTypeBatchResolver<>(ownershipType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getResourceOwnersTypes(); - }))); - } - - private void configureRoleResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataHubRole", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureViewResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataHubView", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListViewsResult", typeWiring -> typeWiring - .dataFetcher("views", new LoadableTypeBatchResolver<>( - dataHubViewType, - (env) -> ((ListViewsResult) env.getSource()).getViews().stream() - .map(DataHubView::getUrn) - .collect(Collectors.toList()))) - ) - .type("CorpUserViewsSettings", typeWiring -> typeWiring - .dataFetcher("defaultView", new LoadableTypeResolver<>( + final ActorFilter filter = env.getSource(); + return filter.getGroups(); + })) + .dataFetcher( + "resolvedRoles", + new LoadableTypeBatchResolver<>( + dataHubRoleType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getRoles(); + })) + .dataFetcher( + "resolvedOwnershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getResourceOwnersTypes(); + }))); + } + + private void configureRoleResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataHubRole", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureViewResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataHubView", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListViewsResult", + typeWiring -> + typeWiring.dataFetcher( + "views", + new LoadableTypeBatchResolver<>( + dataHubViewType, + (env) -> + ((ListViewsResult) env.getSource()) + .getViews().stream() + .map(DataHubView::getUrn) + .collect(Collectors.toList())))) + .type( + "CorpUserViewsSettings", + typeWiring -> + typeWiring.dataFetcher( + "defaultView", + new LoadableTypeResolver<>( dataHubViewType, (env) -> { - final CorpUserViewsSettings settings = env.getSource(); - if (settings.getDefaultView() != null) { - return settings.getDefaultView().getUrn(); - } - return null; - } - ) - )); - } - - private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("QueryEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListQueriesResult", typeWiring -> typeWiring - .dataFetcher("queries", new LoadableTypeBatchResolver<>( - queryType, - (env) -> ((ListQueriesResult) env.getSource()).getQueries().stream() - 
.map(QueryEntity::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuerySubject", typeWiring -> typeWiring - .dataFetcher("dataset", new LoadableTypeResolver<>( - datasetType, - (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn())) - ); - - } - - private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { - builder - .type("OwnershipTypeEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListOwnershipTypesResult", typeWiring -> typeWiring - .dataFetcher("ownershipTypes", new LoadableTypeBatchResolver<>(ownershipType, - (env) -> ((ListOwnershipTypesResult) env.getSource()).getOwnershipTypes().stream() - .map(OwnershipTypeEntity::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProcessInstance", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + final CorpUserViewsSettings settings = env.getSource(); + if (settings.getDefaultView() != null) { + return settings.getDefaultView().getUrn(); + } + return null; + }))); + } + + private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "QueryEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListQueriesResult", + typeWiring -> + typeWiring.dataFetcher( + "queries", + new LoadableTypeBatchResolver<>( + queryType, + (env) -> + ((ListQueriesResult) env.getSource()) + .getQueries().stream() + .map(QueryEntity::getUrn) + .collect(Collectors.toList())))) + .type( + "QuerySubject", + typeWiring -> + typeWiring.dataFetcher( + "dataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn()))); + } + + private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { + builder + .type( + "OwnershipTypeEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListOwnershipTypesResult", + typeWiring -> + typeWiring.dataFetcher( + "ownershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> + ((ListOwnershipTypesResult) env.getSource()) + .getOwnershipTypes().stream() + .map(OwnershipTypeEntity::getUrn) + .collect(Collectors.toList())))); + } + + private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProcessInstance", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("state", new TimeSeriesAspectResolver(this.entityClient, "dataProcessInstance", - DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, DataProcessInstanceRunEventMapper::map))); - } - - private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { - builder.type("TestResult", typeWiring -> typeWiring - .dataFetcher("test", new LoadableTypeResolver<>(testType, - (env) -> { - final TestResult testResult = env.getSource(); - return testResult.getTest() != null ? 
testResult.getTest().getUrn() : null; - })) - ); - } - - private DataLoader> createDataLoader(final LoadableType graphType, final QueryContext queryContext) { - BatchLoaderContextProvider contextProvider = () -> queryContext; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); - return DataLoader.newDataLoader((keys, context) -> CompletableFuture.supplyAsync(() -> { - try { - log.debug(String.format("Batch loading entities of type: %s, keys: %s", graphType.name(), keys)); - return graphType.batchLoad(keys, context.getContext()); - } catch (Exception e) { - log.error(String.format("Failed to load Entities of type: %s, keys: %s", graphType.name(), keys) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to retrieve entities of type %s", graphType.name()), e); - } - }), loaderOptions); - } - - private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { - builder.type("IngestionSource", typeWiring -> typeWiring - .dataFetcher("executions", new IngestionSourceExecutionRequestsResolver(entityClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> { - final IngestionSource ingestionSource = env.getSource(); - return ingestionSource.getPlatform() != null ? ingestionSource.getPlatform().getUrn() : null; - }) - )); - } + .dataFetcher( + "state", + new TimeSeriesAspectResolver( + this.entityClient, + "dataProcessInstance", + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + DataProcessInstanceRunEventMapper::map))); + } + + private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "TestResult", + typeWiring -> + typeWiring.dataFetcher( + "test", + new LoadableTypeResolver<>( + testType, + (env) -> { + final TestResult testResult = env.getSource(); + return testResult.getTest() != null ? testResult.getTest().getUrn() : null; + }))); + } + + private DataLoader> createDataLoader( + final LoadableType graphType, final QueryContext queryContext) { + BatchLoaderContextProvider contextProvider = () -> queryContext; + DataLoaderOptions loaderOptions = + DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); + return DataLoader.newDataLoader( + (keys, context) -> + CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + String.format( + "Batch loading entities of type: %s, keys: %s", + graphType.name(), keys)); + return graphType.batchLoad(keys, context.getContext()); + } catch (Exception e) { + log.error( + String.format( + "Failed to load Entities of type: %s, keys: %s", + graphType.name(), keys) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to retrieve entities of type %s", graphType.name()), + e); + } + }), + loaderOptions); + } + + private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "IngestionSource", + typeWiring -> + typeWiring + .dataFetcher( + "executions", new IngestionSourceExecutionRequestsResolver(entityClient)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final IngestionSource ingestionSource = env.getSource(); + return ingestionSource.getPlatform() != null + ? 
ingestionSource.getPlatform().getUrn() + : null; + }))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 157fb10ce7078..4829194a8ce4d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -38,41 +38,41 @@ @Data public class GmsGraphQLEngineArgs { - EntityClient entityClient; - SystemEntityClient systemEntityClient; - GraphClient graphClient; - UsageClient usageClient; - AnalyticsService analyticsService; - EntityService entityService; - RecommendationsService recommendationsService; - StatefulTokenService statefulTokenService; - TimeseriesAspectService timeseriesAspectService; - EntityRegistry entityRegistry; - SecretService secretService; - NativeUserService nativeUserService; - IngestionConfiguration ingestionConfiguration; - AuthenticationConfiguration authenticationConfiguration; - AuthorizationConfiguration authorizationConfiguration; - GitVersion gitVersion; - TimelineService timelineService; - boolean supportsImpactAnalysis; - VisualConfiguration visualConfiguration; - TelemetryConfiguration telemetryConfiguration; - TestsConfiguration testsConfiguration; - DataHubConfiguration datahubConfiguration; - ViewsConfiguration viewsConfiguration; - SiblingGraphService siblingGraphService; - GroupService groupService; - RoleService roleService; - InviteTokenService inviteTokenService; - PostService postService; - ViewService viewService; - OwnershipTypeService ownershipTypeService; - SettingsService settingsService; - LineageService lineageService; - QueryService queryService; - FeatureFlags featureFlags; - DataProductService dataProductService; + EntityClient entityClient; + SystemEntityClient systemEntityClient; + GraphClient graphClient; + UsageClient usageClient; + AnalyticsService analyticsService; + EntityService entityService; + RecommendationsService recommendationsService; + StatefulTokenService statefulTokenService; + TimeseriesAspectService timeseriesAspectService; + EntityRegistry entityRegistry; + SecretService secretService; + NativeUserService nativeUserService; + IngestionConfiguration ingestionConfiguration; + AuthenticationConfiguration authenticationConfiguration; + AuthorizationConfiguration authorizationConfiguration; + GitVersion gitVersion; + TimelineService timelineService; + boolean supportsImpactAnalysis; + VisualConfiguration visualConfiguration; + TelemetryConfiguration telemetryConfiguration; + TestsConfiguration testsConfiguration; + DataHubConfiguration datahubConfiguration; + ViewsConfiguration viewsConfiguration; + SiblingGraphService siblingGraphService; + GroupService groupService; + RoleService roleService; + InviteTokenService inviteTokenService; + PostService postService; + ViewService viewService; + OwnershipTypeService ownershipTypeService; + SettingsService settingsService; + LineageService lineageService; + QueryService queryService; + FeatureFlags featureFlags; + DataProductService dataProductService; - //any fork specific args should go below this line + // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java index e7ef0c402a1de..472d9465aeee1 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java @@ -5,41 +5,42 @@ import java.util.Collection; import java.util.List; - /** - * An interface that allows the Core GMS GraphQL Engine to be extended without requiring - * code changes in the GmsGraphQLEngine class if new entities, relationships or resolvers - * need to be introduced. This is useful if you are maintaining a fork of DataHub and - * don't want to deal with merge conflicts. + * An interface that allows the Core GMS GraphQL Engine to be extended without requiring code + * changes in the GmsGraphQLEngine class if new entities, relationships or resolvers need to be + * introduced. This is useful if you are maintaining a fork of DataHub and don't want to deal with + * merge conflicts. */ public interface GmsGraphQLPlugin { /** * Initialization method that allows the plugin to instantiate + * * @param args */ void init(GmsGraphQLEngineArgs args); /** - * Return a list of schema files that contain graphql definitions - * that are served by this plugin + * Return a list of schema files that contain graphql definitions that are served by this plugin + * * @return */ List getSchemaFiles(); /** * Return a list of LoadableTypes that this plugin serves + * * @return */ Collection> getLoadableTypes(); /** - * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers. + * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific + * resolvers. + * * @param wiringBuilder : the builder being used to configure the runtime wiring * @param baseEngine : a reference to the core engine and its graphql types */ - default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) { - - } - + default void configureExtraResolvers( + final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java index 74c4c541b972b..f95727a1e8fd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static graphql.schema.idl.RuntimeWiring.*; + import com.linkedin.datahub.graphql.exception.DataHubDataFetcherExceptionHandler; import graphql.ExecutionInput; import graphql.ExecutionResult; @@ -22,152 +24,157 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; -import static graphql.schema.idl.RuntimeWiring.*; - /** - * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and executing - * GQL queries. - - *
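The GmsGraphQLPlugin contract above is easiest to see with a fork-side example. A minimal hypothetical plugin, where AcmeEntityType, acme.graphql, and the extra field wiring are invented names for illustration (the generic bounds are reconstructed, not copied from this patch):

  public class AcmeGraphQLPlugin implements GmsGraphQLPlugin {
    private AcmeEntityType acmeType; // assumed custom LoadableType
    private GraphClient graphClient;

    @Override
    public void init(GmsGraphQLEngineArgs args) {
      // Wire the custom type against the clients exposed by the core engine args
      this.acmeType = new AcmeEntityType(args.getEntityClient());
      this.graphClient = args.getGraphClient();
    }

    @Override
    public List<String> getSchemaFiles() {
      return ImmutableList.of("acme.graphql"); // extra GQL type definitions
    }

    @Override
    public Collection<? extends LoadableType<?, ?>> getLoadableTypes() {
      return ImmutableList.of(acmeType); // a DataLoader gets registered for each
    }

    @Override
    public void configureExtraResolvers(
        final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {
      wiringBuilder.type(
          "AcmeEntity",
          typeWiring ->
              typeWiring.dataFetcher(
                  "relationships", new EntityRelationshipsResultResolver(graphClient)));
    }
  }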

This class provides a {@link Builder} builder for constructing {@link GraphQL} instances provided one or more - * schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. + * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and + * executing GQL queries. + * + *

This class provides a {@link Builder} builder for constructing {@link GraphQL} instances + * provided one or more schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. * - *

In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set of variables. + *

In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set + * of variables. */ public class GraphQLEngine { - private final GraphQL _graphQL; - private final Map>> _dataLoaderSuppliers; + private final GraphQL _graphQL; + private final Map>> _dataLoaderSuppliers; - private GraphQLEngine(@Nonnull final List schemas, - @Nonnull final RuntimeWiring runtimeWiring, - @Nonnull final Map>> dataLoaderSuppliers) { + private GraphQLEngine( + @Nonnull final List schemas, + @Nonnull final RuntimeWiring runtimeWiring, + @Nonnull final Map>> dataLoaderSuppliers) { - _dataLoaderSuppliers = dataLoaderSuppliers; + _dataLoaderSuppliers = dataLoaderSuppliers; - /* - * Parse schema - */ - SchemaParser schemaParser = new SchemaParser(); - TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); - schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); + /* + * Parse schema + */ + SchemaParser schemaParser = new SchemaParser(); + TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); + schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); - /* - * Configure resolvers (data fetchers) - */ - SchemaGenerator schemaGenerator = new SchemaGenerator(); - GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + /* + * Configure resolvers (data fetchers) + */ + SchemaGenerator schemaGenerator = new SchemaGenerator(); + GraphQLSchema graphQLSchema = + schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); - /* - * Instantiate engine - */ - _graphQL = new GraphQL.Builder(graphQLSchema) + /* + * Instantiate engine + */ + _graphQL = + new GraphQL.Builder(graphQLSchema) .defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler()) .instrumentation(new TracingInstrumentation()) .build(); - } + } + + public ExecutionResult execute( + @Nonnull final String query, + @Nullable final Map variables, + @Nonnull final QueryContext context) { + /* + * Init DataLoaderRegistry - should be created for each request. + */ + DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - public ExecutionResult execute(@Nonnull final String query, - @Nullable final Map variables, - @Nonnull final QueryContext context) { - /* - * Init DataLoaderRegistry - should be created for each request. - */ - DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - - /* - * Construct execution input - */ - ExecutionInput executionInput = ExecutionInput.newExecutionInput() + /* + * Construct execution input + */ + ExecutionInput executionInput = + ExecutionInput.newExecutionInput() .query(query) .variables(variables) .dataLoaderRegistry(register) .context(context) .build(); - /* - * Execute GraphQL Query - */ - return _graphQL.execute(executionInput); - } + /* + * Execute GraphQL Query + */ + return _graphQL.execute(executionInput); + } + + public GraphQL getGraphQL() { + return _graphQL; + } + + public static Builder builder() { + return new Builder(); + } + + /** Used to construct a {@link GraphQLEngine}. */ + public static class Builder { - public GraphQL getGraphQL() { - return _graphQL; + private final List _schemas = new ArrayList<>(); + private final Map>> _loaderSuppliers = + new HashMap<>(); + private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); + + /** + * Used to add a schema file containing the GQL types resolved by the engine. + * + *

If multiple files are provided, their schemas will be merged together. + */ + public Builder addSchema(final String schema) { + _schemas.add(schema); + return this; } - public static Builder builder() { - return new Builder(); + /** + * Used to register a {@link DataLoader} to be used within the configured resolvers. + * + *

The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} + * when invoked. + * + *

If multiple loaders are registered with the name, the latter will override the former. + */ + public Builder addDataLoader( + final String name, final Function> dataLoaderSupplier) { + _loaderSuppliers.put(name, dataLoaderSupplier); + return this; } /** - * Used to construct a {@link GraphQLEngine}. + * Used to register multiple {@link DataLoader}s for use within the configured resolvers. + * + *

The included {@link Supplier} provided is expected to return a new instance of {@link + * DataLoader} when invoked. + * + *

If multiple loaders are registered with the name, the latter will override the former. */ - public static class Builder { - - private final List _schemas = new ArrayList<>(); - private final Map>> _loaderSuppliers = new HashMap<>(); - private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); - - /** - * Used to add a schema file containing the GQL types resolved by the engine. - * - * If multiple files are provided, their schemas will be merged together. - */ - public Builder addSchema(final String schema) { - _schemas.add(schema); - return this; - } - - /** - * Used to register a {@link DataLoader} to be used within the configured resolvers. - * - * The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoader(final String name, final Function> dataLoaderSupplier) { - _loaderSuppliers.put(name, dataLoaderSupplier); - return this; - } - - /** - * Used to register multiple {@link DataLoader}s for use within the configured resolvers. - * - * The included {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoaders(Map>> dataLoaderSuppliers) { - _loaderSuppliers.putAll(dataLoaderSuppliers); - return this; - } - - /** - * Used to configure the runtime wiring (data fetchers & type resolvers) - * used in resolving the Graph QL schema. - * - * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register any required - * data + type resolvers. - */ - public Builder configureRuntimeWiring(final Consumer builderFunc) { - builderFunc.accept(_runtimeWiringBuilder); - return this; - } - - /** - * Builds a {@link GraphQLEngine}. - */ - public GraphQLEngine build() { - return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); - } + public Builder addDataLoaders( + Map>> dataLoaderSuppliers) { + _loaderSuppliers.putAll(dataLoaderSuppliers); + return this; } - private DataLoaderRegistry createDataLoaderRegistry(final Map>> dataLoaderSuppliers, - final QueryContext context) { - final DataLoaderRegistry registry = new DataLoaderRegistry(); - for (String key : dataLoaderSuppliers.keySet()) { - registry.register(key, dataLoaderSuppliers.get(key).apply(context)); - } - return registry; + /** + * Used to configure the runtime wiring (data fetchers & type resolvers) used in resolving the + * Graph QL schema. + * + *

The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register + * any required data + type resolvers. + */ + public Builder configureRuntimeWiring(final Consumer builderFunc) { + builderFunc.accept(_runtimeWiringBuilder); + return this; } + /** Builds a {@link GraphQLEngine}. */ + public GraphQLEngine build() { + return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); + } + } + + private DataLoaderRegistry createDataLoaderRegistry( + final Map>> dataLoaderSuppliers, + final QueryContext context) { + final DataLoaderRegistry registry = new DataLoaderRegistry(); + for (String key : dataLoaderSuppliers.keySet()) { + registry.register(key, dataLoaderSuppliers.get(key).apply(context)); + } + return registry; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java index 4803ef08fdddc..9f110e713ed57 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java @@ -4,38 +4,25 @@ import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; - -/** - * Provided as input to GraphQL resolvers; used to carry information about GQL request context. - */ +/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */ public interface QueryContext { - /** - * Returns true if the current actor is authenticated, false otherwise. - */ - boolean isAuthenticated(); + /** Returns true if the current actor is authenticated, false otherwise. */ + boolean isAuthenticated(); - /** - * Returns the {@link Authentication} associated with the current query context. - */ - Authentication getAuthentication(); + /** Returns the {@link Authentication} associated with the current query context. */ + Authentication getAuthentication(); - /** - * Returns the current authenticated actor, null if there is none. - */ - default Actor getActor() { - return getAuthentication().getActor(); - } + /** Returns the current authenticated actor, null if there is none. */ + default Actor getActor() { + return getAuthentication().getActor(); + } - /** - * Returns the current authenticated actor, null if there is none. - */ - default String getActorUrn() { - return getActor().toUrnStr(); - } + /** Returns the current authenticated actor, null if there is none. */ + default String getActorUrn() { + return getActor().toUrnStr(); + } - /** - * Returns the authorizer used to authorize specific actions. - */ - Authorizer getAuthorizer(); + /** Returns the authorizer used to authorize specific actions. 
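Putting the Builder API above together end to end, a sketch of constructing and querying an engine. The schema string, the echo loader, and queryContext are placeholders, and the snippet assumes a QueryContext implementation is in scope along with the org.dataloader and java.util imports used in this file:

  GraphQLEngine engine =
      GraphQLEngine.builder()
          .addSchema("type Query { ping: String }")
          .addDataLoader(
              "EchoLoader",
              (QueryContext ctx) ->
                  // A fresh DataLoader per request; this one just echoes its keys
                  DataLoader.newDataLoader(
                      keys -> CompletableFuture.completedFuture(new ArrayList<>(keys))))
          .configureRuntimeWiring(
              wiring ->
                  wiring.type(
                      "Query", typeWiring -> typeWiring.dataFetcher("ping", env -> "pong")))
          .build();

  // Each execute() call builds its own DataLoaderRegistry from the registered suppliers
  ExecutionResult result = engine.execute("{ ping }", Collections.emptyMap(), queryContext);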
*/ + Authorizer getAuthorizer(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java index df7f0884852d4..425c86ab0f0f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class RelationshipKey { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java index d51de6652bb0a..c3ad37ddcb201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java @@ -10,11 +10,7 @@ public class TimeSeriesAspectArgs { private Long count; private TimeRange timeRange; - public TimeSeriesAspectArgs( - String urn, - String aspectName, - Long count, - TimeRange timeRange) { + public TimeSeriesAspectArgs(String urn, String aspectName, Long count, TimeRange timeRange) { this.urn = urn; this.aspectName = aspectName; this.count = count; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java index 5f703f520bde4..c7302c9772c5e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java @@ -3,7 +3,6 @@ import com.linkedin.usage.UsageTimeRange; import lombok.Data; - @Data public class UsageStatsKey { private String resource; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java index b0c0436ffd891..6f81de5f04d8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java @@ -8,7 +8,7 @@ public class VersionedAspectKey { private String urn; private Long version; - public VersionedAspectKey(String urn, String aspectName, Long version) { + public VersionedAspectKey(String urn, String aspectName, Long version) { this.urn = urn; this.version = version; this.aspectName = aspectName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index a78d89e59bc7b..22ee4d4d4845c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.codec.JacksonDataCodec; @@ -26,68 +28,84 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @AllArgsConstructor public class 
WeaklyTypedAspectsResolver implements DataFetcher>> { - private final EntityClient _entityClient; - private final EntityRegistry _entityRegistry; - private static final JacksonDataCodec CODEC = new JacksonDataCodec(); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + private static final JacksonDataCodec CODEC = new JacksonDataCodec(); - private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { - return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); - } + private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { + return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - List results = new ArrayList<>(); + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + List results = new ArrayList<>(); - final QueryContext context = environment.getContext(); - final String urnStr = ((Entity) environment.getSource()).getUrn(); - final EntityType entityType = ((Entity) environment.getSource()).getType(); - final String entityTypeName = EntityTypeMapper.getName(entityType); - final AspectParams input = bindArgument(environment.getArgument("input"), AspectParams.class); + final QueryContext context = environment.getContext(); + final String urnStr = ((Entity) environment.getSource()).getUrn(); + final EntityType entityType = ((Entity) environment.getSource()).getType(); + final String entityTypeName = EntityTypeMapper.getName(entityType); + final AspectParams input = + bindArgument(environment.getArgument("input"), AspectParams.class); - EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); - entitySpec.getAspectSpecs().stream().filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)).forEach(aspectSpec -> { - try { - Urn urn = Urn.createFromString(urnStr); - RawAspect result = new RawAspect(); - EntityResponse entityResponse = - _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(aspectSpec.getName()), context.getAuthentication()).get(urn); - if (entityResponse == null || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { + EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); + entitySpec.getAspectSpecs().stream() + .filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)) + .forEach( + aspectSpec -> { + try { + Urn urn = Urn.createFromString(urnStr); + RawAspect result = new RawAspect(); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(aspectSpec.getName()), + context.getAuthentication()) + .get(urn); + if (entityResponse == null + || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { return; - } + } - DataMap resolvedAspect = entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); - if (resolvedAspect == null) { + DataMap resolvedAspect = + entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); + if (resolvedAspect == null) { return; - } + } - result.setPayload(CODEC.mapToString(resolvedAspect)); - result.setAspectName(aspectSpec.getName()); + result.setPayload(CODEC.mapToString(resolvedAspect)); + result.setAspectName(aspectSpec.getName()); - DataMap renderSpec = aspectSpec.getRenderSpec(); + DataMap 
renderSpec = aspectSpec.getRenderSpec(); - if (renderSpec != null) { + if (renderSpec != null) { AspectRenderSpec resultRenderSpec = new AspectRenderSpec(); resultRenderSpec.setDisplayType(renderSpec.getString("displayType")); resultRenderSpec.setDisplayName(renderSpec.getString("displayName")); resultRenderSpec.setKey(renderSpec.getString("key")); result.setRenderSpec(resultRenderSpec); - } + } - results.add(result); - } catch (IOException | RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + aspectSpec.getName() + " for urn " + urnStr + " ", e); - } - }); - return results; + results.add(result); + } catch (IOException | RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + + aspectSpec.getName() + + " for urn " + + urnStr + + " ", + e); + } + }); + return results; }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java index 7728dcae5d8ee..3bf932c4281e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java @@ -7,18 +7,17 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - public class AnalyticsChartTypeResolver implements TypeResolver { - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TimeSeriesChart) { - return env.getSchema().getObjectType("TimeSeriesChart"); - } else if (env.getObject() instanceof BarChart) { - return env.getSchema().getObjectType("BarChart"); - } else if (env.getObject() instanceof TableChart) { - return env.getSchema().getObjectType("TableChart"); - } else { - throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TimeSeriesChart) { + return env.getSchema().getObjectType("TimeSeriesChart"); + } else if (env.getObject() instanceof BarChart) { + return env.getSchema().getObjectType("BarChart"); + } else if (env.getObject() instanceof TableChart) { + return env.getSchema().getObjectType("TableChart"); + } else { + throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index b8a5dd1121a10..3f635872747a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -27,15 +27,11 @@ import java.util.Collections; import java.util.List; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. 
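AnalyticsChartTypeResolver above is only half of the picture: a TypeResolver has to be attached to the abstract GraphQL type so graphql-java can pick the concrete object type at runtime. A sketch of that wiring, assuming the union is named AnalyticsChart with the members matching the instanceof checks above:

  builder.type(
      "AnalyticsChart",
      typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver()));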
*/ @Slf4j @RequiredArgsConstructor public final class GetChartsResolver implements DataFetcher> { @@ -47,15 +43,17 @@ public final class GetChartsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { Authentication authentication = ResolverUtils.getAuthentication(environment); try { - return ImmutableList.of(AnalyticsChartGroup.builder() - .setGroupId("DataHubUsageAnalytics") - .setTitle("DataHub Usage Analytics") - .setCharts(getProductAnalyticsCharts(authentication)) - .build(), AnalyticsChartGroup.builder() - .setGroupId("GlobalMetadataAnalytics") - .setTitle("Data Landscape Summary") - .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) - .build()); + return ImmutableList.of( + AnalyticsChartGroup.builder() + .setGroupId("DataHubUsageAnalytics") + .setTitle("DataHub Usage Analytics") + .setCharts(getProductAnalyticsCharts(authentication)) + .build(), + AnalyticsChartGroup.builder() + .setGroupId("GlobalMetadataAnalytics") + .setTitle("Data Landscape Summary") + .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) + .build()); } catch (Exception e) { log.error("Failed to retrieve analytics charts!", e); return Collections.emptyList(); // Simply return nothing. @@ -63,85 +61,115 @@ public final List get(DataFetchingEnvironment environment) } private TimeSeriesChart getActiveUsersTimeSeriesChart( - final DateTime beginning, - final DateTime end, - final String title, - final DateInterval interval - ) { + final DateTime beginning, + final DateTime end, + final String title, + final DateInterval interval) { final DateRange dateRange = - new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); + new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); final List timeSeriesLines = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), dateRange, interval, - Optional.empty(), ImmutableMap.of(), Collections.emptyMap(), Optional.of("browserId")); + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + dateRange, + interval, + Optional.empty(), + ImmutableMap.of(), + Collections.emptyMap(), + Optional.of("browserId")); return TimeSeriesChart.builder() - .setTitle(title) - .setDateRange(dateRange) - .setInterval(interval) - .setLines(timeSeriesLines) - .build(); + .setTitle(title) + .setDateRange(dateRange) + .setInterval(interval) + .setLines(timeSeriesLines) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ - private List getProductAnalyticsCharts(Authentication authentication) throws Exception { + /** TODO: Config Driven Charts Instead of Hardcoded. 
*/ + private List getProductAnalyticsCharts(Authentication authentication) + throws Exception { final List charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); - charts.add(getActiveUsersTimeSeriesChart( + charts.add( + getActiveUsersTimeSeriesChart( startOfNextWeek.minusWeeks(10), startOfNextWeek.minusMillis(1), "Weekly Active Users", - DateInterval.WEEK - )); - charts.add(getActiveUsersTimeSeriesChart( + DateInterval.WEEK)); + charts.add( + getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), startOfNextMonth.minusMillis(1), "Monthly Active Users", - DateInterval.MONTH - )); + DateInterval.MONTH)); String searchesTitle = "Searches Last Week"; DateInterval dailyInterval = DateInterval.DAY; String searchEventType = "SearchEvent"; final List searchesTimeseries = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), trailingWeekDateRange, dailyInterval, - Optional.empty(), ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + trailingWeekDateRange, + dailyInterval, + Optional.empty(), + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), Optional.empty()); - charts.add(TimeSeriesChart.builder() - .setTitle(searchesTitle) - .setDateRange(trailingWeekDateRange) - .setInterval(dailyInterval) - .setLines(searchesTimeseries) - .build()); + charts.add( + TimeSeriesChart.builder() + .setTitle(searchesTitle) + .setDateRange(trailingWeekDateRange) + .setInterval(dailyInterval) + .setLines(searchesTimeseries) + .build()); final String topSearchTitle = "Top Search Queries"; final List columns = ImmutableList.of("Query", "Count"); final List topSearchQueries = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "query.keyword", ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), - Optional.empty(), 10, AnalyticsUtil::buildCellWithSearchLandingPage); - charts.add(TableChart.builder().setTitle(topSearchTitle).setColumns(columns).setRows(topSearchQueries).build()); + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "query.keyword", + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithSearchLandingPage); + charts.add( + TableChart.builder() + .setTitle(topSearchTitle) + .setColumns(columns) + .setRows(topSearchQueries) + .build()); final String sectionViewsTitle = "Section Views across Entity Types"; final List sectionViewsPerEntityType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "section.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), Collections.emptyMap(), - Optional.empty(), true); - charts.add(BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); + ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), + Collections.emptyMap(), + Optional.empty(), + true); + 
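A small worked example of the DateRange convention used by getActiveUsersTimeSeriesChart above: both boundaries are epoch milliseconds serialized as strings, built here from org.joda.time.DateTime as imported in this file:

  DateTime end = DateTime.now();
  DateTime start = end.minusWeeks(1);
  // Equivalent in shape to the trailingWeekDateRange used for the charts below
  DateRange trailingWeek =
      new DateRange(String.valueOf(start.getMillis()), String.valueOf(end.getMillis()));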
charts.add( + BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); final String actionsByTypeTitle = "Actions by Entity Type"; final List eventsByEventType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "actionType.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), Collections.emptyMap(), Optional.empty(), + ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), + Collections.emptyMap(), + Optional.empty(), true); charts.add(BarChart.builder().setTitle(actionsByTypeTitle).setBars(eventsByEventType).build()); @@ -149,61 +177,128 @@ private List getProductAnalyticsCharts(Authentication authentica final List columns5 = ImmutableList.of("Dataset", "#Views"); final List topViewedDatasets = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "entityUrn.keyword", ImmutableMap.of("type", ImmutableList.of("EntityViewEvent"), "entityType.keyword", - ImmutableList.of(EntityType.DATASET.name())), Collections.emptyMap(), Optional.empty(), 10, + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "entityUrn.keyword", + ImmutableMap.of( + "type", + ImmutableList.of("EntityViewEvent"), + "entityType.keyword", + ImmutableList.of(EntityType.DATASET.name())), + Collections.emptyMap(), + Optional.empty(), + 10, AnalyticsUtil::buildCellWithEntityLandingPage); - AnalyticsUtil.hydrateDisplayNameForTable(_entityClient, topViewedDatasets, Constants.DATASET_ENTITY_NAME, - ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), AnalyticsUtil::getDatasetName, authentication); - charts.add(TableChart.builder().setTitle(topViewedTitle).setColumns(columns5).setRows(topViewedDatasets).build()); + AnalyticsUtil.hydrateDisplayNameForTable( + _entityClient, + topViewedDatasets, + Constants.DATASET_ENTITY_NAME, + ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), + AnalyticsUtil::getDatasetName, + authentication); + charts.add( + TableChart.builder() + .setTitle(topViewedTitle) + .setColumns(columns5) + .setRows(topViewedDatasets) + .build()); return charts; } - private List getGlobalMetadataAnalyticsCharts(Authentication authentication) throws Exception { + private List getGlobalMetadataAnalyticsCharts(Authentication authentication) + throws Exception { final List charts = new ArrayList<>(); // Chart 1: Entities per domain final List entitiesPerDomain = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("domains.keyword", "platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerDomain, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); - AnalyticsUtil.hydrateDisplayNameForSegments(_entityClient, entitiesPerDomain, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("domains.keyword", "platform.keyword"), + Collections.emptyMap(), + 
ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerDomain, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); + AnalyticsUtil.hydrateDisplayNameForSegments( + _entityClient, + entitiesPerDomain, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerDomain.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); + charts.add( + BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); } // Chart 2: Entities per platform final List entitiesPerPlatform = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerPlatform, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerPlatform, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerPlatform.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Platform").setBars(entitiesPerPlatform).build()); + charts.add( + BarChart.builder() + .setTitle("Entities per Platform") + .setBars(entitiesPerPlatform) + .build()); } // Chart 3: Entities per term final List entitiesPerTerm = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("glossaryTerms.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerTerm, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("glossaryTerms.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerTerm, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); if (!entitiesPerTerm.isEmpty()) { charts.add(BarChart.builder().setTitle("Entities per Term").setBars(entitiesPerTerm).build()); } // Chart 4: Entities per fabric type final List entitiesPerEnv = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("origin.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", 
ImmutableList.of("true")), Optional.empty(), false); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("origin.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); if (entitiesPerEnv.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); + charts.add( + BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); } return charts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java index c631a13b0bcb6..7000ab7adff5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java @@ -14,15 +14,11 @@ import java.util.Map; import java.util.Optional; import java.util.function.Function; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. */ @RequiredArgsConstructor @Slf4j public final class GetHighlightsResolver implements DataFetcher> { @@ -40,69 +36,72 @@ public final List get(DataFetchingEnvironment environment) throws Exc } private Highlight getTimeBasedHighlight( - final String title, - final String changeString, - final DateTime endDateTime, - final Function periodStartFunc - ) { + final String title, + final String changeString, + final DateTime endDateTime, + final Function periodStartFunc) { DateTime startDate = periodStartFunc.apply(endDateTime); DateTime timeBeforeThat = periodStartFunc.apply(startDate); - DateRange dateRangeThis = new DateRange( - String.valueOf(startDate.getMillis()), - String.valueOf(endDateTime.getMillis()) - ); - DateRange dateRangeLast = new DateRange( - String.valueOf(timeBeforeThat.getMillis()), - String.valueOf(startDate.getMillis()) - ); - - int activeUsersThisRange = _analyticsService.getHighlights( + DateRange dateRangeThis = + new DateRange( + String.valueOf(startDate.getMillis()), String.valueOf(endDateTime.getMillis())); + DateRange dateRangeLast = + new DateRange( + String.valueOf(timeBeforeThat.getMillis()), String.valueOf(startDate.getMillis())); + + int activeUsersThisRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeThis), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); - int activeUsersLastRange = _analyticsService.getHighlights( + Optional.of("browserId")); + int activeUsersLastRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeLast), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); + Optional.of("browserId")); String bodyText = ""; if (activeUsersLastRange > 0) { - double percentChange = (double) (activeUsersThisRange - activeUsersLastRange) - / (double) activeUsersLastRange * 100; + double percentChange = + (double) (activeUsersThisRange - activeUsersLastRange) + / (double) activeUsersLastRange + * 100; String directionChange = percentChange > 0 ? 
"increase" : "decrease"; - bodyText = Double.isInfinite(percentChange) ? "" + bodyText = + Double.isInfinite(percentChange) + ? "" : String.format(changeString, percentChange, directionChange); } - return Highlight.builder().setTitle(title).setValue(activeUsersThisRange).setBody(bodyText).build(); + return Highlight.builder() + .setTitle(title) + .setValue(activeUsersThisRange) + .setBody(bodyText) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ + /** TODO: Config Driven Charts Instead of Hardcoded. */ private List getHighlights() { final List highlights = new ArrayList<>(); DateTime endDate = DateTime.now(); - highlights.add(getTimeBasedHighlight( + highlights.add( + getTimeBasedHighlight( "Weekly Active Users", "%.2f%% %s from last week", endDate, - (date) -> date.minusWeeks(1) - )); - highlights.add(getTimeBasedHighlight( + (date) -> date.minusWeeks(1))); + highlights.add( + getTimeBasedHighlight( "Monthly Active Users", "%.2f%% %s from last month", endDate, - (date) -> date.minusMonths(1) - )); + (date) -> date.minusMonths(1))); // Entity metdata statistics getEntityMetadataStats("Datasets", EntityType.DATASET).ifPresent(highlights::add); @@ -121,10 +120,13 @@ private Optional getEntityMetadataStats(String title, EntityType enti if (numEntities == 0) { return Optional.empty(); } - int numEntitiesWithOwners = getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); - int numEntitiesWithTags = getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); + int numEntitiesWithOwners = + getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); + int numEntitiesWithTags = + getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); int numEntitiesWithGlossaryTerms = - getNumEntitiesFiltered(index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); + getNumEntitiesFiltered( + index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); int numEntitiesWithDescription = getNumEntitiesFiltered(index, ImmutableMap.of("hasDescription", ImmutableList.of("true"))); @@ -137,22 +139,36 @@ private Optional getEntityMetadataStats(String title, EntityType enti if (entityType == EntityType.DOMAIN) { // Don't show percent with domain when asking for stats regarding domains bodyText = - String.format("%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription); + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription); } else { int numEntitiesWithDomains = getNumEntitiesFiltered(index, ImmutableMap.of("hasDomain", ImmutableList.of("true"))); double percentWithDomains = 100.0 * numEntitiesWithDomains / numEntities; - bodyText = String.format( - "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription, percentWithDomains); + bodyText = + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription, + percentWithDomains); } } - return 
Optional.of(Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); + return Optional.of( + Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); } private int getNumEntitiesFiltered(String index, Map> filters) { - return _analyticsService.getHighlights(index, Optional.empty(), filters, - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty()); + return _analyticsService.getHighlights( + index, + Optional.empty(), + filters, + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index f61c2eb77739b..31a8359f8f0e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.analytics.resolver; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -30,12 +32,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. 
*/ @RequiredArgsConstructor @Slf4j public final class GetMetadataAnalyticsResolver implements DataFetcher> { @@ -45,7 +42,8 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final Authentication authentication = ResolverUtils.getAuthentication(environment); - final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); + final MetadataAnalyticsInput input = + bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); try { final AnalyticsChartGroup group = new AnalyticsChartGroup(); @@ -59,7 +57,8 @@ public final List get(DataFetchingEnvironment environment) } } - private List getCharts(MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List getCharts( + MetadataAnalyticsInput input, Authentication authentication) throws Exception { final List charts = new ArrayList<>(); List entities = Collections.emptyList(); @@ -77,48 +76,76 @@ private List getCharts(MetadataAnalyticsInput input, Authenticat filter = QueryUtils.newFilter("domains.keyword", input.getDomain()); } - SearchResult searchResult = _entityClient.searchAcrossEntities(entities, query, filter, 0, 0, - null, null, authentication); + SearchResult searchResult = + _entityClient.searchAcrossEntities( + entities, query, filter, 0, 0, null, null, authentication); - List aggregationMetadataList = searchResult.getMetadata().getAggregations(); + List aggregationMetadataList = + searchResult.getMetadata().getAggregations(); Optional domainAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("domains")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("domains")) + .findFirst(); if (StringUtils.isEmpty(input.getDomain()) && domainAggregation.isPresent()) { List domainChart = buildBarChart(domainAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, domainChart, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + domainChart, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); } Optional platformAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("platform")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("platform")) + .findFirst(); if (platformAggregation.isPresent()) { List platformChart = buildBarChart(platformAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, platformChart, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + platformChart, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); + charts.add( + BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); } Optional termAggregation = - 
-        aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("glossaryTerms")).findFirst();
+        aggregationMetadataList.stream()
+            .filter(metadata -> metadata.getName().equals("glossaryTerms"))
+            .findFirst();
 
     if (termAggregation.isPresent()) {
       List<NamedBar> termChart = buildBarChart(termAggregation.get());
-      AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, termChart, Constants.GLOSSARY_TERM_ENTITY_NAME,
-          ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication);
+      AnalyticsUtil.hydrateDisplayNameForBars(
+          _entityClient,
+          termChart,
+          Constants.GLOSSARY_TERM_ENTITY_NAME,
+          ImmutableSet.of(
+              Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME),
+          AnalyticsUtil::getTermName,
+          authentication);
       charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build());
     }
 
     Optional<AggregationMetadata> envAggregation =
-        aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("origin")).findFirst();
+        aggregationMetadataList.stream()
+            .filter(metadata -> metadata.getName().equals("origin"))
+            .findFirst();
 
     if (envAggregation.isPresent()) {
       List<NamedBar> termChart = buildBarChart(envAggregation.get());
       if (termChart.size() > 1) {
-        charts.add(BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build());
+        charts.add(
+            BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build());
       }
     }
 
@@ -126,16 +153,20 @@ private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, Authenticat
   }
 
   private List<NamedBar> buildBarChart(AggregationMetadata aggregation) {
-    return aggregation.getAggregations()
-        .entrySet()
-        .stream()
+    return aggregation.getAggregations().entrySet().stream()
         .sorted(Collections.reverseOrder(Map.Entry.comparingByValue()))
        .limit(10)
-        .map(entry -> NamedBar.builder()
-            .setName(entry.getKey())
-            .setSegments(ImmutableList.of(
-                BarSegment.builder().setLabel("#Entities").setValue(entry.getValue().intValue()).build()))
-            .build())
+        .map(
+            entry ->
+                NamedBar.builder()
+                    .setName(entry.getKey())
+                    .setSegments(
+                        ImmutableList.of(
+                            BarSegment.builder()
+                                .setLabel("#Entities")
+                                .setValue(entry.getValue().intValue())
+                                .build()))
+                    .build())
         .collect(Collectors.toList());
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java
index 8e3bffc9ccf08..c7f5c0bbc63eb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java
@@ -3,20 +3,17 @@
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
 
-
-/**
- * Returns true if analytics feature flag is enabled, false otherwise.
- */
+/** Returns true if analytics feature flag is enabled, false otherwise. */
 public class IsAnalyticsEnabledResolver implements DataFetcher<Boolean> {
 
   private final Boolean _isAnalyticsEnabled;
 
   public IsAnalyticsEnabledResolver(final Boolean isAnalyticsEnabled) {
-      _isAnalyticsEnabled = isAnalyticsEnabled;
+    _isAnalyticsEnabled = isAnalyticsEnabled;
   }
 
   @Override
   public final Boolean get(DataFetchingEnvironment environment) throws Exception {
-      return _isAnalyticsEnabled;
+    return _isAnalyticsEnabled;
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java
index 4135a7b0da148..03333bda05f61 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java
@@ -40,7 +40,6 @@
 import org.opensearch.search.aggregations.metrics.Cardinality;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class AnalyticsService {
@@ -72,25 +71,35 @@ public String getUsageIndexName() {
     return _indexConvention.getIndexName(DATAHUB_USAGE_EVENT_INDEX);
   }
 
-  public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange, DateInterval granularity,
+  public List<NamedLine> getTimeseriesChart(
+      String indexName,
+      DateRange dateRange,
+      DateInterval granularity,
       Optional<String> dimension, // Length 1 for now
-      Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn) {
+      Map<String, List<String>> filters,
+      Map<String, List<String>> mustNotFilters,
+      Optional<String> uniqueOn) {
     log.debug(
-        String.format("Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,",
-            indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + String.format("filters: %s, uniqueOn: %s", filters,
-            uniqueOn));
-
-    AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange));
-
-    AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(DATE_HISTOGRAM)
-        .field("timestamp")
-        .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase()));
+        String.format(
+                "Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,",
+                indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension)
+            + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));
+
+    AggregationBuilder filteredAgg =
+        getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange));
+
+    AggregationBuilder dateHistogram =
+        AggregationBuilders.dateHistogram(DATE_HISTOGRAM)
+            .field("timestamp")
+            .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase()));
     uniqueOn.ifPresent(s -> dateHistogram.subAggregation(getUniqueQuery(s)));
 
     if (dimension.isPresent()) {
       filteredAgg.subAggregation(
-          AggregationBuilders.terms(DIMENSION).field(dimension.get()).subAggregation(dateHistogram));
+          AggregationBuilders.terms(DIMENSION)
+              .field(dimension.get())
+              .subAggregation(dateHistogram));
     } else {
       filteredAgg.subAggregation(dateHistogram);
     }
@@ -99,39 +108,55 @@ public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange,
     Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations();
     try {
       if (dimension.isPresent()) {
-        return aggregationResult.<Terms>get(DIMENSION).getBuckets()
-            .stream()
-            .map(bucket -> new NamedLine(bucket.getKeyAsString(),
-                extractPointsFromAggregations(bucket.getAggregations(), uniqueOn.isPresent())))
+        return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream()
+            .map(
+                bucket ->
+                    new NamedLine(
+                        bucket.getKeyAsString(),
+                        extractPointsFromAggregations(
+                            bucket.getAggregations(), uniqueOn.isPresent())))
             .collect(Collectors.toList());
       } else {
         return ImmutableList.of(
-            new NamedLine("total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent())));
+            new NamedLine(
+                "total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent())));
       }
     } catch (Exception e) {
-      log.error(String.format("Caught exception while getting time series chart: %s", e.getMessage()));
+      log.error(
+          String.format("Caught exception while getting time series chart: %s", e.getMessage()));
       return ImmutableList.of();
     }
   }
 
   private int extractCount(MultiBucketsAggregation.Bucket bucket, boolean didUnique) {
-    return didUnique ? (int) bucket.getAggregations().<Cardinality>get(UNIQUE).getValue() : (int) bucket.getDocCount();
+    return didUnique
+        ? (int) bucket.getAggregations().<Cardinality>get(UNIQUE).getValue()
+        : (int) bucket.getDocCount();
   }
 
-  private List<NumericDataPoint> extractPointsFromAggregations(Aggregations aggregations, boolean didUnique) {
-    return aggregations.get(DATE_HISTOGRAM).getBuckets()
-        .stream()
-        .map(bucket -> new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique)))
+  private List<NumericDataPoint> extractPointsFromAggregations(
+      Aggregations aggregations, boolean didUnique) {
+    return aggregations.get(DATE_HISTOGRAM).getBuckets().stream()
+        .map(
+            bucket ->
+                new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique)))
        .collect(Collectors.toList());
   }
 
-  public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRange, List<String> dimensions,
+  public List<NamedBar> getBarChart(
+      String indexName,
+      Optional<DateRange> dateRange,
+      List<String> dimensions,
       // Length 1 or 2
-      Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn,
+      Map<String, List<String>> filters,
+      Map<String, List<String>> mustNotFilters,
+      Optional<String> uniqueOn,
       boolean showMissing) {
     log.debug(
-        String.format("Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", indexName, dateRange,
-            dimensions) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));
+        String.format(
+                "Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,",
+                indexName, dateRange, dimensions)
+            + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));
 
     assert (dimensions.size() == 1 || dimensions.size() == 2);
     AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange);
@@ -142,7 +167,8 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang
     }
 
     if (dimensions.size() == 2) {
-      TermsAggregationBuilder secondTermAgg = AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1));
+      TermsAggregationBuilder secondTermAgg =
+          AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1));
       if (showMissing) {
         secondTermAgg.missing(NA);
       }
@@ -161,14 +187,24 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang
         List<BarSegment> barSegments =
             extractBarSegmentsFromAggregations(aggregationResult, DIMENSION, uniqueOn.isPresent());
         return barSegments.stream()
-            .map(segment -> new NamedBar(segment.getLabel(),
-                ImmutableList.of(BarSegment.builder().setLabel("Count").setValue(segment.getValue()).build())))
+            .map(
+                segment ->
+                    new NamedBar(
+                        segment.getLabel(),
+                        ImmutableList.of(
+                            BarSegment.builder()
+                                .setLabel("Count")
+                                .setValue(segment.getValue())
+                                .build())))
             .collect(Collectors.toList());
       } else {
-        return aggregationResult.<Terms>get(DIMENSION).getBuckets()
-            .stream()
-            .map(bucket -> new NamedBar(bucket.getKeyAsString(),
-                extractBarSegmentsFromAggregations(bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent())))
+        return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream()
+            .map(
+                bucket ->
+                    new NamedBar(
+                        bucket.getKeyAsString(),
+                        extractBarSegmentsFromAggregations(
+                            bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent())))
             .collect(Collectors.toList());
       }
     } catch (Exception e) {
@@ -177,31 +213,41 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang
     }
   }
 
-  private List<BarSegment> extractBarSegmentsFromAggregations(Aggregations aggregations, String aggregationKey,
-      boolean didUnique) {
-    return aggregations.<Terms>get(aggregationKey).getBuckets()
-        .stream()
+  private List<BarSegment> extractBarSegmentsFromAggregations(
+      Aggregations aggregations, String aggregationKey, boolean didUnique) {
+    return aggregations.<Terms>get(aggregationKey).getBuckets().stream()
         .map(bucket -> new BarSegment(bucket.getKeyAsString(), extractCount(bucket, didUnique)))
         .collect(Collectors.toList());
   }
 
   public Row buildRow(String groupByValue, Function<String, Cell> groupByValueToCell, int count) {
     List<String> values = ImmutableList.of(groupByValue, String.valueOf(count));
-    List<Cell> cells = ImmutableList.of(groupByValueToCell.apply(groupByValue),
-        Cell.builder().setValue(String.valueOf(count)).build());
+    List<Cell> cells =
+        ImmutableList.of(
+            groupByValueToCell.apply(groupByValue),
+            Cell.builder().setValue(String.valueOf(count)).build());
     return new Row(values, cells);
   }
 
-  public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRange, String groupBy,
-      Map<String, List<String>> filters, Map<String, List<String>> mustNotFilters, Optional<String> uniqueOn,
-      int maxRows, Function<String, Cell> groupByValueToCell) {
+  public List<Row> getTopNTableChart(
+      String indexName,
+      Optional<DateRange> dateRange,
+      String groupBy,
+      Map<String, List<String>> filters,
+      Map<String, List<String>> mustNotFilters,
+      Optional<String> uniqueOn,
+      int maxRows,
+      Function<String, Cell> groupByValueToCell) {
     log.debug(
-        String.format("Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", indexName, dateRange,
-            groupBy) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));
+        String.format(
+                "Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s",
+                indexName, dateRange, groupBy)
+            + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));
 
     AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange);
-    TermsAggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows);
+    TermsAggregationBuilder termAgg =
+        AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows);
     if (uniqueOn.isPresent()) {
       termAgg.order(BucketOrder.aggregation(UNIQUE, false));
       termAgg.subAggregation(getUniqueQuery(uniqueOn.get()));
@@ -212,10 +258,13 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan
     Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations();
 
     try {
-      return aggregationResult.<Terms>get(DIMENSION).getBuckets()
-          .stream()
-          .map(bucket -> buildRow(bucket.getKeyAsString(), groupByValueToCell,
-              extractCount(bucket, uniqueOn.isPresent())))
+      return aggregationResult.<Terms>get(DIMENSION).getBuckets().stream()
+          .map(
+              bucket ->
+                  buildRow(
+                      bucket.getKeyAsString(),
+                      groupByValueToCell,
+                      extractCount(bucket, uniqueOn.isPresent())))
           .collect(Collectors.toList());
     } catch (Exception e) {
       log.error(String.format("Caught exception while getting top n chart: %s", e.getMessage()));
@@ -223,11 +272,16 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan
     }
   }
 
-  public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<String, List<String>> filters,
-      Map<String, List<String>> mustNotFilters,
-      Optional<String> uniqueOn) {
+  public int getHighlights(
+      String indexName,
+      Optional<DateRange> dateRange,
+      Map<String, List<String>> filters,
+      Map<String, List<String>> mustNotFilters,
+      Optional<String> uniqueOn) {
     log.debug(
-        String.format("Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + String.format(
-            "filters: %s, uniqueOn: %s", filters, uniqueOn));
+        String.format(
+                "Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange)
+            + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));
 
     AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange);
     uniqueOn.ifPresent(s -> filteredAgg.subAggregation(getUniqueQuery(s)));
@@ -246,7 +300,8 @@ public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<S
         .<Filter>get(FILTERED);
     } catch (Exception e) {
@@ -266,11 +322,14 @@ private Filter executeAndExtract(SearchRequest searchRequest) {
     }
   }
 
-  private AggregationBuilder getFilteredAggregation(Map<String, List<String>> mustFilters,
-      Map<String, List<String>> mustNotFilters, Optional<DateRange> dateRange) {
+  private AggregationBuilder getFilteredAggregation(
+      Map<String, List<String>> mustFilters,
+      Map<String, List<String>> mustNotFilters,
+      Optional<DateRange> dateRange) {
     BoolQueryBuilder filteredQuery = QueryBuilders.boolQuery();
     mustFilters.forEach((key, values) -> filteredQuery.must(QueryBuilders.termsQuery(key, values)));
-    mustNotFilters.forEach((key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values)));
+    mustNotFilters.forEach(
+        (key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values)));
     dateRange.ifPresent(range -> filteredQuery.must(dateRangeQuery(range)));
     return AggregationBuilders.filter(FILTERED, filteredQuery);
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java
index 42f4e25c010ef..be7f4d2f0897a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java
@@ -31,16 +31,17 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.tuple.Pair;
 
-
 @Slf4j
 public class AnalyticsUtil {
-  private AnalyticsUtil() {
-  }
+  private AnalyticsUtil() {}
 
   public static Cell buildCellWithSearchLandingPage(String query) {
     Cell result = new Cell();
     result.setValue(query);
-    result.setLinkParams(LinkParams.builder().setSearchParams(SearchParams.builder().setQuery(query).build()).build());
+    result.setLinkParams(
+        LinkParams.builder()
+            .setSearchParams(SearchParams.builder().setQuery(query).build())
+            .build());
     return result;
   }
 
@@ -50,70 +51,138 @@ public static Cell buildCellWithEntityLandingPage(String urn) {
     try {
       Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn));
       result.setEntity(entity);
-      result.setLinkParams(LinkParams.builder()
-          .setEntityProfileParams(EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build())
-          .build());
+      result.setLinkParams(
+          LinkParams.builder()
+              .setEntityProfileParams(
+                  EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build())
+              .build());
     } catch (URISyntaxException e) {
       log.error("Malformed urn {} in table", urn, e);
     }
     return result;
   }
 
-  public static void hydrateDisplayNameForBars(EntityClient entityClient, List<NamedBar> bars, String entityName,
-      Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName,
-      Authentication authentication) throws Exception {
+  public static void hydrateDisplayNameForBars(
+      EntityClient entityClient,
+      List<NamedBar> bars,
+      String entityName,
+      Set<String> aspectNames,
+      Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
     Map<String, String> urnToDisplayName =
-        getUrnToDisplayName(entityClient, bars.stream().map(NamedBar::getName).collect(Collectors.toList()), entityName,
-            aspectNames, extractDisplayName, authentication);
+        getUrnToDisplayName(
+            entityClient,
+            bars.stream().map(NamedBar::getName).collect(Collectors.toList()),
+            entityName,
+            aspectNames,
+            extractDisplayName,
+            authentication);
     // For each urn, try to find it's name, use the urn if not found
-    bars.forEach(namedBar -> namedBar.setName(urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName())));
+    bars.forEach(
+        namedBar ->
+            namedBar.setName(
+                urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName())));
   }
 
-  public static void hydrateDisplayNameForSegments(EntityClient entityClient, List<NamedBar> bars, String entityName,
-      Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName,
-      Authentication authentication) throws Exception {
-    Map<String, String> urnToDisplayName = getUrnToDisplayName(entityClient,
-        bars.stream().flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)).collect(Collectors.toList()),
-        entityName, aspectNames, extractDisplayName, authentication);
+  public static void hydrateDisplayNameForSegments(
+      EntityClient entityClient,
+      List<NamedBar> bars,
+      String entityName,
+      Set<String> aspectNames,
+      Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
+    Map<String, String> urnToDisplayName =
+        getUrnToDisplayName(
+            entityClient,
+            bars.stream()
+                .flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel))
+                .collect(Collectors.toList()),
+            entityName,
+            aspectNames,
+            extractDisplayName,
+            authentication);
     // For each urn, try to find it's name, use the urn if not found
-    bars.forEach(namedBar -> namedBar.getSegments()
-        .forEach(segment -> segment.setLabel(urnToDisplayName.getOrDefault(segment.getLabel(), segment.getLabel()))));
+    bars.forEach(
+        namedBar ->
+            namedBar
+                .getSegments()
+                .forEach(
+                    segment ->
+                        segment.setLabel(
+                            urnToDisplayName.getOrDefault(
+                                segment.getLabel(), segment.getLabel()))));
   }
 
-  public static void hydrateDisplayNameForTable(EntityClient entityClient, List<Row> rows, String entityName,
-      Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName,
-      Authentication authentication) throws Exception {
-    Map<String, String> urnToDisplayName = getUrnToDisplayName(entityClient, rows.stream()
-        .flatMap(row -> row.getCells().stream().filter(cell -> cell.getEntity() != null).map(Cell::getValue))
-        .collect(Collectors.toList()), entityName, aspectNames, extractDisplayName, authentication);
+  public static void hydrateDisplayNameForTable(
+      EntityClient entityClient,
+      List<Row> rows,
+      String entityName,
+      Set<String> aspectNames,
+      Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
+    Map<String, String> urnToDisplayName =
+        getUrnToDisplayName(
+            entityClient,
+            rows.stream()
+                .flatMap(
+                    row ->
+                        row.getCells().stream()
+                            .filter(cell -> cell.getEntity() != null)
+                            .map(Cell::getValue))
+                .collect(Collectors.toList()),
+            entityName,
+            aspectNames,
+            extractDisplayName,
+            authentication);
     // For each urn, try to find it's name, use the urn if not found
-    rows.forEach(row -> row.getCells().forEach(cell -> {
-      if (cell.getEntity() != null) {
-        cell.setValue(urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue()));
-      }
-    }));
+    rows.forEach(
+        row ->
+            row.getCells()
+                .forEach(
+                    cell -> {
+                      if (cell.getEntity() != null) {
+                        cell.setValue(
+                            urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue()));
+                      }
+                    }));
   }
 
-  public static Map<String, String> getUrnToDisplayName(EntityClient entityClient, List<String> urns, String entityName,
-      Set<String> aspectNames, Function<EntityResponse, Optional<String>> extractDisplayName,
-      Authentication authentication) throws Exception {
-    Set<Urn> uniqueUrns = urns.stream().distinct().map(urnStr -> {
-      try {
-        return Urn.createFromString(urnStr);
-      } catch (URISyntaxException e) {
-        return null;
-      }
-    }).filter(Objects::nonNull).collect(Collectors.toSet());
-    Map<Urn, EntityResponse> aspects = entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication);
-    return aspects.entrySet()
-        .stream()
-        .map(entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue())))
+  public static Map<String, String> getUrnToDisplayName(
+      EntityClient entityClient,
+      List<String> urns,
+      String entityName,
+      Set<String> aspectNames,
+      Function<EntityResponse, Optional<String>> extractDisplayName,
+      Authentication authentication)
+      throws Exception {
+    Set<Urn> uniqueUrns =
+        urns.stream()
+            .distinct()
+            .map(
+                urnStr -> {
+                  try {
+                    return Urn.createFromString(urnStr);
+                  } catch (URISyntaxException e) {
+                    return null;
+                  }
+                })
+            .filter(Objects::nonNull)
+            .collect(Collectors.toSet());
+    Map<Urn, EntityResponse> aspects =
+        entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication);
+    return aspects.entrySet().stream()
+        .map(
+            entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue())))
         .filter(pair -> pair.getValue().isPresent())
         .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().get()));
   }
 
   public static Optional<String> getDomainName(EntityResponse entityResponse) {
-    EnvelopedAspect domainProperties = entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME);
+    EnvelopedAspect domainProperties =
+        entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME);
     if (domainProperties == null) {
       return Optional.empty();
     }
@@ -126,13 +195,17 @@ public static Optional<String> getPlatformName(EntityResponse entityResponse) {
     if (envelopedDataPlatformInfo == null) {
       return Optional.empty();
     }
-    DataPlatformInfo dataPlatformInfo = new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data());
+    DataPlatformInfo dataPlatformInfo =
+        new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data());
     return Optional.of(
-        dataPlatformInfo.getDisplayName() == null ? dataPlatformInfo.getName() : dataPlatformInfo.getDisplayName());
+        dataPlatformInfo.getDisplayName() == null
+            ?
dataPlatformInfo.getName() + : dataPlatformInfo.getDisplayName()); } public static Optional getDatasetName(EntityResponse entityResponse) { - EnvelopedAspect envelopedDatasetKey = entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); + EnvelopedAspect envelopedDatasetKey = + entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); if (envelopedDatasetKey == null) { return Optional.empty(); } @@ -141,7 +214,8 @@ public static Optional getDatasetName(EntityResponse entityResponse) { } public static Optional getTermName(EntityResponse entityResponse) { - EnvelopedAspect envelopedTermInfo = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + EnvelopedAspect envelopedTermInfo = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); if (envelopedTermInfo != null) { GlossaryTermInfo glossaryTermInfo = new GlossaryTermInfo(envelopedTermInfo.getValue().data()); if (glossaryTermInfo.hasName()) { @@ -150,11 +224,13 @@ public static Optional getTermName(EntityResponse entityResponse) { } // if name is not set on GlossaryTermInfo or there is no GlossaryTermInfo - EnvelopedAspect envelopedGlossaryTermKey = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); + EnvelopedAspect envelopedGlossaryTermKey = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); if (envelopedGlossaryTermKey == null) { return Optional.empty(); } - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); + GlossaryTermKey glossaryTermKey = + new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); return Optional.of(glossaryTermKey.getName()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 03e63c7fb472f..6ba3777d476cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -1,36 +1,37 @@ package com.linkedin.datahub.graphql.authorization; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; - import java.time.Clock; import java.util.List; import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class AuthorizationUtils { private static final Clock CLOCK = Clock.systemUTC(); public static AuditStamp createAuditStamp(@Nonnull QueryContext context) { - return new AuditStamp().setTime(CLOCK.millis()).setActor(UrnUtils.getUrn(context.getActorUrn())); + return new AuditStamp() + .setTime(CLOCK.millis()) + 
.setActor(UrnUtils.getUrn(context.getActorUrn())); } public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } public static boolean canManagePolicies(@Nonnull QueryContext context) { @@ -38,7 +39,8 @@ public static boolean canManagePolicies(@Nonnull QueryContext context) { } public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } public static boolean canManageTokens(@Nonnull QueryContext context) { @@ -46,21 +48,20 @@ public static boolean canManageTokens(@Nonnull QueryContext context) { } /** - * Returns true if the current used is able to create Domains. This is true if the user has the 'Manage Domains' or 'Create Domains' platform privilege. + * Returns true if the current used is able to create Domains. This is true if the user has the + * 'Manage Domains' or 'Create Domains' platform privilege. */ public static boolean canCreateDomains(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageDomains(@Nonnull QueryContext context) { @@ -68,21 +69,20 @@ public static boolean canManageDomains(@Nonnull QueryContext context) { } /** - * Returns true if the current used is able to create Tags. This is true if the user has the 'Manage Tags' or 'Create Tags' platform privilege. + * Returns true if the current used is able to create Tags. This is true if the user has the + * 'Manage Tags' or 'Create Tags' platform privilege. 
*/ public static boolean canCreateTags(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageTags(@Nonnull QueryContext context) { @@ -90,48 +90,59 @@ public static boolean canManageTags(@Nonnull QueryContext context) { } public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { - return isAuthorized(context, Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), PoliciesConfig.DELETE_ENTITY_PRIVILEGE); + return isAuthorized( + context, + Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), + PoliciesConfig.DELETE_ENTITY_PRIVILEGE); } public static boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - public static boolean canEditGroupMembers(@Nonnull String groupUrnStr, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); + public static boolean canEditGroupMembers( + @Nonnull String groupUrnStr, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized(context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, - groupUrnStr, orPrivilegeGroups); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + CORP_GROUP_ENTITY_NAME, + groupUrnStr, + orPrivilegeGroups); } public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - 
-        context.getAuthorizer(),
-        context.getActorUrn(),
-        orPrivilegeGroups);
+        context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups);
   }
 
   public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(
-        ImmutableList.of(
-            new ConjunctivePrivilegeGroup(ImmutableList.of(
-                PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType()))
-        ));
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(
+                        PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType()))));
 
     return AuthorizationUtils.isAuthorized(
-        context.getAuthorizer(),
-        context.getActorUrn(),
-        orPrivilegeGroups);
+        context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups);
   }
 
   public static boolean canManageGlobalViews(@Nonnull QueryContext context) {
@@ -142,31 +153,39 @@ public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) {
     return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES);
   }
 
-  public static boolean canEditEntityQueries(@Nonnull List<Urn> entityUrns, @Nonnull QueryContext context) {
-    final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(
-        ImmutableList.of(ALL_PRIVILEGES_GROUP,
-            new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType()))));
-    return entityUrns.stream().allMatch(entityUrn ->
-        isAuthorized(
-            context.getAuthorizer(),
-            context.getActorUrn(),
-            entityUrn.getEntityType(),
-            entityUrn.toString(),
-            orPrivilegeGroups
-        ));
-  }
-
-  public static boolean canCreateQuery(@Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) {
+  public static boolean canEditEntityQueries(
+      @Nonnull List<Urn> entityUrns, @Nonnull QueryContext context) {
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                ALL_PRIVILEGES_GROUP,
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType()))));
+    return entityUrns.stream()
+        .allMatch(
+            entityUrn ->
+                isAuthorized(
+                    context.getAuthorizer(),
+                    context.getActorUrn(),
+                    entityUrn.getEntityType(),
+                    entityUrn.toString(),
+                    orPrivilegeGroups));
+  }
+
+  public static boolean canCreateQuery(
+      @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) {
     // Currently - you only need permission to edit an entity's queries to create a query.
     return canEditEntityQueries(subjectUrns, context);
   }
 
-  public static boolean canUpdateQuery(@Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) {
+  public static boolean canUpdateQuery(
+      @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) {
     // Currently - you only need permission to edit an entity's queries to update any query.
     return canEditEntityQueries(subjectUrns, context);
   }
 
-  public static boolean canDeleteQuery(@Nonnull Urn entityUrn, @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) {
+  public static boolean canDeleteQuery(
+      @Nonnull Urn entityUrn, @Nonnull List<Urn> subjectUrns, @Nonnull QueryContext context) {
     // Currently - you only need permission to edit an entity's queries to remove any query.
return canEditEntityQueries(subjectUrns, context); } @@ -177,15 +196,16 @@ public static boolean isAuthorized( @Nonnull PoliciesConfig.Privilege privilege) { final Authorizer authorizer = context.getAuthorizer(); final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + final ConjunctivePrivilegeGroup andGroup = + new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); + return AuthUtil.isAuthorized( + authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); } public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { return AuthUtil.isAuthorized(authorizer, actor, Optional.empty(), privilegeGroup); } @@ -194,13 +214,10 @@ public static boolean isAuthorized( @Nonnull String actor, @Nonnull String resourceType, @Nonnull String resource, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); return AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); } - private AuthorizationUtils() { } - + private AuthorizationUtils() {} } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java index a09dc8741cd29..69e0ed0625b2f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. */ public class AuthenticationException extends GraphQLException { - public AuthenticationException(String message) { - super(message); - } + public AuthenticationException(String message) { + super(message); + } - public AuthenticationException(String message, Throwable cause) { - super(message, cause); - } + public AuthenticationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java index 803af09e079d1..30568e45938c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.exception; - -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. 
*/ public class AuthorizationException extends DataHubGraphQLException { public AuthorizationException(String message) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 8d3f5d5cea9eb..7c3ea1d581b6e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -13,7 +13,8 @@ public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { @Override - public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { + public DataFetcherExceptionHandlerResult onException( + DataFetcherExceptionHandlerParameters handlerParameters) { Throwable exception = handlerParameters.getException(); SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java index 15c539a608cc0..f007a8b7c7adb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.exception; +import static graphql.Assert.*; + import graphql.ErrorType; import graphql.GraphQLError; import graphql.GraphqlErrorHelper; @@ -11,9 +13,6 @@ import java.util.List; import java.util.Map; -import static graphql.Assert.*; - - @PublicApi public class DataHubGraphQLError implements GraphQLError { @@ -23,7 +22,11 @@ public class DataHubGraphQLError implements GraphQLError { private final List locations; private final Map extensions; - public DataHubGraphQLError(String message, ResultPath path, SourceLocation sourceLocation, DataHubGraphQLErrorCode errorCode) { + public DataHubGraphQLError( + String message, + ResultPath path, + SourceLocation sourceLocation, + DataHubGraphQLErrorCode errorCode) { this.path = assertNotNull(path).toList(); this.errorCode = assertNotNull(errorCode); this.locations = Collections.singletonList(sourceLocation); @@ -90,4 +93,3 @@ public int hashCode() { return GraphqlErrorHelper.hashCode(this); } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java index 3d3c54e2febb2..75096a8c4148e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java @@ -2,7 +2,6 @@ import graphql.GraphQLException; - public class DataHubGraphQLException extends GraphQLException { private final DataHubGraphQLErrorCode code; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java index 2ee9838af5428..87a1aebb02f2e 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when an unexpected value is provided by the client. - */ +/** Exception thrown when an unexpected value is provided by the client. */ public class ValidationException extends GraphQLException { - public ValidationException(String message) { - super(message); - } + public ValidationException(String message) { + super(message); + } - public ValidationException(String message, Throwable cause) { - super(message, cause); - } + public ValidationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 4d6133f18df05..07bd1fba5d8a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.PreProcessHooks; import lombok.Data; - @Data public class FeatureFlags { private boolean showSimplifiedHomepageByDefault = false; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java index e228cb8445c02..9faf00e0211bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java @@ -1,26 +1,25 @@ package com.linkedin.datahub.graphql.resolvers; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthorizationRequest; +import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import java.util.List; import java.util.Optional; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; public class AuthUtils { - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean isAuthorized( - String principal, - List privilegeGroup, - Authorizer authorizer) { + String principal, List privilegeGroup, Authorizer authorizer) { for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = new AuthorizationRequest(principal, privilege, Optional.empty()); + final AuthorizationRequest request = + new AuthorizationRequest(principal, privilege, Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { return false; @@ -29,6 +28,5 @@ public static boolean isAuthorized( return true; } - - 
-  private AuthUtils() { }
+  private AuthUtils() {}
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java
index 2520b55c24e25..570ea322be7a5 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java
@@ -2,29 +2,28 @@
 
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthenticationException;
-
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
 
-
 /**
- * Checks whether the user is currently authenticated & if so delegates execution to a child resolver.
+ * Checks whether the user is currently authenticated & if so delegates execution to a child
+ * resolver.
  */
 @Deprecated
 public final class AuthenticatedResolver<T> implements DataFetcher<T> {
 
-    private final DataFetcher<T> _resolver;
+  private final DataFetcher<T> _resolver;
 
-    public AuthenticatedResolver(final DataFetcher<T> resolver) {
-        _resolver = resolver;
-    }
+  public AuthenticatedResolver(final DataFetcher<T> resolver) {
+    _resolver = resolver;
+  }
 
-    @Override
-    public final T get(DataFetchingEnvironment environment) throws Exception {
-        final QueryContext context = environment.getContext();
-        if (context.isAuthenticated()) {
-            return _resolver.get(environment);
-        }
-        throw new AuthenticationException("Failed to authenticate the current user.");
+  @Override
+  public final T get(DataFetchingEnvironment environment) throws Exception {
+    final QueryContext context = environment.getContext();
+    if (context.isAuthenticated()) {
+      return _resolver.get(environment);
     }
+    throw new AuthenticationException("Failed to authenticate the current user.");
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java
index 930c98ee7113a..5ab07701c15a2 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java
@@ -2,18 +2,17 @@
 
 import com.google.common.collect.Iterables;
 import com.linkedin.datahub.graphql.generated.Entity;
-import org.dataloader.DataLoader;
-import org.dataloader.DataLoaderRegistry;
-
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
+import org.dataloader.DataLoader;
+import org.dataloader.DataLoaderRegistry;
 
 public class BatchLoadUtils {
-  private BatchLoadUtils() { }
+  private BatchLoadUtils() {}
 
   public static CompletableFuture<List<Entity>> batchLoadEntitiesOfSameType(
       List<Entity> entities,
@@ -24,9 +23,10 @@ public static CompletableFuture<List<Entity>> batchLoadEntitiesOfSameType(
     }
     // Assume all entities are of the same type
     final com.linkedin.datahub.graphql.types.EntityType filteredEntity =
-        Iterables.getOnlyElement(entityTypes.stream()
-            .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass()))
-            .collect(Collectors.toList()));
+        Iterables.getOnlyElement(
+            entityTypes.stream()
+                .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass()))
+                .collect(Collectors.toList()));
     final DataLoader loader =
dataLoaderRegistry.getDataLoader(filteredEntity.name()); List keyList = new ArrayList(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java index b0f23e63177e6..aba781f9e1dc7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java @@ -7,9 +7,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service Storage Entities + * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service + * Storage Entities */ public class EntityTypeMapper { @@ -44,10 +44,10 @@ public class EntityTypeMapper { .build(); private static final Map ENTITY_NAME_TO_TYPE = - ENTITY_TYPE_TO_NAME.entrySet().stream().collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); + ENTITY_TYPE_TO_NAME.entrySet().stream() + .collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); - private EntityTypeMapper() { - } + private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index 02921b453e315..b480e287adb9b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -23,17 +26,12 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * GraphQL resolver responsible for resolving information about the currently - * logged in User, including - * - * 1. User profile information - * 2. User privilege information, i.e. which features to display in the UI. + * GraphQL resolver responsible for resolving information about the currently logged in User, + * including * + *
+ * <p>1. User profile information 2. User privilege information, i.e. which features to display in
+ * the UI.
  */
 public class MeResolver implements DataFetcher<CompletableFuture<AuthenticatedUser>> {
@@ -48,114 +46,123 @@ public MeResolver(final EntityClient entityClient, final FeatureFlags featureFla
   @Override
   public CompletableFuture<AuthenticatedUser> get(DataFetchingEnvironment environment) {
     final QueryContext context = environment.getContext();
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        // 1. Get currently logged in user profile.
-        final Urn userUrn = Urn.createFromString(context.getActorUrn());
-        final EntityResponse gmsUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME,
-            Collections.singleton(userUrn), null, context.getAuthentication()).get(userUrn);
-        final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags);
-
-        // 2. Get platform privileges
-        final PlatformPrivileges platformPrivileges = new PlatformPrivileges();
-        platformPrivileges.setViewAnalytics(canViewAnalytics(context));
-        platformPrivileges.setManagePolicies(canManagePolicies(context));
-        platformPrivileges.setManageIdentities(canManageUsersGroups(context));
-        platformPrivileges.setGeneratePersonalAccessTokens(canGeneratePersonalAccessToken(context));
-        platformPrivileges.setManageDomains(canManageDomains(context));
-        platformPrivileges.setManageIngestion(canManageIngestion(context));
-        platformPrivileges.setManageSecrets(canManageSecrets(context));
-        platformPrivileges.setManageTokens(canManageTokens(context));
-        platformPrivileges.setManageTests(canManageTests(context));
-        platformPrivileges.setManageGlossaries(canManageGlossaries(context));
-        platformPrivileges.setManageUserCredentials(canManageUserCredentials(context));
-        platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context));
-        platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context));
-        platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context));
-        platformPrivileges.setManageGlobalViews(AuthorizationUtils.canManageGlobalViews(context));
-        platformPrivileges.setManageOwnershipTypes(AuthorizationUtils.canManageOwnershipTypes(context));
-        platformPrivileges.setManageGlobalAnnouncements(AuthorizationUtils.canManageGlobalAnnouncements(context));
-
-        // Construct and return authenticated user object.
-        final AuthenticatedUser authUser = new AuthenticatedUser();
-        authUser.setCorpUser(corpUser);
-        authUser.setPlatformPrivileges(platformPrivileges);
-        return authUser;
-      } catch (URISyntaxException | RemoteInvocationException e) {
-        throw new RuntimeException("Failed to fetch authenticated user!", e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            // 1. Get currently logged in user profile.
+            final Urn userUrn = Urn.createFromString(context.getActorUrn());
+            final EntityResponse gmsUser =
+                _entityClient
+                    .batchGetV2(
+                        CORP_USER_ENTITY_NAME,
+                        Collections.singleton(userUrn),
+                        null,
+                        context.getAuthentication())
+                    .get(userUrn);
+            final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags);
+
+            // 2.
Get platform privileges + final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); + platformPrivileges.setViewAnalytics(canViewAnalytics(context)); + platformPrivileges.setManagePolicies(canManagePolicies(context)); + platformPrivileges.setManageIdentities(canManageUsersGroups(context)); + platformPrivileges.setGeneratePersonalAccessTokens( + canGeneratePersonalAccessToken(context)); + platformPrivileges.setManageDomains(canManageDomains(context)); + platformPrivileges.setManageIngestion(canManageIngestion(context)); + platformPrivileges.setManageSecrets(canManageSecrets(context)); + platformPrivileges.setManageTokens(canManageTokens(context)); + platformPrivileges.setManageTests(canManageTests(context)); + platformPrivileges.setManageGlossaries(canManageGlossaries(context)); + platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); + platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); + platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); + platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); + platformPrivileges.setManageGlobalViews( + AuthorizationUtils.canManageGlobalViews(context)); + platformPrivileges.setManageOwnershipTypes( + AuthorizationUtils.canManageOwnershipTypes(context)); + platformPrivileges.setManageGlobalAnnouncements( + AuthorizationUtils.canManageGlobalAnnouncements(context)); + + // Construct and return authenticated user object. + final AuthenticatedUser authUser = new AuthenticatedUser(); + authUser.setCorpUser(corpUser); + authUser.setPlatformPrivileges(platformPrivileges); + return authUser; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to fetch authenticated user!", e); + } + }); } - /** - * Returns true if the authenticated user has privileges to view analytics. - */ + /** Returns true if the authenticated user has privileges to view analytics. */ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage policies analytics. - */ + /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage users & groups. - */ + /** Returns true if the authenticated user has privileges to manage users & groups. 
*/ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to generate personal access tokens - */ + /** Returns true if the authenticated user has privileges to generate personal access tokens */ private boolean canGeneratePersonalAccessToken(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage (add or remove) tests. - */ + /** Returns true if the authenticated user has privileges to manage (add or remove) tests. */ private boolean canManageTests(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage domains - */ + /** Returns true if the authenticated user has privileges to manage domains */ private boolean canManageDomains(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage access tokens - */ + /** Returns true if the authenticated user has privileges to manage access tokens */ private boolean canManageTokens(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); } - /** - * Returns true if the authenticated user has privileges to manage glossaries - */ + /** Returns true if the authenticated user has privileges to manage glossaries */ private boolean canManageGlossaries(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage user credentials - */ + /** Returns true if the authenticated user has privileges to manage user credentials */ private boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } /** * Returns true if the provided actor is authorized for a particular privilege, false otherwise. 
*/ - private boolean isAuthorized(final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); + private boolean isAuthorized( + final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); return AuthorizationResult.Type.ALLOW.equals(result.getType()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java index 2c2e71ee92eaa..244012d320b43 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -30,184 +32,198 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.metadata.Constants.*; - - public class ResolverUtils { - private static final Set KEYWORD_EXCLUDED_FILTERS = ImmutableSet.of( - "runId", - "_entityType" - ); - private static final ObjectMapper MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final Set KEYWORD_EXCLUDED_FILTERS = + ImmutableSet.of("runId", "_entityType"); + private static final ObjectMapper MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); + + private ResolverUtils() {} + + @Nonnull + public static T bindArgument(Object argument, Class clazz) { + return MAPPER.convertValue(argument, clazz); + } + + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } - - private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); - - private ResolverUtils() { } - - @Nonnull - public static T bindArgument(Object argument, Class clazz) { - return MAPPER.convertValue(argument, clazz); + return input; + } + + @Nonnull + public static Authentication getAuthentication(DataFetchingEnvironment environment) { + return ((QueryContext) environment.getContext()).getAuthentication(); + } + + /** + * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure + * 
that it is matched against a keyword filter in ElasticSearch. + * @param facetFilterInputs The list of facet filters inputs + * @param validFacetFields The set of valid fields against which to filter for. + * @return A map of filter definitions to be used in ElasticSearch. + */ + @Nonnull + public static Map buildFacetFilters( + @Nullable List facetFilterInputs, @Nonnull Set validFacetFields) { + if (facetFilterInputs == null) { + return Collections.emptyMap(); } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; - } - - @Nonnull - public static Authentication getAuthentication(DataFetchingEnvironment environment) { - return ((QueryContext) environment.getContext()).getAuthentication(); - } - - /** - * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure - * that it is matched against a keyword filter in ElasticSearch. - * - * @param facetFilterInputs The list of facet filters inputs - * @param validFacetFields The set of valid fields against which to filter for. - * @return A map of filter definitions to be used in ElasticSearch. - */ - @Nonnull - public static Map buildFacetFilters(@Nullable List facetFilterInputs, - @Nonnull Set validFacetFields) { - if (facetFilterInputs == null) { - return Collections.emptyMap(); - } - - final Map facetFilters = new HashMap<>(); - - facetFilterInputs.forEach(facetFilterInput -> { - if (!validFacetFields.contains(facetFilterInput.getField())) { - throw new ValidationException(String.format("Unrecognized facet with name %s provided", facetFilterInput.getField())); - } - if (!facetFilterInput.getValues().isEmpty()) { - facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); - } + final Map facetFilters = new HashMap<>(); + + facetFilterInputs.forEach( + facetFilterInput -> { + if (!validFacetFields.contains(facetFilterInput.getField())) { + throw new ValidationException( + String.format( + "Unrecognized facet with name %s provided", facetFilterInput.getField())); + } + if (!facetFilterInput.getValues().isEmpty()) { + facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); + } }); - return facetFilters; - } + return facetFilters; + } - public static List criterionListFromAndFilter(List andFilters) { - return andFilters != null && !andFilters.isEmpty() - ? andFilters.stream() + public static List criterionListFromAndFilter(List andFilters) { + return andFilters != null && !andFilters.isEmpty() + ? andFilters.stream() .map(filter -> criterionFromFilter(filter)) - .collect(Collectors.toList()) : Collections.emptyList(); + .collect(Collectors.toList()) + : Collections.emptyList(); + } + + // In the case that user sends filters to be or-d together, we need to build a series of + // conjunctive criterion + // arrays, rather than just one for the AND case. 
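
The comment above describes a disjunctive-normal-form translation: each OR group becomes one conjunction of criteria, and the overall filter is the disjunction of those conjunctions. A minimal sketch with simplified stand-ins for Filter, ConjunctiveCriterion, and Criterion (illustrative records, not the real Pegasus models):

    import java.util.List;
    import java.util.stream.Collectors;

    class FilterSketch {
      record Criterion(String field, String value) {}
      record Conjunction(List<Criterion> and) {}
      record Filter(List<Conjunction> or) {}

      // Each inner list is one OR branch; its criteria are AND-ed together.
      static Filter buildFilter(List<List<Criterion>> orOfAnds) {
        return new Filter(
            orOfAnds.stream().map(Conjunction::new).collect(Collectors.toList()));
      }
    }
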
+ public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( + @Nonnull List orFilters) { + return new ConjunctiveCriterionArray( + orFilters.stream() + .map( + orFilter -> { + CriterionArray andCriterionForOr = + new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); + return new ConjunctiveCriterion().setAnd(andCriterionForOr); + }) + .collect(Collectors.toList())); + } + + @Nullable + public static Filter buildFilter( + @Nullable List andFilters, @Nullable List orFilters) { + if ((andFilters == null || andFilters.isEmpty()) + && (orFilters == null || orFilters.isEmpty())) { + return null; + } + // Or filters are the new default. We will check them first. + // If we have OR filters, we need to build a series of CriterionArrays + if (orFilters != null && !orFilters.isEmpty()) { + return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); } - // In the case that user sends filters to be or-d together, we need to build a series of conjunctive criterion - // arrays, rather than just one for the AND case. - public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( - @Nonnull List orFilters - ) { - return new ConjunctiveCriterionArray(orFilters.stream().map(orFilter -> { - CriterionArray andCriterionForOr = new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); - return new ConjunctiveCriterion().setAnd( - andCriterionForOr - ); - } - ).collect(Collectors.toList())); + // If or filters are not set, someone may be using the legacy and filters + final List andCriterions = criterionListFromAndFilter(andFilters); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + } + + public static Criterion criterionFromFilter(final FacetFilterInput filter) { + return criterionFromFilter(filter, false); + } + + // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) + public static Criterion criterionFromFilter( + final FacetFilterInput filter, final Boolean skipKeywordSuffix) { + Criterion result = new Criterion(); + + if (skipKeywordSuffix) { + result.setField(filter.getField()); + } else { + result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); } - @Nullable - public static Filter buildFilter(@Nullable List andFilters, @Nullable List orFilters) { - if ((andFilters == null || andFilters.isEmpty()) && (orFilters == null || orFilters.isEmpty())) { - return null; - } - - // Or filters are the new default. We will check them first. - // If we have OR filters, we need to build a series of CriterionArrays - if (orFilters != null && !orFilters.isEmpty()) { - return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); - } - - // If or filters are not set, someone may be using the legacy and filters - final List andCriterions = criterionListFromAndFilter(andFilters); - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + // `value` is deprecated in place of `values`- this is to support old query patterns. 
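
The deprecation note just above, which continues below, boils down to: keep populating the legacy singular `value` alongside the newer `values` list so older consumers continue to work, with `values` as the source of truth. A sketch of that rule, assuming a simplified Criterion holder:

    import java.util.Collections;
    import java.util.List;

    class CriterionCompat {
      static class Criterion {
        List<String> values;
        String value;
      }

      static Criterion apply(List<String> values, String legacyValue) {
        Criterion out = new Criterion();
        if (values == null && legacyValue != null) {
          out.values = List.of(legacyValue); // mirror the legacy value into values
          out.value = legacyValue;
        } else if (values != null) {
          out.values = values;
          out.value = values.isEmpty() ? "" : values.get(0); // first entry backfills value
        } else {
          out.values = Collections.emptyList();
          out.value = "";
        }
        return out;
      }
    }
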
If values + // is provided, + // this statement will be skipped + if (filter.getValues() == null && filter.getValue() != null) { + result.setValues(new StringArray(filter.getValue())); + result.setValue(filter.getValue()); + } else if (filter.getValues() != null) { + result.setValues(new StringArray(filter.getValues())); + if (!filter.getValues().isEmpty()) { + result.setValue(filter.getValues().get(0)); + } else { + result.setValue(""); + } + } else { + result.setValues(new StringArray()); + result.setValue(""); } - public static Criterion criterionFromFilter(final FacetFilterInput filter) { - return criterionFromFilter(filter, false); + if (filter.getCondition() != null) { + result.setCondition(Condition.valueOf(filter.getCondition().toString())); + } else { + result.setCondition(Condition.EQUAL); } - // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) - public static Criterion criterionFromFilter(final FacetFilterInput filter, final Boolean skipKeywordSuffix) { - Criterion result = new Criterion(); - - if (skipKeywordSuffix) { - result.setField(filter.getField()); - } else { - result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); - } - - // `value` is deprecated in place of `values`- this is to support old query patterns. If values is provided, - // this statement will be skipped - if (filter.getValues() == null && filter.getValue() != null) { - result.setValues(new StringArray(filter.getValue())); - result.setValue(filter.getValue()); - } else if (filter.getValues() != null) { - result.setValues(new StringArray(filter.getValues())); - if (!filter.getValues().isEmpty()) { - result.setValue(filter.getValues().get(0)); - } else { - result.setValue(""); - } - } else { - result.setValues(new StringArray()); - result.setValue(""); - } - - - if (filter.getCondition() != null) { - result.setCondition(Condition.valueOf(filter.getCondition().toString())); - } else { - result.setCondition(Condition.EQUAL); - } - - if (filter.getNegated() != null) { - result.setNegated(filter.getNegated()); - } - - return result; + if (filter.getNegated() != null) { + result.setNegated(filter.getNegated()); } - private static String getFilterField(final String originalField, final boolean skipKeywordSuffix) { - if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { - return originalField; - } - return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + return result; + } + + private static String getFilterField( + final String originalField, final boolean skipKeywordSuffix) { + if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { + return originalField; } + return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + } - public static Filter buildFilterWithUrns(@Nonnull Set urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") + public static Filter buildFilterWithUrns(@Nonnull Set urns, @Nullable Filter inputFilters) { + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); - if (inputFilters == null) { - return QueryUtils.newFilter(urnMatchCriterion); - } - - // Add urn match criterion to each or clause - if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { - for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { - conjunctiveCriterion.getAnd().add(urnMatchCriterion); - } - return inputFilters; - } - return 
QueryUtils.newFilter(urnMatchCriterion); + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + if (inputFilters == null) { + return QueryUtils.newFilter(urnMatchCriterion); + } + + // Add urn match criterion to each or clause + if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { + for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { + conjunctiveCriterion.getAnd().add(urnMatchCriterion); + } + return inputFilters; } + return QueryUtils.newFilter(urnMatchCriterion); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index b5b13cc00b40d..2a074b950d0ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; @@ -26,13 +28,9 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * GraphQL Resolver used for fetching AssertionRunEvents. - */ -public class AssertionRunEventResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching AssertionRunEvents. */ +public class AssertionRunEventResolver + implements DataFetcher> { private final EntityClient _client; @@ -42,58 +40,72 @@ public AssertionRunEventResolver(final EntityClient client) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String urn = ((Assertion) environment.getSource()).getUrn(); - final String maybeStatus = environment.getArgumentOrDefault("status", null); - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; + final String urn = ((Assertion) environment.getSource()).getUrn(); + final String maybeStatus = environment.getArgumentOrDefault("status", null); + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; - try { - // Step 1: Fetch aspects from GMS - List aspects = _client.getTimeseriesAspectValues( - urn, - Constants.ASSERTION_ENTITY_NAME, - Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - buildFilter(maybeFilters, maybeStatus), - context.getAuthentication()); + try { + // Step 1: Fetch aspects from GMS + List aspects = + _client.getTimeseriesAspectValues( + urn, + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilter(maybeFilters, maybeStatus), + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - List runEvents = aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + // Step 2: Bind profiles into GraphQL strong types. + List runEvents = + aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); - // Step 3: Package and return response. - final AssertionRunEventsResult result = new AssertionRunEventsResult(); - result.setTotal(runEvents.size()); - result.setFailed(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.FAILURE.equals( - runEvent.getResult().getType() - )).count())); - result.setSucceeded(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.SUCCESS.equals(runEvent.getResult().getType() - )).count())); - result.setRunEvents(runEvents); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 3: Package and return response. 
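
Step 3 below tallies completed run events by result type using a stream filter-and-count idiom. Sketched here with stand-in enums and a simplified RunEvent record (not the generated GraphQL classes):

    import java.util.List;

    class RunEventTally {
      enum Status { COMPLETE, RUNNING }
      enum ResultType { SUCCESS, FAILURE }
      record RunEvent(Status status, ResultType result) {}

      // Only completed runs with a non-null result are counted toward either bucket.
      static long count(List<RunEvent> events, ResultType wanted) {
        return events.stream()
            .filter(e -> e.status() == Status.COMPLETE
                && e.result() != null
                && e.result() == wanted)
            .count();
      }
    }
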
+ final AssertionRunEventsResult result = new AssertionRunEventsResult(); + result.setTotal(runEvents.size()); + result.setFailed( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.FAILURE.equals( + runEvent.getResult().getType())) + .count())); + result.setSucceeded( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.SUCCESS.equals( + runEvent.getResult().getType())) + .count())); + result.setRunEvents(runEvents); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } @Nullable - public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable final String status) { + public static Filter buildFilter( + @Nullable FilterInput filtersInput, @Nullable final String status) { if (filtersInput == null && status == null) { return null; } @@ -107,8 +119,14 @@ public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable f if (filtersInput != null) { facetFilters.addAll(filtersInput.getAnd()); } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilters.stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + facetFilters.stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 8006ae7d2a464..89912b2814e40 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -19,63 +19,76 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver that deletes an Assertion. - */ +/** GraphQL Resolver that deletes an Assertion. 
*/ @Slf4j -public class DeleteAssertionResolver implements DataFetcher> { +public class DeleteAssertionResolver implements DataFetcher> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteAssertionResolver(final EntityClient entityClient, final EntityService entityService) { + public DeleteAssertionResolver( + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn assertionUrn = Urn.createFromString(environment.getArgument("urn")); - return CompletableFuture.supplyAsync(() -> { - - // 1. check the entity exists. If not, return false. - if (!_entityService.exists(assertionUrn)) { - return true; - } - - if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { - try { - _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(assertionUrn, context.getAuthentication()); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for assertion with urn %s", assertionUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. check the entity exists. If not, return false. + if (!_entityService.exists(assertionUrn)) { return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against assertion with urn %s", assertionUrn), e); } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + + if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { + try { + _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + assertionUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for assertion with urn %s", + assertionUrn), + e); + } + }); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against assertion with urn %s", assertionUrn), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - /** - * Determine whether the current user is allowed to remove an assertion. - */ - private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final Urn assertionUrn) { + /** Determine whether the current user is allowed to remove an assertion. */ + private boolean isAuthorizedToDeleteAssertion( + final QueryContext context, final Urn assertionUrn) { // 2. fetch the assertion info AssertionInfo info = - (AssertionInfo) EntityUtils.getAspectFromEntity( - assertionUrn.toString(), Constants.ASSERTION_INFO_ASPECT_NAME, _entityService, null); + (AssertionInfo) + EntityUtils.getAspectFromEntity( + assertionUrn.toString(), + Constants.ASSERTION_INFO_ASPECT_NAME, + _entityService, + null); if (info != null) { // 3. 
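
The deletion path above returns to the caller as soon as the entity delete succeeds and clears entity references in the background; failures in that cleanup are logged rather than surfaced. A minimal sketch of the fire-and-forget idiom, with an illustrative Client interface and log line:

    import java.util.concurrent.CompletableFuture;

    class AsyncCleanupSketch {
      interface Client { void deleteReferences(String urn) throws Exception; }

      static void cleanUpAsync(Client client, String urn) {
        CompletableFuture.runAsync(
            () -> {
              try {
                client.deleteReferences(urn);
              } catch (Exception e) {
                // Swallow and log: the caller was already told the delete succeeded.
                System.err.printf("Failed to clear references for %s: %s%n", urn, e);
              }
            });
      }
    }
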
check whether the actor has permission to edit the assertions on the assertee @@ -86,11 +99,14 @@ private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final return true; } - private boolean isAuthorizedToDeleteAssertionFromAssertee(final QueryContext context, final Urn asserteeUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToDeleteAssertionFromAssertee( + final QueryContext context, final Urn asserteeUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -104,7 +120,8 @@ private Urn getAsserteeUrnFromInfo(final AssertionInfo info) { case DATASET: return info.getDatasetAssertion().getDataset(); default: - throw new RuntimeException(String.format("Unsupported Assertion Type %s provided", info.getType())); + throw new RuntimeException( + String.format("Unsupported Assertion Type %s provided", info.getType())); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index ff573bb59fba1..9814589df7651 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -26,11 +26,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of Assertions associated with an Entity. - */ -public class EntityAssertionsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. 
*/ +public class EntityAssertionsResolver + implements DataFetcher> { private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; @@ -44,54 +42,60 @@ public EntityAssertionsResolver(final EntityClient entityClient, final GraphClie @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 200); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 200); - try { - // Step 1: Fetch set of assertions associated with the target entity from the Graph Store - final EntityRelationships relationships = _graphClient.getRelatedEntities( - entityUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - start, - count, - context.getActorUrn() - ); + try { + // Step 1: Fetch set of assertions associated with the target entity from the Graph + // Store + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + start, + count, + context.getActorUrn()); - final List assertionUrns = relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + final List assertionUrns = + relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the assertion entities based on the urns from step 1 - final Map entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the assertion entities based on the urns from step 1 + final Map entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS assertion model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List assertions = gmsResults.stream() - .filter(Objects::nonNull) - .map(AssertionMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS assertion model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List assertions = + gmsResults.stream() + .filter(Objects::nonNull) + .map(AssertionMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final EntityAssertionsResult result = new EntityAssertionsResult(); - result.setCount(relationships.getCount()); - result.setStart(relationships.getStart()); - result.setTotal(relationships.getTotal()); - result.setAssertions(assertions); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 4: Package and return result + final EntityAssertionsResult result = new EntityAssertionsResult(); 
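
Step 3 above re-collects the batch response in the order of the original urn list, because batchGetV2 returns an unordered map and missing entities come back as null. A sketch of that order-preserving hydration with generic stand-in types:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    class OrderedHydration {
      static <K, V> List<V> inRequestOrder(List<K> keys, Map<K, V> fetched) {
        List<V> out = new ArrayList<>();
        for (K key : keys) {
          V value = fetched.get(key); // null when the entity was not found
          if (value != null) {
            out.add(value);           // nulls are dropped, request order is kept
          }
        }
        return out;
      }
    }
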
+ result.setCount(relationships.getCount()); + result.setStart(relationships.getStart()); + result.setTotal(relationships.getTotal()); + result.setAssertions(assertions); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java index 8f5be1000bb45..9015ad0ebb210 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java @@ -5,13 +5,9 @@ import java.time.temporal.ChronoUnit; import java.util.Optional; - - public class AccessTokenUtil { - /** - * Convert an {@link AccessTokenDuration} into its milliseconds equivalent. - */ + /** Convert an {@link AccessTokenDuration} into its milliseconds equivalent. */ public static Optional mapDurationToMs(final AccessTokenDuration duration) { switch (duration) { case ONE_HOUR: @@ -29,9 +25,10 @@ public static Optional mapDurationToMs(final AccessTokenDuration duration) case NO_EXPIRY: return Optional.empty(); default: - throw new RuntimeException(String.format("Unrecognized access token duration %s provided", duration)); + throw new RuntimeException( + String.format("Unrecognized access token duration %s provided", duration)); } } - private AccessTokenUtil() { } + private AccessTokenUtil() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java index cd55d81aec6ad..14a1b9a1f7a01 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatefulTokenService; @@ -10,10 +12,10 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AccessToken; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.AccessTokenType; import com.linkedin.datahub.graphql.generated.CreateAccessTokenInput; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.metadata.Constants; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -22,12 +24,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for creating personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for creating personal & service principal v2-type (stateful) access tokens. 
*/ @Slf4j public class CreateAccessTokenResolver implements DataFetcher> { @@ -38,62 +35,85 @@ public CreateAccessTokenResolver(final StatefulTokenService statefulTokenService } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final CreateAccessTokenInput input = bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); - - log.info("User {} requesting new access token for user {} ", context.getActorUrn(), input.getActorUrn()); - - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Date date = new Date(); - final long createdAtInMs = date.getTime(); - final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - - final String tokenName = input.getName(); - final String tokenDescription = input.getDescription(); - - final String accessToken = - _statefulTokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null), - createdAtInMs, tokenName, tokenDescription, context.getActorUrn()); - log.info("Generated access token for {} of type {} with duration {}", input.getActorUrn(), input.getType(), - input.getDuration()); - try { - final String tokenHash = _statefulTokenService.hash(accessToken); - - final AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - result.setMetadata(metadata); - - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new access token with name %s", input.getName()), - e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final CreateAccessTokenInput input = + bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); + + log.info( + "User {} requesting new access token for user {} ", + context.getActorUrn(), + input.getActorUrn()); + + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. 
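
The "warn" comment above flags a real coupling: TokenType.valueOf(input.getType().toString()) only works while the GraphQL enum and the internal enum declare identical constant names. A defensive sketch of that bridge, with both enums below as illustrative stand-ins rather than the real types:

    enum GraphqlTokenType { PERSONAL, SERVICE }   // stand-in for the GraphQL enum
    enum InternalTokenType { PERSONAL, SERVICE }  // stand-in for the internal TokenType

    class TokenTypeBridge {
      static InternalTokenType map(GraphqlTokenType type) {
        try {
          // Holds only while both enums keep identical constant names.
          return InternalTokenType.valueOf(type.name());
        } catch (IllegalArgumentException e) {
          throw new IllegalStateException("Token type enums have drifted: " + type, e);
        }
      }
    }
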
+ final String actorUrn = input.getActorUrn(); + final Date date = new Date(); + final long createdAtInMs = date.getTime(); + final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + + final String tokenName = input.getName(); + final String tokenDescription = input.getDescription(); + + final String accessToken = + _statefulTokenService.generateAccessToken( + type, + createActor(input.getType(), actorUrn), + expiresInMs.orElse(null), + createdAtInMs, + tokenName, + tokenDescription, + context.getActorUrn()); + log.info( + "Generated access token for {} of type {} with duration {}", + input.getActorUrn(), + input.getType(), + input.getDuration()); + try { + final String tokenHash = _statefulTokenService.hash(accessToken); + + final AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn( + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + result.setMetadata(metadata); + + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new access token with name %s", input.getName()), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final CreateAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final CreateAccessTokenInput input) { if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final CreateAccessTokenInput input) { + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final CreateAccessTokenInput input) { return AuthorizationUtils.canManageTokens(context) - || input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + || input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -101,6 +121,7 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
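
The authorization rule above relies on Java operator precedence: && binds tighter than ||, so the expression reads as "token admins may mint for anyone; everyone else only for themselves, and only with the personal-token privilege." Reduced to booleans:

    class TokenAuthRule {
      static boolean mayGenerate(boolean canManageTokens,
                                 boolean isSelf,
                                 boolean canGeneratePersonal) {
        // Parses as: canManageTokens || (isSelf && canGeneratePersonal)
        return canManageTokens || isSelf && canGeneratePersonal;
      }
    }
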
return new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java index 5ac4ec8ac3a6b..aed6bd6cb98af 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatelessTokenService; @@ -18,12 +20,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for generating personal & service principal access tokens - */ +/** Resolver for generating personal & service principal access tokens */ @Slf4j public class GetAccessTokenResolver implements DataFetcher> { @@ -34,39 +31,49 @@ public GetAccessTokenResolver(final StatelessTokenService tokenService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final GetAccessTokenInput input = bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final GetAccessTokenInput input = + bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - final String accessToken = - _tokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); - AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - return result; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. 
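
The duration argument is converted by AccessTokenUtil.mapDurationToMs (shown earlier) and used just below: fixed durations become milliseconds, NO_EXPIRY becomes Optional.empty(), and orElse(null) hands the token service a nullable expiry. A sketch under those assumptions, with an abbreviated duration enum:

    import java.time.Duration;
    import java.util.Optional;

    class ExpirySketch {
      enum TokenDuration { ONE_HOUR, ONE_DAY, NO_EXPIRY }

      static Optional<Long> toMs(TokenDuration d) {
        switch (d) {
          case ONE_HOUR:
            return Optional.of(Duration.ofHours(1).toMillis());
          case ONE_DAY:
            return Optional.of(Duration.ofDays(1).toMillis());
          case NO_EXPIRY:
            return Optional.empty(); // callers pass expiresInMs.orElse(null) downstream
          default:
            throw new IllegalArgumentException("Unrecognized duration " + d);
        }
      }
    }
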
+ final String actorUrn = input.getActorUrn(); + final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + final String accessToken = + _tokenService.generateAccessToken( + type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); + AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + return result; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final GetAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final GetAccessTokenInput input) { // Currently only an actor can generate a personal token for themselves. if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final GetAccessTokenInput input) { - return input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final GetAccessTokenInput input) { + return input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -74,14 +81,16 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, createUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } private Urn createUrn(final String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to validate provided urn %s", urnStr)); + throw new IllegalArgumentException( + String.format("Failed to validate provided urn %s", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index f9ba552d349e0..5cfa80e394c5f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,14 +25,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for listing personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for listing personal & service principal v2-type (stateful) access tokens. */ @Slf4j -public class ListAccessTokensResolver implements DataFetcher> { +public class ListAccessTokensResolver + implements DataFetcher> { private static final String EXPIRES_AT_FIELD_NAME = "expiresAt"; @@ -41,60 +39,87 @@ public ListAccessTokensResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final ListAccessTokenInput input = bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final ListAccessTokenInput input = + bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + final List filters = + input.getFilters() == null ? 
Collections.emptyList() : input.getFilters(); - log.info("User {} listing access tokens with filters {}", context.getActorUrn(), filters.toString()); + log.info( + "User {} listing access tokens with filters {}", + context.getActorUrn(), + filters.toString()); - if (AuthorizationUtils.canManageTokens(context) || isListingSelfTokens(filters, context)) { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(EXPIRES_AT_FIELD_NAME).setOrder(SortOrder.DESCENDING); - final SearchResult searchResult = _entityClient.search(Constants.ACCESS_TOKEN_ENTITY_NAME, "", - buildFilter(filters, Collections.emptyList()), sortCriterion, start, count, - getAuthentication(environment), new SearchFlags().setFulltext(true)); + if (AuthorizationUtils.canManageTokens(context) + || isListingSelfTokens(filters, context)) { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(EXPIRES_AT_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); + final SearchResult searchResult = + _entityClient.search( + Constants.ACCESS_TOKEN_ENTITY_NAME, + "", + buildFilter(filters, Collections.emptyList()), + sortCriterion, + start, + count, + getAuthentication(environment), + new SearchFlags().setFulltext(true)); - final List tokens = searchResult.getEntities().stream().map(entity -> { - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(entity.getEntity().toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - return metadata; - }).collect(Collectors.toList()); + final List tokens = + searchResult.getEntities().stream() + .map( + entity -> { + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn(entity.getEntity().toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + return metadata; + }) + .collect(Collectors.toList()); - final ListAccessTokenResult result = new ListAccessTokenResult(); - result.setTokens(tokens); - result.setStart(searchResult.getFrom()); - result.setCount(searchResult.getPageSize()); - result.setTotal(searchResult.getNumEntities()); + final ListAccessTokenResult result = new ListAccessTokenResult(); + result.setTokens(tokens); + result.setStart(searchResult.getFrom()); + result.setCount(searchResult.getPageSize()); + result.setTotal(searchResult.getNumEntities()); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list access tokens", e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list access tokens", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Utility method to answer: Does the existing security context have permissions to generate their personal tokens - * AND is the request coming in requesting those personal tokens? - *

- * Note: We look for the actorUrn field because a token generated by someone else means that the generator actor has - * manage all access token privileges which means that he/she will be bound to just listing their own tokens. + * Utility method to answer: Does the existing security context have permissions to generate their + * personal tokens AND is the request coming in requesting those personal tokens? + * + *
<p>
Note: We look for the actorUrn field because a token generated by someone else means that + * the generator actor has manage all access token privileges which means that he/she will be + * bound to just listing their own tokens. * * @param filters The filters being used in the request. * @param context Current security context. * @return A boolean stating if the current user can list its personal tokens. */ - private boolean isListingSelfTokens(final List filters, final QueryContext context) { - return AuthorizationUtils.canGeneratePersonalAccessToken(context) && filters.stream() - .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); + private boolean isListingSelfTokens( + final List filters, final QueryContext context) { + return AuthorizationUtils.canGeneratePersonalAccessToken(context) + && filters.stream() + .anyMatch( + filter -> + filter.getField().equals("ownerUrn") + && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java index 252c0eaba6e85..8d0a23e665b1b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.token.StatefulTokenService; import com.google.common.collect.ImmutableSet; import com.linkedin.access.token.DataHubAccessTokenInfo; @@ -18,42 +20,39 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for revoking personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for revoking personal & service principal v2-type (stateful) access tokens. 
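
The isListingSelfTokens check above encodes the rule the javadoc describes: a non-admin may list tokens only when the request filters on ownerUrn equal to their own urn and they hold the personal-access-token privilege. A sketch with a simplified FacetFilter stand-in:

    import java.util.List;

    class SelfTokenListingSketch {
      record FacetFilter(String field, List<String> values) {}

      static boolean isListingSelfTokens(
          List<FacetFilter> filters, String actorUrn, boolean canGeneratePersonal) {
        return canGeneratePersonal
            && filters.stream()
                .anyMatch(f -> f.field().equals("ownerUrn")
                    && f.values().equals(List.of(actorUrn)));
      }
    }
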
*/ @Slf4j public class RevokeAccessTokenResolver implements DataFetcher> { private final EntityClient _entityClient; private final StatefulTokenService _statefulTokenService; - public RevokeAccessTokenResolver(final EntityClient entityClient, final StatefulTokenService statefulTokenService) { + public RevokeAccessTokenResolver( + final EntityClient entityClient, final StatefulTokenService statefulTokenService) { _entityClient = entityClient; _statefulTokenService = statefulTokenService; } @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); - log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); + log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); - if (isAuthorizedToRevokeToken(context, tokenId)) { - try { - _statefulTokenService.revokeAccessToken(tokenId); - } catch (Exception e) { - throw new RuntimeException("Failed to revoke access token", e); - } - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToRevokeToken(context, tokenId)) { + try { + _statefulTokenService.revokeAccessToken(tokenId); + } catch (Exception e) { + throw new RuntimeException("Failed to revoke access token", e); + } + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private boolean isAuthorizedToRevokeToken(final QueryContext context, final String tokenId) { @@ -62,12 +61,17 @@ private boolean isAuthorizedToRevokeToken(final QueryContext context, final Stri private boolean isOwnerOfAccessToken(final QueryContext context, final String tokenId) { try { - final EntityResponse entityResponse = _entityClient.getV2(Constants.ACCESS_TOKEN_ENTITY_NAME, - Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), - ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), context.getAuthentication()); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), + ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { + final DataMap data = + entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(data); return tokenInfo.getOwnerUrn().toString().equals(context.getActorUrn()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java index 4a1964b36032c..40c91b43850f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java @@ -1,61 +1,65 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.BrowsePathsInput; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.Collections; -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowsePathsResolver implements DataFetcher>> { - private static final Logger _logger = LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); - - private final Map> _typeToEntity; - - public BrowsePathsResolver(@Nonnull final List> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } - - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final BrowsePathsInput input = bindArgument(environment.getArgument("input"), BrowsePathsInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Fetch browse paths. 
entity type: %s, urn: %s", - input.getType(), - input.getUrn())); - if (_typeToEntity.containsKey(input.getType())) { - return _typeToEntity.get(input.getType()).browsePaths(input.getUrn(), environment.getContext()); - } - // Browse path is impl detail. - return Collections.emptyList(); - } catch (Exception e) { - _logger.error("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()) + " " + e.getMessage()); - throw new RuntimeException("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()), e); + private static final Logger _logger = + LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); + + private final Map> _typeToEntity; + + public BrowsePathsResolver(@Nonnull final List> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } + + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final BrowsePathsInput input = + bindArgument(environment.getArgument("input"), BrowsePathsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Fetch browse paths. entity type: %s, urn: %s", + input.getType(), input.getUrn())); + if (_typeToEntity.containsKey(input.getType())) { + return _typeToEntity + .get(input.getType()) + .browsePaths(input.getUrn(), environment.getContext()); } + // Browse path is impl detail. + return Collections.emptyList(); + } catch (Exception e) { + _logger.error( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java index 9c95eceb1e78f..287d0eef8aec8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java @@ -1,77 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowseInput; import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowseResolver implements DataFetcher> { - private static final int DEFAULT_START = 0; - private static final int DEFAULT_COUNT = 10; + private static final int DEFAULT_START = 0; + private static final int DEFAULT_COUNT = 10; - private static final Logger _logger 
= LoggerFactory.getLogger(BrowseResolver.class.getName()); + private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); - private final Map> _typeToEntity; + private final Map> _typeToEntity; - public BrowseResolver(@Nonnull final List> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } + public BrowseResolver(@Nonnull final List> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); - final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; - final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; + final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; + final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Executing browse. entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count)); - return _typeToEntity.get(input.getType()).browse( - input.getPath(), - input.getFilters(), - start, - count, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count) + " " + e.getMessage()); - throw new RuntimeException("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count), e); - } + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Executing browse. 
entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count)); + return _typeToEntity + .get(input.getType()) + .browse( + input.getPath(), input.getFilters(), start, count, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java index 81f82c93f1fa7..396d91c37d81c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java @@ -1,14 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.browse; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.types.BrowsableEntityType; -import com.linkedin.datahub.graphql.generated.BrowsePath; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; public class EntityBrowsePathsResolver implements DataFetcher>> { @@ -24,12 +24,14 @@ public CompletableFuture> get(DataFetchingEnvironment environme final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - return CompletableFuture.supplyAsync(() -> { - try { - return _browsableType.browsePaths(urn, context); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _browsableType.browsePaths(urn, context); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 76abddc9a99a9..292d6108b7a04 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.chart; +import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; @@ -17,18 +21,13 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -52,30 +51,40 @@ public CompletableFuture get(DataFetchingEnvironment environmen // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); - return CompletableFuture.supplyAsync(() -> { - try { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - final String pathStr = input.getPath().size() > 0 ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + return CompletableFuture.supplyAsync( + () -> { + try { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + final String pathStr = + input.getPath().size() > 0 + ? BROWSE_PATH_V2_DELIMITER + + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) + : ""; + final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); - BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, - pathStr, - maybeResolvedView != null - ? SearchUtils.combineFilters(filter, maybeResolvedView.getDefinition().getFilter()) - : filter, - sanitizedQuery, - start, - count, - context.getAuthentication() - ); - return mapBrowseResults(browseResults); - } catch (Exception e) { - throw new RuntimeException("Failed to execute browse V2", e); - } - }); + BrowseResultV2 browseResults = + _entityClient.browseV2( + entityName, + pathStr, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + filter, maybeResolvedView.getDefinition().getFilter()) + : filter, + sanitizedQuery, + start, + count, + context.getAuthentication()); + return mapBrowseResults(browseResults); + } catch (Exception e) { + throw new RuntimeException("Failed to execute browse V2", e); + } + }); } private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { @@ -85,28 +94,29 @@ private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { results.setCount(browseResults.getPageSize()); List groups = new ArrayList<>(); - browseResults.getGroups().forEach(group -> { - BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); - browseGroup.setName(group.getName()); - browseGroup.setCount(group.getCount()); - browseGroup.setHasSubGroups(group.isHasSubGroups()); - if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); - } - groups.add(browseGroup); - }); + browseResults + .getGroups() + .forEach( + group -> { + BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); + browseGroup.setName(group.getName()); + browseGroup.setCount(group.getCount()); + browseGroup.setHasSubGroups(group.isHasSubGroups()); + if (group.hasUrn() && group.getUrn() != null) { + browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); + } + groups.add(browseGroup); + }); results.setGroups(groups); BrowseResultMetadata resultMetadata = new BrowseResultMetadata(); - resultMetadata.setPath(Arrays.stream(browseResults.getMetadata().getPath() - .split(BROWSE_PATH_V2_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()) - ); + resultMetadata.setPath( + Arrays.stream(browseResults.getMetadata().getPath().split(BROWSE_PATH_V2_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList())); resultMetadata.setTotalNumEntities(browseResults.getMetadata().getTotalNumEntities()); results.setMetadata(resultMetadata); return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java index 207da02de6ec2..a2d04a26bfa97 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java @@ -11,24 +11,23 @@ import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; - @Slf4j -public class ChartStatsSummaryResolver implements DataFetcher> { +public class ChartStatsSummaryResolver + implements DataFetcher> { private final TimeseriesAspectService timeseriesAspectService; private final Cache summaryCache; public ChartStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) - .build(); + this.summaryCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build(); } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { // Not yet implemented return CompletableFuture.completedFuture(null); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index f6bc68caa0821..34f7f133f6fb9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -35,10 +35,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * Resolver responsible for serving app configurations to the React UI. - */ +/** Resolver responsible for serving app configurations to the React UI. */ public class AppConfigResolver implements DataFetcher> { private final GitVersion _gitVersion; @@ -82,7 +79,8 @@ public AppConfigResolver( } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); @@ -103,19 +101,20 @@ public CompletableFuture get(final DataFetchingEnvironment environmen final PoliciesConfig policiesConfig = new PoliciesConfig(); policiesConfig.setEnabled(_authorizationConfiguration.getDefaultAuthorizer().isEnabled()); - policiesConfig.setPlatformPrivileges(com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES - .stream() - .map(this::mapPrivilege) - .collect(Collectors.toList())); + policiesConfig.setPlatformPrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES.stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); - policiesConfig.setResourcePrivileges(com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES - .stream() - .map(this::mapResourcePrivileges) - .collect(Collectors.toList()) - ); + policiesConfig.setResourcePrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES.stream() + .map(this::mapResourcePrivileges) + .collect(Collectors.toList())); final IdentityManagementConfig identityManagementConfig = new IdentityManagementConfig(); - identityManagementConfig.setEnabled(true); // Identity Management always enabled. TODO: Understand if there's a case where this should change. + identityManagementConfig.setEnabled( + true); // Identity Management always enabled. TODO: Understand if there's a case where this + // should change. 
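A side effect of the mechanical reformat is visible in the hunk above: google-java-format wraps trailing line comments that push a statement past the 100-column limit, which here splits the TODO into two half-lines. A minimal sketch of the usual workaround, keeping such comments on their own lines (the enclosing class is hypothetical, for illustration only):

class IdentityManagementConfigNote {
  private boolean enabled;

  void apply() {
    // Identity Management always enabled.
    // TODO: Understand if there's a case where this should change.
    enabled = true;
  }
}
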
final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig(); ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled()); @@ -133,7 +132,8 @@ public CompletableFuture get(final DataFetchingEnvironment environmen } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); - queriesTabConfig.setQueriesTabResultSize(_visualConfiguration.getQueriesTab().getQueriesTabResultSize()); + queriesTabConfig.setQueriesTabResultSize( + _visualConfiguration.getQueriesTab().getQueriesTabResultSize()); visualConfig.setQueriesTab(queriesTabConfig); } if (_visualConfiguration != null && _visualConfiguration.getEntityProfile() != null) { @@ -148,7 +148,8 @@ public CompletableFuture get(final DataFetchingEnvironment environmen if (_visualConfiguration != null && _visualConfiguration.getSearchResult() != null) { SearchResultsVisualConfig searchResultsVisualConfig = new SearchResultsVisualConfig(); if (_visualConfiguration.getSearchResult().getEnableNameHighlight() != null) { - searchResultsVisualConfig.setEnableNameHighlight(_visualConfiguration.getSearchResult().getEnableNameHighlight()); + searchResultsVisualConfig.setEnableNameHighlight( + _visualConfiguration.getSearchResult().getEnableNameHighlight()); } visualConfig.setSearchResult(searchResultsVisualConfig); } @@ -166,14 +167,15 @@ public CompletableFuture get(final DataFetchingEnvironment environmen viewsConfig.setEnabled(_viewsConfiguration.isEnabled()); appConfig.setViewsConfig(viewsConfig); - final FeatureFlagsConfig featureFlagsConfig = FeatureFlagsConfig.builder() - .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) - .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) - .setShowBrowseV2(_featureFlags.isShowBrowseV2()) - .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) - .setShowAccessManagement(_featureFlags.isShowAccessManagement()) - .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) - .build(); + final FeatureFlagsConfig featureFlagsConfig = + FeatureFlagsConfig.builder() + .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) + .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) + .setShowBrowseV2(_featureFlags.isShowBrowseV2()) + .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) + .setShowAccessManagement(_featureFlags.isShowAccessManagement()) + .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .build(); appConfig.setFeatureFlags(featureFlagsConfig); @@ -185,14 +187,17 @@ private ResourcePrivileges mapResourcePrivileges( final ResourcePrivileges graphQLPrivileges = new ResourcePrivileges(); graphQLPrivileges.setResourceType(resourcePrivileges.getResourceType()); graphQLPrivileges.setResourceTypeDisplayName(resourcePrivileges.getResourceTypeDisplayName()); - graphQLPrivileges.setEntityType(mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); + graphQLPrivileges.setEntityType( + mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); graphQLPrivileges.setPrivileges( - resourcePrivileges.getPrivileges().stream().map(this::mapPrivilege).collect(Collectors.toList()) - ); + resourcePrivileges.getPrivileges().stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); return graphQLPrivileges; } - private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { + private Privilege mapPrivilege( + com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { final Privilege 
graphQLPrivilege = new Privilege(); graphQLPrivilege.setType(privilege.getType()); graphQLPrivilege.setDisplayName(privilege.getDisplayName()); @@ -202,29 +207,53 @@ private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfi private EntityType mapResourceTypeToEntityType(final String resourceType) { // TODO: Is there a better way to instruct the UI to present a searchable resource? - if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES.getResourceType().equals(resourceType)) { + if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATASET; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DASHBOARD; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CHART; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_FLOW; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_JOB; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.TAG; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_TERM; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_NODE; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DOMAIN; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CONTAINER; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_GROUP; - } 
else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_USER; } else { return null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 4b8bd37a4fabe..58f7715c3e627 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; @@ -20,21 +22,16 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. */ @Slf4j public class ContainerEntitiesResolver implements DataFetcher> { - static final List CONTAINABLE_ENTITY_NAMES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME - ); + static final List CONTAINABLE_ENTITY_NAMES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME); private static final String CONTAINER_FIELD_NAME = "container"; private static final String INPUT_ARG_NAME = "input"; private static final String DEFAULT_QUERY = "*"; @@ -55,45 +52,53 @@ public ContainerEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Container) environment.getSource()).getUrn(); - final ContainerEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final ContainerEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : "*"; final int start = input.getStart() != null ? input.getStart() : 0; final int count = input.getCount() != null ? 
input.getCount() : 20; - return CompletableFuture.supplyAsync(() -> { - - try { - - final Criterion filterCriterion = new Criterion() - .setField(CONTAINER_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - CONTAINABLE_ENTITY_NAMES, - query, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with container with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final Criterion filterCriterion = + new Criterion() + .setField(CONTAINER_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + CONTAINABLE_ENTITY_NAMES, + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve entities associated with container with urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 90fad4ca4578a..9502fb8e5cb93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -12,15 +14,13 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; - -public class ParentContainersResolver implements DataFetcher> { +public class ParentContainersResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -28,21 +28,25 @@ public ParentContainersResolver(final EntityClient entityClient) { _entityClient = entityClient; } - private void aggregateParentContainers(List containers, String urn, QueryContext context) { + private void aggregateParentContainers( + List containers, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(CONTAINER_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && 
entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); - EntityResponse response = _entityClient.getV2(containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); if (response != null) { Container mappedContainer = ContainerMapper.map(response); containers.add(mappedContainer); @@ -61,16 +65,17 @@ public CompletableFuture get(DataFetchingEnvironment env final String urn = ((Entity) environment.getSource()).getUrn(); final List containers = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - aggregateParentContainers(containers, urn, context); - final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); - return result; - } catch (DataHubGraphQLException e) { - throw new RuntimeException("Failed to load all containers", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + aggregateParentContainers(containers, urn, context); + final ParentContainersResult result = new ParentContainersResult(); + result.setCount(containers.size()); + result.setContainers(containers); + return result; + } catch (DataHubGraphQLException e) { + throw new RuntimeException("Failed to load all containers", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index db125384745a1..b5480359bde6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpUser; -import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; +import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUserUsageCounts; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.query.filter.Filter; @@ -19,10 +21,9 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - @Slf4j -public class DashboardStatsSummaryResolver implements DataFetcher> { +public class DashboardStatsSummaryResolver + implements DataFetcher> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -32,63 +33,72 @@ public class 
DashboardStatsSummaryResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - - try { - - final DashboardStatsSummary result = new DashboardStatsSummary(); - - // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. - List dashboardUsageMetrics = - getDashboardUsageMetrics(resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); - if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); - } - - // Obtain unique user statistics, by rolling up unique users over the past month. - List userUsageCounts = getDashboardUsagePerUser(resourceUrn); - result.setUniqueUserCountLast30Days(userUsageCounts.size()); - result.setTopUsersLast30Days( - trimUsers(userUsageCounts.stream().map(DashboardUserUsageCounts::getUser).collect(Collectors.toList()))); - - this.summaryCache.put(resourceUrn, result); - return result; - - } catch (Exception e) { - log.error(String.format("Failed to load dashboard usage summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } + + try { + + final DashboardStatsSummary result = new DashboardStatsSummary(); + + // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. + List dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + if (dashboardUsageMetrics.size() > 0) { + result.setViewCount(getDashboardViewCount(resourceUrn)); + } + + // Obtain unique user statistics, by rolling up unique users over the past month. + List userUsageCounts = getDashboardUsagePerUser(resourceUrn); + result.setUniqueUserCountLast30Days(userUsageCounts.size()); + result.setTopUsersLast30Days( + trimUsers( + userUsageCounts.stream() + .map(DashboardUserUsageCounts::getUser) + .collect(Collectors.toList()))); + + this.summaryCache.put(resourceUrn, result); + return result; + + } catch (Exception e) { + log.error( + String.format( + "Failed to load dashboard usage summary for resource %s", + resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. 
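The resolver above is a small read-through cache: summaries are served from a bounded Guava cache and recomputed only after the six-hour expiry, so repeated profile views do not re-run the timeseries aggregation. A minimal sketch of the same pattern, assuming Guava is on the classpath and using String as a stand-in for the real summary type:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.TimeUnit;

public class SummaryCache {
  // Bounded cache: at most 10,000 entries, each considered fresh for 6 hours.
  private final Cache<String, String> cache =
      CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build();

  public String getSummary(String resourceUrn) {
    String cached = cache.getIfPresent(resourceUrn);
    if (cached != null) {
      return cached; // cache hit: skip the expensive aggregation
    }
    String computed = compute(resourceUrn); // stand-in for the real timeseries query
    cache.put(resourceUrn, computed);
    return computed;
  }

  private String compute(String resourceUrn) {
    return "summary:" + resourceUrn;
  }
}

Note that the getIfPresent/put sequence is racy under concurrent access (two threads may both compute); Guava's Cache.get(key, loader) collapses that into a single per-key load if that ever matters here.
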
+ } + }); } private int getDashboardViewCount(final Urn resourceUrn) { - List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), - null, - null, - 1, - this.timeseriesAspectService); + List dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } private List getDashboardUsagePerUser(final Urn resourceUrn) { long now = System.currentTimeMillis(); long nowMinusOneMonth = timeMinusOneMonth(now); - Filter bucketStatsFilter = createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); + Filter bucketStatsFilter = + createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); } @@ -98,4 +108,4 @@ private List trimUsers(final List originalUsers) { } return originalUsers; } - } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 24e1db33e9d40..07d028b07b01d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; @@ -26,16 +28,14 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - /** * Resolver used for resolving the usage statistics of a Dashboard. - *
<p>
- * Returns daily as well as absolute usage metrics of Dashboard + * + *
<p>
Returns daily as well as absolute usage metrics of Dashboard */ @Slf4j -public class DashboardUsageStatsResolver implements DataFetcher> { +public class DashboardUsageStatsResolver + implements DataFetcher> { private static final String ES_FIELD_EVENT_GRANULARITY = "eventGranularity"; private final TimeseriesAspectService timeseriesAspectService; @@ -44,34 +44,40 @@ public DashboardUsageStatsResolver(TimeseriesAspectService timeseriesAspectServi } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); // Max number of aspects to return for absolute dashboard usage. final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - return CompletableFuture.supplyAsync(() -> { - DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); + return CompletableFuture.supplyAsync( + () -> { + DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); - // Time Bucket Stats - Filter bucketStatsFilter = createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); - List dailyUsageBuckets = getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); + // Time Bucket Stats + Filter bucketStatsFilter = + createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); + List dailyUsageBuckets = + getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); - usageQueryResult.setBuckets(dailyUsageBuckets); - usageQueryResult.setAggregations(aggregations); + usageQueryResult.setBuckets(dailyUsageBuckets); + usageQueryResult.setAggregations(aggregations); - // Absolute usage metrics - List dashboardUsageMetrics = - getDashboardUsageMetrics(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); - usageQueryResult.setMetrics(dashboardUsageMetrics); - return usageQueryResult; - }); + // Absolute usage metrics + List dashboardUsageMetrics = + getDashboardUsageMetrics( + dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + usageQueryResult.setMetrics(dashboardUsageMetrics); + return usageQueryResult; + }); } - private List getDashboardUsageMetrics(String dashboardUrn, Long maybeStartTimeMillis, - Long maybeEndTimeMillis, Integer maybeLimit) { + private List getDashboardUsageMetrics( + String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, Integer maybeLimit) { List dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -79,16 +85,26 @@ private List getDashboardUsageMetrics(String dashboardUrn // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); - filter.setOr(new ConjunctiveCriterionArray( - 
ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); List aspects = - timeseriesAspectService.getAspectValues(Urn.createFromString(dashboardUrn), Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit, + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 462c18ea33dd4..4f170a296c47e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -32,7 +32,6 @@ import java.util.List; import java.util.stream.Collectors; - public class DashboardUsageStatsUtils { public static final String ES_FIELD_URN = "urn"; @@ -49,15 +48,17 @@ public static List getDashboardUsageMetrics( List dashboardUsageMetrics; try { Filter filter = createUsageFilter(dashboardUrn, null, null, false); - List aspects = timeseriesAspectService.getAspectValues( - Urn.createFromString(dashboardUrn), - Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + List aspects = + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + filter); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } @@ -69,8 +70,10 @@ public static DashboardUsageQueryResultAggregations getAggregations( List dailyUsageBuckets, TimeseriesAspectService timeseriesAspectService) { - List userUsageCounts = getUserUsageCounts(filter, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = new DashboardUsageQueryResultAggregations(); + List userUsageCounts = + getUserUsageCounts(filter, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + new DashboardUsageQueryResultAggregations(); aggregations.setUsers(userUsageCounts); aggregations.setUniqueUserCount(userUsageCounts.size()); @@ -99,29 +102,47 @@ public static DashboardUsageQueryResultAggregations getAggregations( } public static List getBuckets( - Filter filter, - String dashboardUrn, - TimeseriesAspectService timeseriesAspectService) { + Filter filter, String 
dashboardUrn, TimeseriesAspectService timeseriesAspectService) { AggregationSpec usersCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountAggregation = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("viewsCount"); AggregationSpec executionsCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("executionsCount"); AggregationSpec usersCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("viewsCount"); AggregationSpec executionsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{usersCountAggregation, viewsCountAggregation, executionsCountAggregation, - usersCountCardinalityAggregation, viewsCountCardinalityAggregation, executionsCountCardinalityAggregation}; - GenericTable dailyStats = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, - createUsageGroupingBuckets(CalendarInterval.DAY)); + new AggregationSpec[] { + usersCountAggregation, + viewsCountAggregation, + executionsCountAggregation, + usersCountCardinalityAggregation, + viewsCountCardinalityAggregation, + executionsCountCardinalityAggregation + }; + GenericTable dailyStats = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + createUsageGroupingBuckets(CalendarInterval.DAY)); List buckets = new ArrayList<>(); for (StringArray row : dailyStats.getRows()) { @@ -130,7 +151,8 @@ public static List getBuckets( usageAggregation.setDuration(WindowDuration.DAY); usageAggregation.setResource(dashboardUrn); - DashboardUsageAggregationMetrics usageAggregationMetrics = new DashboardUsageAggregationMetrics(); + DashboardUsageAggregationMetrics usageAggregationMetrics = + new DashboardUsageAggregationMetrics(); if (!row.get(1).equals(ES_NULL_VALUE) && !row.get(4).equals(ES_NULL_VALUE)) { try { @@ -156,7 +178,8 @@ public static List getBuckets( usageAggregationMetrics.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert executionsCount from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert executionsCount from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -165,34 +188,59 @@ public static List getBuckets( return buckets; } - public static List getUserUsageCounts(Filter filter, TimeseriesAspectService timeseriesAspectService) { + public static List 
getUserUsageCounts( + Filter filter, TimeseriesAspectService timeseriesAspectService) { // Sum aggregation on userCounts.count AggregationSpec sumUsageCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.usageCount"); AggregationSpec sumViewCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.viewsCount"); AggregationSpec sumExecutionCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.executionsCount"); AggregationSpec usageCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.usageCount"); AggregationSpec viewCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.viewsCount"); AggregationSpec executionCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY) + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) .setFieldPath("userCounts.executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{sumUsageCountsCountAggSpec, sumViewCountsCountAggSpec, sumExecutionCountsCountAggSpec, - usageCountsCardinalityAggSpec, viewCountsCardinalityAggSpec, executionCountsCardinalityAggSpec}; + new AggregationSpec[] { + sumUsageCountsCountAggSpec, + sumViewCountsCountAggSpec, + sumExecutionCountsCountAggSpec, + usageCountsCardinalityAggSpec, + viewCountsCardinalityAggSpec, + executionCountsCardinalityAggSpec + }; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend - GenericTable result = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, groupingBuckets); + GenericTable result = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -208,7 +256,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setUsageCount(Integer.valueOf(row.get(1))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if 
(!row.get(2).equals(ES_NULL_VALUE) && row.get(5).equals(ES_NULL_VALUE)) { @@ -217,7 +266,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setViewsCount(Integer.valueOf(row.get(2))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user views count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user views count from ES to int", e); } } if (!row.get(3).equals(ES_NULL_VALUE) && !row.get(6).equals(ES_NULL_VALUE)) { @@ -226,7 +276,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user executions count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user executions count from ES to int", e); } } userUsageCounts.add(userUsageCount); @@ -239,17 +290,15 @@ public static List getUserUsageCounts(Filter filter, T private static GroupingBucket[] createUsageGroupingBuckets(CalendarInterval calenderInterval) { GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(calenderInterval)); - return new GroupingBucket[]{timestampBucket}; + return new GroupingBucket[] {timestampBucket}; } public static Filter createUsageFilter( - String dashboardUrn, - Long startTime, - Long endTime, - boolean byBucket) { + String dashboardUrn, Long startTime, Long endTime, boolean byBucket) { Filter filter = new Filter(); final ArrayList criteria = new ArrayList<>(); @@ -260,44 +309,55 @@ public static Filter createUsageFilter( if (startTime != null) { // Add filter for start time - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(Long.toString(startTime)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(Long.toString(startTime)); criteria.add(startTimeCriterion); } if (endTime != null) { // Add filter for end time - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(Long.toString(endTime)); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(Long.toString(endTime)); criteria.add(endTimeCriterion); } if (byBucket) { - // Add filter for presence of eventGranularity - only consider bucket stats and not absolute stats + // Add filter for presence of eventGranularity - only consider bucket stats and not absolute + // stats // since unit is mandatory, we assume if eventGranularity contains unit, then it is not null Criterion onlyTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.CONTAIN).setValue("unit"); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.CONTAIN) + .setValue("unit"); criteria.add(onlyTimeBucketsCriterion); } else { // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + 
.setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); } - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } - public static Long timeMinusOneMonth(long time) { final long oneHourMillis = 60 * 60 * 1000; final long oneDayMillis = 24 * oneHourMillis; return time - (31 * oneDayMillis + 1); } - private DashboardUsageStatsUtils() { } + private DashboardUsageStatsUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java index 9c32fa1c08076..f5d4f949e5710 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -8,15 +10,12 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -27,54 +26,80 @@ public class BatchSetDataProductResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDataProductInput input = bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); + final BatchSetDataProductInput input = + bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); final String maybeDataProductUrn = input.getDataProductUrn(); final List resources = input.getResourceUrns(); - return CompletableFuture.supplyAsync(() -> { - - verifyResources(resources, context); - verifyDataProduct(maybeDataProductUrn, context); + return CompletableFuture.supplyAsync( + () -> { + verifyResources(resources, context); + verifyDataProduct(maybeDataProductUrn, context); - try { - List resourceUrns = resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (maybeDataProductUrn != null) { - batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); - } else { - batchUnsetDataProduct(resourceUrns, context); - } - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + List resourceUrns = + 
resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + if (maybeDataProductUrn != null) { + batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); + } else { + batchUnsetDataProduct(resourceUrns, context); + } + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void verifyResources(List resources, QueryContext context) { for (String resource : resources) { - if (!_dataProductService.verifyEntityExists(UrnUtils.getUrn(resource), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, %s in resources does not exist", resource)); + if (!_dataProductService.verifyEntityExists( + UrnUtils.getUrn(resource), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, %s in resources does not exist", resource)); } Urn resourceUrn = UrnUtils.getUrn(resource); - if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity( + context, resourceUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } } private void verifyDataProduct(String maybeDataProductUrn, QueryContext context) { - if (maybeDataProductUrn != null && !_dataProductService.verifyEntityExists(UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, Data Product urn %s does not exist", maybeDataProductUrn)); + if (maybeDataProductUrn != null + && !_dataProductService.verifyEntityExists( + UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, Data Product urn %s does not exist", + maybeDataProductUrn)); } } - private void batchSetDataProduct(@Nonnull String dataProductUrn, List resources, QueryContext context) { - log.debug("Batch setting Data Product. dataProduct urn: {}, resources: {}", dataProductUrn, resources); + private void batchSetDataProduct( + @Nonnull String dataProductUrn, List resources, QueryContext context) { + log.debug( + "Batch setting Data Product. dataProduct urn: {}, resources: {}", + dataProductUrn, + resources); try { - _dataProductService.batchSetDataProduct(UrnUtils.getUrn(dataProductUrn), resources, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.batchSetDataProduct( + UrnUtils.getUrn(dataProductUrn), + resources, + context.getAuthentication(), + UrnUtils.getUrn(context.getActorUrn())); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Data Product %s to resources with urns %s!", dataProductUrn, resources), e); + throw new RuntimeException( + String.format( + "Failed to batch set Data Product %s to resources with urns %s!", + dataProductUrn, resources), + e); } } @@ -82,10 +107,14 @@ private void batchUnsetDataProduct(List resources, QueryContext context) { log.debug("Batch unsetting Data Product. 
resources: {}", resources); try { for (Urn resource : resources) { - _dataProductService.unsetDataProduct(resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.unsetDataProduct( + resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch unset data product for resources with urns %s!", resources), e); + throw new RuntimeException( + String.format( + "Failed to batch unset data product for resources with urns %s!", resources), + e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index f644ff31a571b..10c487a839f35 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -12,13 +14,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class CreateDataProductResolver implements DataFetcher> { @@ -26,37 +25,45 @@ public class CreateDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateDataProductInput input = bindArgument(environment.getArgument("input"), CreateDataProductInput.class); + final CreateDataProductInput input = + bindArgument(environment.getArgument("input"), CreateDataProductInput.class); final Authentication authentication = context.getAuthentication(); final Urn domainUrn = UrnUtils.getUrn(input.getDomainUrn()); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Domain provided dos not exist"); - } - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - try { - final Urn dataProductUrn = _dataProductService.createDataProduct( - input.getProperties().getName(), - input.getProperties().getDescription(), - authentication); - _dataProductService.setDomain(dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new DataProduct from input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Domain provided dos not exist"); + } + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + final Urn dataProductUrn = + _dataProductService.createDataProduct( + input.getProperties().getName(), + input.getProperties().getDescription(), + authentication); + _dataProductService.setDomain( + dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new DataProduct from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java index 596e292e7fe33..f6fe11a587a39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java @@ -7,25 +7,27 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.authorization.PoliciesConfig; -import lombok.extern.slf4j.Slf4j; - import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DataProductAuthorizationUtils { - private DataProductAuthorizationUtils() { + private DataProductAuthorizationUtils() {} - } - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - 
new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDataProductsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -35,11 +37,14 @@ public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryCo orPrivilegeGroups); } - public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext context, Urn domainUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToManageDataProducts( + @Nonnull QueryContext context, Urn domainUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -49,10 +54,10 @@ public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext con orPrivilegeGroups); } - public static boolean isAuthorizedToEditDataProduct(@Nonnull QueryContext context, Urn dataProductUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP - )); + public static boolean isAuthorizedToEditDataProduct( + @Nonnull QueryContext context, Urn dataProductUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(ALL_PRIVILEGES_GROUP)); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java index fd31e2199c22a..ea13f96cfc1bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java @@ -9,11 +9,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class DeleteDataProductResolver implements DataFetcher> { @@ -21,32 +20,38 @@ public class DeleteDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if 
(!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } - - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - } - - try { - _dataProductService.deleteDataProduct(dataProductUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Data Product", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Data Product provided dos not exist"); + } + + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + } + + try { + _dataProductService.deleteDataProduct(dataProductUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Data Product", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index 831d449bef9ef..a0f1698bf99e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -22,18 +25,14 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * Resolver responsible for 
getting the assets belonging to a Data Product. Get the assets from the @@ -41,7 +40,8 @@ */ @Slf4j @RequiredArgsConstructor -public class ListDataProductAssetsResolver implements DataFetcher> { +public class ListDataProductAssetsResolver + implements DataFetcher> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; @@ -52,7 +52,10 @@ public class ListDataProductAssetsResolver implements DataFetcher get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); // get urn from either input or source (in the case of "entities" field) - final String urn = environment.getArgument("urn") != null ? environment.getArgument("urn") : ((DataProduct) environment.getSource()).getUrn(); + final String urn = + environment.getArgument("urn") != null + ? environment.getArgument("urn") + : ((DataProduct) environment.getSource()).getUrn(); final Urn dataProductUrn = UrnUtils.getUrn(urn); final SearchAcrossEntitiesInput input = bindArgument(environment.getArgument("input"), SearchAcrossEntitiesInput.class); @@ -60,32 +63,52 @@ public CompletableFuture get(DataFetchingEnvironment environment) // 1. Get urns of assets belonging to Data Product using an aspect query List assetUrns = new ArrayList<>(); try { - final EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data(); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse != null + && entityResponse + .getAspects() + .containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + final DataMap data = + entityResponse + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); final DataProductProperties dataProductProperties = new DataProductProperties(data); if (dataProductProperties.hasAssets()) { - assetUrns.addAll(dataProductProperties.getAssets().stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList())); + assetUrns.addAll( + dataProductProperties.getAssets().stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList())); } } } catch (Exception e) { log.error(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); - throw new RuntimeException(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to list data product assets with urn %s", dataProductUrn), e); } // 2. Get list of entities that we should query based on filters or assets from aspect. - List entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); - - - final List inputEntityTypes = (input.getTypes() == null || input.getTypes().isEmpty()) ? ImmutableList.of() : input.getTypes(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).distinct().collect(Collectors.toList()); - - final List finalEntityNames = inputEntityNames.size() > 0 ? 
inputEntityNames : entitiesToQuery; + List entitiesToQuery = + assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); + + final List inputEntityTypes = + (input.getTypes() == null || input.getTypes().isEmpty()) + ? ImmutableList.of() + : input.getTypes(); + final List inputEntityNames = + inputEntityTypes.stream() + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + + final List finalEntityNames = + inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); @@ -93,49 +116,64 @@ public CompletableFuture get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - // if no assets in data product properties, exit early before search and return empty results - if (assetUrns.size() == 0) { - SearchResults results = new SearchResults(); - results.setStart(start); - results.setCount(count); - results.setTotal(0); - results.setSearchResults(ImmutableList.of()); - return results; - } - - // add urns from the aspect to our filters - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } - - try { - log.debug( - "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - finalEntityNames, - sanitizedQuery, - finalFilter, - start, - count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // if no assets in data product properties, exit early before search and return empty + // results + if (assetUrns.size() == 0) { + SearchResults results = new SearchResults(); + results.setStart(start); + results.setCount(count); + results.setTotal(0); + results.setSearchResults(ImmutableList.of()); + return results; + } + + // add urns from the aspect to our filters + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); + + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } + + try { + log.debug( + "Executing search for data product assets: entity types 
{}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + finalEntityNames, + sanitizedQuery, + finalFilter, + start, + count, + searchFlags, + null, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java index 79afddbb873fb..304ef96d90aa5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,13 +15,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class UpdateDataProductResolver implements DataFetcher> { @@ -27,43 +26,51 @@ public class UpdateDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateDataProductInput input = bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); + final UpdateDataProductInput input = + bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Data Product provided dos not exist"); + } - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right 
now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - } + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + } - try { - final Urn urn = _dataProductService.updateDataProduct( - dataProductUrn, - input.getName(), - input.getDescription(), - authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); - } - }); + try { + final Urn urn = + _dataProductService.updateDataProduct( + dataProductUrn, input.getName(), input.getDescription(), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(urn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java index 1587df4c9899b..604c46a1f7c01 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java @@ -39,13 +39,11 @@ import lombok.Data; import lombok.extern.slf4j.Slf4j; - /** * Resolver used for resolving the Health state of a Dataset. * - * Currently, the health status is calculated via the validation on a Dataset. If there are no validations found, the - * health status will be undefined for the Dataset. - * + *
<p>
Currently, the health status is calculated via the validation on a Dataset. If there are no + * validations found, the health status will be undefined for the Dataset. */ @Slf4j public class DatasetHealthResolver implements DataFetcher>> { @@ -60,47 +58,48 @@ public class DatasetHealthResolver implements DataFetcher _statusCache; public DatasetHealthResolver( - final GraphClient graphClient, - final TimeseriesAspectService timeseriesAspectService) { + final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService) { this(graphClient, timeseriesAspectService, new Config(true)); - } + public DatasetHealthResolver( final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService, final Config config) { _graphClient = graphClient; _timeseriesAspectService = timeseriesAspectService; - _statusCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(1, TimeUnit.MINUTES) - .build(); + _statusCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(1, TimeUnit.MINUTES).build(); _config = config; } @Override - public CompletableFuture> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { final Dataset parent = environment.getSource(); - return CompletableFuture.supplyAsync(() -> { - try { - final CachedHealth cachedStatus = _statusCache.get(parent.getUrn(), () -> ( - computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); - return cachedStatus.healths; - } catch (Exception e) { - throw new RuntimeException("Failed to resolve dataset's health status.", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final CachedHealth cachedStatus = + _statusCache.get( + parent.getUrn(), + () -> + (computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); + return cachedStatus.healths; + } catch (Exception e) { + throw new RuntimeException("Failed to resolve dataset's health status.", e); + } + }); } /** * Computes the "resolved health status" for a Dataset by * - * - fetching active (non-deleted) assertions - * - fetching latest assertion run for each - * - checking whether any of the assertions latest runs are failing - * + *
<p>
- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing */ - private CachedHealth computeHealthStatusForDataset(final String datasetUrn, final QueryContext context) { + private CachedHealth computeHealthStatusForDataset( + final String datasetUrn, final QueryContext context) { final List healthStatuses = new ArrayList<>(); if (_config.getAssertionsEnabled()) { @@ -113,31 +112,33 @@ private CachedHealth computeHealthStatusForDataset(final String datasetUrn, fina } /** - * Returns the resolved "assertions health", which is currently a static function of whether the most recent run of - * all dataset assertions has succeeded. + * Returns the resolved "assertions health", which is currently a static function of whether the + * most recent run of all dataset assertions has succeeded. * * @param datasetUrn the dataset to compute health for * @param context the query context * @return an instance of {@link Health} for the Dataset, null if one cannot be computed. */ @Nullable - private Health computeAssertionHealthForDataset(final String datasetUrn, final QueryContext context) { + private Health computeAssertionHealthForDataset( + final String datasetUrn, final QueryContext context) { // Get active assertion urns - final EntityRelationships relationships = _graphClient.getRelatedEntities( - datasetUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - 0, - 500, - context.getActorUrn() - ); + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + datasetUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); if (relationships.getTotal() > 0) { // If there are assertions defined, then we should return a non-null health for this asset. - final Set activeAssertionUrns = relationships.getRelationships() - .stream() - .map(relationship -> relationship.getEntity().toString()).collect(Collectors.toSet()); + final Set activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); final GenericTable assertionRunResults = getAssertionRunsTable(datasetUrn); @@ -146,22 +147,24 @@ private Health computeAssertionHealthForDataset(final String datasetUrn, final Q return null; } - final List failingAssertionUrns = getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + final List failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); // Finally compute & return the health. 
final Health health = new Health(); health.setType(HealthStatusType.ASSERTIONS); if (failingAssertionUrns.size() > 0) { health.setStatus(HealthStatus.FAIL); - health.setMessage(String.format("%s of %s assertions are failing", failingAssertionUrns.size(), - activeAssertionUrns.size())); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); health.setCauses(failingAssertionUrns); } else { health.setStatus(HealthStatus.PASS); health.setMessage("All assertions are passing"); } return health; - } return null; } @@ -175,7 +178,8 @@ private GenericTable getAssertionRunsTable(final String asserteeUrn) { createAssertionGroupingBuckets()); } - private List getFailingAssertionUrns(final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { + private List getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { // Create the buckets based on the result return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); } @@ -191,12 +195,15 @@ private Filter createAssertionsFilter(final String datasetUrn) { // Add filter for result == result Criterion startTimeCriterion = - new Criterion().setField("status").setCondition(Condition.EQUAL).setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); criteria.add(startTimeCriterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } @@ -205,31 +212,38 @@ private AggregationSpec[] createAssertionAggregationSpecs() { AggregationSpec resultTypeAggregation = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); AggregationSpec timestampAggregation = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("timestampMillis"); - return new AggregationSpec[]{resultTypeAggregation, timestampAggregation}; + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; } private GroupingBucket[] createAssertionGroupingBuckets() { // String grouping bucket on "assertionUrn" GroupingBucket assertionUrnBucket = new GroupingBucket(); assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - return new GroupingBucket[]{assertionUrnBucket}; + return new GroupingBucket[] {assertionUrnBucket}; } - private List resultToFailedAssertionUrns(final StringArrayArray rows, final Set activeAssertionUrns) { + private List resultToFailedAssertionUrns( + final StringArrayArray rows, final Set activeAssertionUrns) { final List failedAssertionUrns = new ArrayList<>(); for (StringArray row : rows) { // Result structure should be assertionUrn, event.result.type, timestampMillis if (row.size() != 3) { - throw new RuntimeException(String.format( - "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", row.size())); + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! 
Expected row of size 3, found %s", + row.size())); } final String assertionUrn = row.get(0); final String resultType = row.get(1); - // If assertion is "active" (not deleted) & is failing, then we report a degradation in health. - if (activeAssertionUrns.contains(assertionUrn) && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. + if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { failedAssertionUrns.add(assertionUrn); } } @@ -246,4 +260,4 @@ public static class Config { private static class CachedHealth { private final List healths; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java index 2873866bb34f7..74fbd9c2c868a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java @@ -24,13 +24,13 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - /** * This resolver is a thin wrapper around the {@link DatasetUsageStatsResolver} which simply * computes some aggregate usage metrics for a Dashboard. */ @Slf4j -public class DatasetStatsSummaryResolver implements DataFetcher> { +public class DatasetStatsSummaryResolver + implements DataFetcher> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -40,53 +40,64 @@ public class DatasetStatsSummaryResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } - try { + try { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view profile information for dataset {}", + if (!isAuthorized(resourceUrn, context)) { + log.debug( + "User {} is not authorized to view profile information for dataset {}", context.getActorUrn(), resourceUrn.toString()); - return null; - } - - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); - - final DatasetStatsSummary result = new DatasetStatsSummary(); - result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); - result.setUniqueUserCountLast30Days(usageQueryResult.getAggregations().getUniqueUserCount()); - if (usageQueryResult.getAggregations().hasUsers()) { - result.setTopUsersLast30Days(trimUsers(usageQueryResult.getAggregations().getUsers() - .stream() - .filter(UserUsageCounts::hasUser) - .sorted((a, b) -> (b.getCount() - a.getCount())) - .map(userCounts -> createPartialUser(Objects.requireNonNull(userCounts.getUser()))) - 
.collect(Collectors.toList()))); - } - this.summaryCache.put(resourceUrn, result); - return result; - } catch (Exception e) { - log.error(String.format("Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return null; + } + + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); + + final DatasetStatsSummary result = new DatasetStatsSummary(); + result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); + result.setUniqueUserCountLast30Days( + usageQueryResult.getAggregations().getUniqueUserCount()); + if (usageQueryResult.getAggregations().hasUsers()) { + result.setTopUsersLast30Days( + trimUsers( + usageQueryResult.getAggregations().getUsers().stream() + .filter(UserUsageCounts::hasUser) + .sorted((a, b) -> (b.getCount() - a.getCount())) + .map( + userCounts -> + createPartialUser(Objects.requireNonNull(userCounts.getUser()))) + .collect(Collectors.toList()))); + } + this.summaryCache.put(resourceUrn, result); + return result; + } catch (Exception e) { + log.error( + String.format( + "Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. + } + }); } private List trimUsers(final List originalUsers) { @@ -103,8 +114,9 @@ private CorpUser createPartialUser(final Urn userUrn) { } private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index e4bec8e896fdf..75288ec989c79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -17,7 +17,6 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DatasetUsageStatsResolver implements DataFetcher> { @@ -28,30 +27,35 @@ public DatasetUsageStatsResolver(final UsageClient usageClient) { } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final UsageTimeRange range = UsageTimeRange.valueOf(environment.getArgument("range")); - return CompletableFuture.supplyAsync(() -> { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view usage information for dataset {}", - context.getActorUrn(), - resourceUrn.toString()); - return null; - } - try { - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range); - return 
UsageQueryResultMapper.map(usageQueryResult); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorized(resourceUrn, context)) { + log.debug( + "User {} is not authorized to view usage information for dataset {}", + context.getActorUrn(), + resourceUrn.toString()); + return null; + } + try { + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats(resourceUrn.toString(), range); + return UsageQueryResultMapper.map(usageQueryResult); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + } + }); } private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, + return AuthorizationUtils.isAuthorized( + context, Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 75c09d0cf7e43..62c88c506ba61 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -1,16 +1,20 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput; import com.linkedin.datahub.graphql.resolvers.AuthUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -23,13 +27,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor @@ -37,48 +37,61 @@ public class UpdateDeprecationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateDeprecationInput input = bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); + final UpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); final Urn entityUrn = Urn.createFromString(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateUpdateDeprecationInput( - entityUrn, - _entityService - ); - try { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DEPRECATION_ASPECT_NAME, - _entityService, - new Deprecation()); - updateDeprecation(deprecation, input, context); - - // Create the Deprecation aspect - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(entityUrn, DEPRECATION_ASPECT_NAME, deprecation); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to update Deprecation for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to update Deprecation for resource with entity urn %s", entityUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateUpdateDeprecationInput(entityUrn, _entityService); + try { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), + DEPRECATION_ASPECT_NAME, + _entityService, + new Deprecation()); + updateDeprecation(deprecation, input, context); + + // Create the Deprecation aspect + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + entityUrn, DEPRECATION_ASPECT_NAME, deprecation); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to update Deprecation for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to update Deprecation for resource with entity urn %s", entityUrn), + e); + } + }); } - private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext context, final Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToUpdateDeprecationForEntity( + final QueryContext context, final Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -88,20 +101,19 @@ private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext cont orPrivilegeGroups); } - public static Boolean validateUpdateDeprecationInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUpdateDeprecationInput(Urn entityUrn, EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); + String.format( + "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); } return true; } - private static void updateDeprecation(Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { + private static void updateDeprecation( + Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { deprecation.setDeprecated(input.getDeprecated()); deprecation.setDecommissionTime(input.getDecommissionTime(), SetMode.REMOVE_IF_NULL); if (input.getNote() != null) { @@ -115,9 +127,10 @@ private static void updateDeprecation(Deprecation deprecation, UpdateDeprecation } catch (URISyntaxException e) { // Should never happen. throw new RuntimeException( - String.format("Failed to convert authorized actor into an Urn. actor urn: %s", - context.getActorUrn()), + String.format( + "Failed to convert authorized actor into an Urn. 
actor urn: %s", + context.getActorUrn()), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java index 1930cdc1f8667..9099394d32bd0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -23,22 +28,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS privilege. + * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS + * privilege. */ @Slf4j @RequiredArgsConstructor @@ -51,71 +49,101 @@ public class CreateDomainResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateDomainInput input = bindArgument(environment.getArgument("input"), CreateDomainInput.class); - final Urn parentDomain = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canCreateDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - // Create the Domain Key - final DomainKey key = new DomainKey(); - - // Take user provided id OR generate a random UUID for the domain. - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setId(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Domain already exists!"); - } - - if (parentDomain != null && !_entityClient.exists(parentDomain, context.getAuthentication())) { - throw new IllegalArgumentException("Parent Domain does not exist!"); - } - - if (DomainUtils.hasNameConflict(input.getName(), parentDomain, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. 
Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, DOMAIN_ENTITY_NAME, - DOMAIN_PROPERTIES_ASPECT_NAME, mapDomainProperties(input, context)); - proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); - - String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - OwnerUtils.addCreatorAsOwner(context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return domainUrn; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to create Domain with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Domain with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + final CreateDomainInput input = + bindArgument(environment.getArgument("input"), CreateDomainInput.class); + final Urn parentDomain = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final DomainKey key = new DomainKey(); + + // Take user provided id OR generate a random UUID for the domain. + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setId(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Domain already exists!"); + } + + if (parentDomain != null + && !_entityClient.exists(parentDomain, context.getAuthentication())) { + throw new IllegalArgumentException("Parent Domain does not exist!"); + } + + if (DomainUtils.hasNameConflict( + input.getName(), parentDomain, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in this domain. 
Please pick a unique name.",
+                    input.getName()),
+                DataHubGraphQLErrorCode.CONFLICT);
+          }
+
+          // Create the MCP
+          final MetadataChangeProposal proposal =
+              buildMetadataChangeProposalWithKey(
+                  key,
+                  DOMAIN_ENTITY_NAME,
+                  DOMAIN_PROPERTIES_ASPECT_NAME,
+                  mapDomainProperties(input, context));
+          proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key));
+
+          String domainUrn =
+              _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+          OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER;
+          if (!_entityService.exists(
+              UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) {
+            log.warn("Technical owner does not exist, defaulting to None ownership.");
+            ownershipType = OwnershipType.NONE;
+          }
+          OwnerUtils.addCreatorAsOwner(
+              context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService);
+          return domainUrn;
+        } catch (DataHubGraphQLException e) {
+          throw e;
+        } catch (Exception e) {
+          log.error(
+              "Failed to create Domain with id: {}, name: {}: {}",
+              input.getId(),
+              input.getName(),
+              e.getMessage());
+          throw new RuntimeException(
+              String.format(
+                  "Failed to create Domain with id: %s, name: %s",
+                  input.getId(), input.getName()),
+              e);
+        }
+      });
   }
 
-  private DomainProperties mapDomainProperties(final CreateDomainInput input, final QueryContext context) {
+  private DomainProperties mapDomainProperties(
+      final CreateDomainInput input, final QueryContext context) {
     final DomainProperties result = new DomainProperties();
     result.setName(input.getName());
     result.setDescription(input.getDescription(), SetMode.IGNORE_NULL);
-    result.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()));
+    result.setCreated(
+        new AuditStamp()
+            .setActor(UrnUtils.getUrn(context.getActorUrn()))
+            .setTime(System.currentTimeMillis()));
     if (input.getParentDomain() != null) {
       try {
         result.setParentDomain(Urn.createFromString(input.getParentDomain()));
       } catch (URISyntaxException e) {
-        throw new RuntimeException(String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), e);
+        throw new RuntimeException(
+            String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()),
+            e);
       }
     }
     return result;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java
index 9ab90e8b4ff72..c863f2e581dcb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java
@@ -11,10 +11,7 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Resolver responsible for hard deleting a particular DataHub Corp Group
- */
+/** Resolver responsible for hard deleting a particular DataHub Domain */
 @Slf4j
 public class DeleteDomainResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -25,37 +22,49 @@ public DeleteDomainResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String domainUrn = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(domainUrn);
 
-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (AuthorizationUtils.canManageDomains(context)
+              || AuthorizationUtils.canDeleteEntity(urn, context)) {
+            try {
+              // Make sure there are no child domains
+              if (DomainUtils.hasChildDomains(urn, context, _entityClient)) {
+                throw new RuntimeException(
+                    String.format("Cannot delete domain %s which has child domains", domainUrn));
+              }
-      if (AuthorizationUtils.canManageDomains(context) || AuthorizationUtils.canDeleteEntity(urn, context)) {
-        try {
-          // Make sure there are no child domains
-          if (DomainUtils.hasChildDomains(urn, context, _entityClient)) {
-            throw new RuntimeException(String.format("Cannot delete domain %s which has child domains", domainUrn));
-          }
+              _entityClient.deleteEntity(urn, context.getAuthentication());
+              log.info(
+                  String.format("I've successfully deleted the entity with urn %s", domainUrn));
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-          log.info(String.format("I've successfully deleted the entity %s with urn", domainUrn));
+              // Asynchronously Delete all references to the entity (to return quickly)
+              CompletableFuture.runAsync(
+                  () -> {
+                    try {
+                      _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+                    } catch (Exception e) {
+                      log.error(
+                          String.format(
+                              "Caught exception while attempting to clear all entity references for Domain with urn %s",
+                              urn),
+                          e);
+                    }
+                  });
-          // Asynchronously Delete all references to the entity (to return quickly)
-          CompletableFuture.runAsync(() -> {
-            try {
-              _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+              return true;
             } catch (Exception e) {
-              log.error(String.format("Caught exception while attempting to clear all entity references for Domain with urn %s", urn), e);
+              throw new RuntimeException(
+                  String.format("Failed to perform delete against domain with urn %s", domainUrn),
+                  e);
             }
-          });
-
-          return true;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", domainUrn), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 0bf551c4683e6..8f6d109e71b2c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; @@ -19,13 +22,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolves the entities in a particular Domain. - */ +/** Resolves the entities in a particular Domain. */ @Slf4j public class DomainEntitiesResolver implements DataFetcher> { @@ -49,50 +46,65 @@ public DomainEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Domain) environment.getSource()).getUrn(); - final DomainEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final DomainEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : DEFAULT_QUERY; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? 
input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - try { - - final CriterionArray criteria = new CriterionArray(); - final Criterion filterCriterion = new Criterion() - .setField(DOMAINS_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - criteria.add(filterCriterion); - if (input.getFilters() != null) { - input.getFilters().forEach(filter -> { - criteria.add(new Criterion().setField(filter.getField()).setValue(filter.getValue())); - }); - } - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - query, - new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(criteria))), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with Domain with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final CriterionArray criteria = new CriterionArray(); + final Criterion filterCriterion = + new Criterion() + .setField(DOMAINS_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + criteria.add(filterCriterion); + if (input.getFilters() != null) { + input + .getFilters() + .forEach( + filter -> { + criteria.add( + new Criterion() + .setField(filter.getField()) + .setValue(filter.getValue())); + }); + } + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(criteria))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to resolve entities associated with Domain with urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 3a751e502eb10..5453603f4cc9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,18 +21,14 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS + * platform privilege. 
*/ public class ListDomainsResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -43,47 +42,56 @@ public ListDomainsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - final ListDomainsInput input = bindArgument(environment.getArgument("input"), ListDomainsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final Urn parentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); + return CompletableFuture.supplyAsync( + () -> { + final ListDomainsInput input = + bindArgument(environment.getArgument("input"), ListDomainsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final Urn parentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); - try { - // First, get all domain Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.DOMAIN_ENTITY_NAME, - query, - filter, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all domain Urns. + final SearchResult gmsResult = + _entityClient.search( + Constants.DOMAIN_ENTITY_NAME, + query, + filter, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. - final ListDomainsResult result = new ListDomainsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setDomains(mapUnresolvedDomains(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list domains", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListDomainsResult result = new ListDomainsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setDomains( + mapUnresolvedDomains( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list domains", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial Domain objects which will be resolved be a separate Batch resolver. 
+ // This method maps urns returned from the list endpoint into Partial Domain objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedDomains(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java index dcaa7d61ed90c..8406e19810468 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -9,51 +11,53 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; - public class ParentDomainsResolver implements DataFetcher> { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public ParentDomainsResolver(final EntityClient entityClient) { - _entityClient = entityClient; + public ParentDomainsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); + final List parentDomains = new ArrayList<>(); + final Set visitedParentUrns = new HashSet<>(); + + if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { + throw new IllegalArgumentException( + String.format("Failed to resolve parents for entity type %s", urn)); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - final List parentDomains = new ArrayList<>(); - final Set visitedParentUrns = new HashSet<>(); - - if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { - throw new IllegalArgumentException(String.format("Failed to resolve parents for entity type %s", urn)); - } - - return CompletableFuture.supplyAsync(() -> { - try { - Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); - - while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { - parentDomains.add(parentDomain); - visitedParentUrns.add(parentDomain.getUrn()); - parentDomain = DomainUtils.getParentDomain(Urn.createFromString(parentDomain.getUrn()), context, _entityClient); - } - - final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load parent domains for entity %s", urn), e); + return CompletableFuture.supplyAsync( + () -> { + try { + Entity parentDomain = 
DomainUtils.getParentDomain(urn, context, _entityClient); + + while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { + parentDomains.add(parentDomain); + visitedParentUrns.add(parentDomain.getUrn()); + parentDomain = + DomainUtils.getParentDomain( + Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } + + final ParentDomainsResult result = new ParentDomainsResult(); + result.setCount(parentDomains.size()); + result.setDomains(parentDomains); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load parent domains for entity %s", urn), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 56a76dcb1e07f..1c52f707c61a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,19 +19,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class SetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,49 +38,56 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); final Urn domainUrn = Urn.createFromString(environment.getArgument("domainUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateSetDomainInput( - entityUrn, - domainUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - setDomain(domains, domainUrn); + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateSetDomainInput(entityUrn, domainUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + setDomain(domains, domainUrn); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set Domain to resource with entity urn {}, domain urn {}: {}", entityUrn, domainUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set Domain to resource with entity urn %s, domain urn %s", entityUrn, domainUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to set Domain to resource with entity urn {}, domain urn {}: {}", + entityUrn, + domainUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to set Domain to resource with entity urn %s, domain urn %s", + entityUrn, domainUrn), + e); + } + }); } public static Boolean validateSetDomainInput( - Urn entityUrn, - Urn domainUrn, - EntityService entityService - ) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Domain does not exist.", + entityUrn, domainUrn)); } if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Entity does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Entity does not exist.", + entityUrn, domainUrn)); } return true; @@ -90,4 +98,4 @@ private static void setDomain(Domains domains, Urn domainUrn) { newDomain.add(domainUrn); domains.setDomains(newDomain); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index 01dd4f1254f8e..b2a82ac7608d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,19 +20,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. 
+ * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class UnsetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,39 +38,40 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - validateUnsetDomainInput( - entityUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - unsetDomain(domains); + validateUnsetDomainInput(entityUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + unsetDomain(domains); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to unset Domains for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to unset Domains for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), + e); + } + }); } - public static Boolean validateUnsetDomainInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( @@ -85,4 +87,4 @@ private static void unsetDomain(@Nonnull Domains domains) { } domains.getDomains().clear(); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index dbaf6000477aa..e1b264606074c 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.Urn; @@ -19,14 +23,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for updating the embed render URL for an asset. - */ +/** Resolver used for updating the embed render URL for an asset. */ @Slf4j @RequiredArgsConstructor public class UpdateEmbedResolver implements DataFetcher> { @@ -37,62 +34,70 @@ public class UpdateEmbedResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateEmbedInput input = bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); + final UpdateEmbedInput input = + bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); final Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + validateUpdateEmbedInput(input, _entityService); + try { + final Embed embed = + (Embed) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), EMBED_ASPECT_NAME, _entityService, new Embed()); - if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator.");
-      }
-      validateUpdateEmbedInput(
-          input,
-          _entityService
-      );
-      try {
-        final Embed embed = (Embed) EntityUtils.getAspectFromEntity(
-            entityUrn.toString(),
-            EMBED_ASPECT_NAME,
-            _entityService,
-            new Embed());
+          updateEmbed(embed, input);
-        updateEmbed(embed, input);
-
-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed);
-        _entityService.ingestProposal(
-            proposal,
-            new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()),
-            false
-        );
-        return true;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to update Embed for to resource with entity urn %s", entityUrn), e);
-      }
-    });
+          final MetadataChangeProposal proposal =
+              buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed);
+          _entityService.ingestProposal(
+              proposal,
+              new AuditStamp()
+                  .setActor(UrnUtils.getUrn(context.getActorUrn()))
+                  .setTime(System.currentTimeMillis()),
+              false);
+          return true;
+        } catch (Exception e) {
+          throw new RuntimeException(
+              String.format(
+                  "Failed to update Embed for resource with entity urn %s", entityUrn),
+              e);
+        }
+      });
   }
 
   /**
-   * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link IllegalArgumentException} if the input
-   * is not valid.
+   * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link
+   * IllegalArgumentException} if the input is not valid.
    *
-   * For an input to be valid, the target URN must exist.
+   * <p>
For an input to be valid, the target URN must exist. * * @param input the input to validate * @param entityService an instance of {@link EntityService} used to validate the input. */ - private static void validateUpdateEmbedInput(@Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { + private static void validateUpdateEmbedInput( + @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { throw new IllegalArgumentException( - String.format("Failed to update embed for entity with urn %s. Entity does not exist!", input.getUrn())); + String.format( + "Failed to update embed for entity with urn %s. Entity does not exist!", + input.getUrn())); } } /** * Applies an instance of {@link UpdateEmbedInput} to a base instance of {@link Embed}. + * * @param embed an embed to update * @param input the updates to apply */ - private static void updateEmbed(@Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { + private static void updateEmbed( + @Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { embed.setRenderUrl(input.getRenderUrl(), SetMode.IGNORE_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index 613f97182c5dd..d2bd2f3fb8a17 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.entity.EntityService; @@ -8,12 +10,7 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for returning whether an entity exists. - */ +/** Resolver responsible for returning whether an entity exists. 
*/ public class EntityExistsResolver implements DataFetcher> { private final EntityService _entityService; @@ -22,7 +19,8 @@ public EntityExistsResolver(final EntityService entityService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { String entityUrnString = bindArgument(environment.getArgument("urn"), String.class); // resolver can be used as its own endpoint or when hydrating an entity if (entityUrnString == null && environment.getSource() != null) { @@ -31,12 +29,14 @@ public CompletableFuture get(final DataFetchingEnvironment environment) Objects.requireNonNull(entityUrnString, "Entity urn must not be null!"); final Urn entityUrn = Urn.createFromString(entityUrnString); - return CompletableFuture.supplyAsync(() -> { - try { - return _entityService.exists(entityUrn); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to check whether entity %s exists", entityUrn.toString())); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _entityService.exists(entityUrn); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to check whether entity %s exists", entityUrn.toString())); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index d8190a160f268..751c6096de1a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -9,17 +9,16 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPrivileges; -import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.extern.slf4j.Slf4j; - import java.util.Collections; import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityPrivilegesResolver implements DataFetcher> { @@ -36,25 +35,28 @@ public CompletableFuture get(DataFetchingEnvironment environme final String urnString = ((Entity) environment.getSource()).getUrn(); final Urn urn = UrnUtils.getUrn(urnString); - return CompletableFuture.supplyAsync(() -> { - switch (urn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return getGlossaryTermPrivileges(urn, context); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return getGlossaryNodePrivileges(urn, context); - case Constants.DATASET_ENTITY_NAME: - return getDatasetPrivileges(urn, context); - case Constants.CHART_ENTITY_NAME: - return getChartPrivileges(urn, context); - case Constants.DASHBOARD_ENTITY_NAME: - return getDashboardPrivileges(urn, context); - case Constants.DATA_JOB_ENTITY_NAME: - return getDataJobPrivileges(urn, context); 
- default: - log.warn("Tried to get entity privileges for entity type {} but nothing is implemented for it yet", urn.getEntityType()); - return new EntityPrivileges(); - } - }); + return CompletableFuture.supplyAsync( + () -> { + switch (urn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return getGlossaryTermPrivileges(urn, context); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return getGlossaryNodePrivileges(urn, context); + case Constants.DATASET_ENTITY_NAME: + return getDatasetPrivileges(urn, context); + case Constants.CHART_ENTITY_NAME: + return getChartPrivileges(urn, context); + case Constants.DASHBOARD_ENTITY_NAME: + return getDashboardPrivileges(urn, context); + case Constants.DATA_JOB_ENTITY_NAME: + return getDataJobPrivileges(urn, context); + default: + log.warn( + "Tried to get entity privileges for entity type {} but nothing is implemented for it yet", + urn.getEntityType()); + return new EntityPrivileges(); + } + }); } private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext context) { @@ -66,7 +68,8 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con } Urn parentNodeUrn = GlossaryUtils.getParentUrn(termUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; @@ -80,25 +83,29 @@ private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext con result.setCanManageChildren(true); return result; } - Boolean canManageChildren = GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); + Boolean canManageChildren = + GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); result.setCanManageChildren(canManageChildren); Urn parentNodeUrn = GlossaryUtils.getParentUrn(nodeUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup orPrivilegesGroup = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup orPrivilegesGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 69b5b14edfbee..535dbbf70a4cb 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; @@ -9,22 +12,18 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -36,70 +35,89 @@ public class AddRelatedTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - validateRelatedTermsInput(urn, termUrns); - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - glossaryRelatedTerms = new GlossaryRelatedTerms(); - } - - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + validateRelatedTermsInput(urn, termUrns); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + 
EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + glossaryRelatedTerms = new GlossaryRelatedTerms(); + } + + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add related terms to %s", input.getUrn()), e); } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add related terms to %s", input.getUrn()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); } for (Urn termUrn : termUrns) { if (termUrn.equals(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. Tried to create related term with itself.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. Tried to create related term with itself.", urn)); } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); } else if (!_entityService.exists(termUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. 
%s does not exist.", urn, termUrn)); } } return true; } - private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray existingTermUrns, Urn urn, GlossaryRelatedTerms glossaryRelatedTerms, Urn actor) { + private Boolean updateRelatedTerms( + List termUrns, + GlossaryTermUrnArray existingTermUrns, + Urn urn, + GlossaryRelatedTerms glossaryRelatedTerms, + Urn actor) { List termsToAdd = new ArrayList<>(); for (Urn termUrn : termUrns) { if (existingTermUrns.stream().anyMatch(association -> association.equals(termUrn))) { @@ -117,7 +135,12 @@ private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray exis existingTermUrns.add(newUrn); } - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); return true; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index cc0ab4e03a4e8..815b4662e1ed2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,18 +24,11 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,41 +41,67 @@ public class CreateGlossaryNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - try { - final GlossaryNodeKey key = new GlossaryNodeKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Node already exists!"); + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + try { + final GlossaryNodeKey key = new GlossaryNodeKey(); + + final String id = + input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Node already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_NODE_ENTITY_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + mapGlossaryNodeInfo(input)); + + String glossaryNodeUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryNodeUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryNodeUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryNode with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNode with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, mapGlossaryNodeInfo(input)); - - String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - - OwnerUtils.addCreatorAsOwner(context, glossaryNodeUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryNodeUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryNode with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryNode with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput input) { @@ -90,10 +114,12 @@ private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index ad69e0c5876e2..90979fe918f71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,9 +30,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -37,12 +39,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -57,42 +55,69 @@ public class CreateGlossaryTermResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - // Ensure there isn't another glossary term with the same name at this level of the glossary - validateGlossaryTermName(parentNode, context, input.getName()); - try { - final GlossaryTermKey key = new GlossaryTermKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Term already exists!"); - } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, mapGlossaryTermInfo(input)); - - String glossaryTermUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + // Ensure there isn't another glossary term with the same name at this level of the + // glossary + validateGlossaryTermName(parentNode, context, input.getName()); + try { + final GlossaryTermKey key = new GlossaryTermKey(); + + final String id = + input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Term already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_TERM_ENTITY_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + mapGlossaryTermInfo(input)); + + String glossaryTermUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryTermUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryTermUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryTerm with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryTerm with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - OwnerUtils.addCreatorAsOwner(context, glossaryTermUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryTermUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryTerm with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryTerm with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput input) { @@ -106,7 +131,10 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; @@ -114,25 +142,22 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp private Filter buildParentNodeFilter(final Urn parentNodeUrn) { final Map criterionMap = new HashMap<>(); - criterionMap.put(PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); + criterionMap.put( + PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); return QueryUtils.newFilter(criterionMap); } private Map getTermsWithSameParent(Urn parentNode, QueryContext context) { try { final Filter filter = buildParentNodeFilter(parentNode); - final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + _entityClient.filter( + GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final List termUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List termUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); return _entityClient.batchGetV2( GLOSSARY_TERM_ENTITY_NAME, @@ -147,14 +172,17 @@ private Map getTermsWithSameParent(Urn parentNode, QueryCon private void validateGlossaryTermName(Urn parentNode, QueryContext context, String name) { Map entities = getTermsWithSameParent(parentNode, context); - entities.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); - GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); - if (termInfo.hasName() && termInfo.getName().equals(name)) { - throw new IllegalArgumentException("Glossary Term with this name already exists at this level of the Business Glossary"); - } - } - }); + entities.forEach( + (urn, entityResponse) -> { + if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); + if (termInfo.hasName() && termInfo.getName().equals(name)) { + throw new IllegalArgumentException( + "Glossary Term with this name already exists at this level of the Business Glossary"); + } + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index 0929c7138528d..f623f0e34b366 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java 
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -11,50 +11,59 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DeleteGlossaryEntityResolver implements DataFetcher> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteGlossaryEntityResolver(final EntityClient entityClient, EntityService entityService) { + public DeleteGlossaryEntityResolver( + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("urn")); final Urn parentNodeUrn = GlossaryUtils.getParentUrn(entityUrn, context, _entityClient); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { - throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); - } - - try { - _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + if (!_entityService.exists(entityUrn)) { + throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); + } - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { try { - _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for glossary entity with urn %s", + entityUrn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for glossary entity with urn %s", entityUrn), e); + throw new RuntimeException( + String.format( + "Failed to perform delete against glossary entity with urn %s", entityUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against glossary entity with urn %s", entityUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index 1457a308c8774..e7990b1a343d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryNodesResolver implements DataFetcher> { +public class GetRootGlossaryNodesResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryNodesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsNodesResult = _entityClient.filter( - Constants.GLOSSARY_NODE_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); - - final List glossaryNodeUrns = gmsNodesResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); - - final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); - result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); - result.setCount(glossaryNodeUrns.size()); - result.setStart(gmsNodesResult.getFrom()); - result.setTotal(gmsNodesResult.getNumEntities()); - - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsNodesResult = + _entityClient.filter( + Constants.GLOSSARY_NODE_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); + + final List glossaryNodeUrns = + gmsNodesResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + final GetRootGlossaryNodesResult result = new 
GetRootGlossaryNodesResult(); + result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); + result.setCount(glossaryNodeUrns.size()); + result.setStart(gmsNodesResult.getFrom()); + result.setTotal(gmsNodesResult.getNumEntities()); + + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } @@ -101,4 +103,3 @@ private List mapUnresolvedGlossaryNodes(final List entityUrns return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index f7684e477f830..40e4363dcff93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryTermsResolver implements DataFetcher> { +public class GetRootGlossaryTermsResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryTermsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); + return CompletableFuture.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsTermsResult = _entityClient.filter( - 
Constants.GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsTermsResult = + _entityClient.filter( + Constants.GLOSSARY_TERM_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); - final List glossaryTermUrns = gmsTermsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List glossaryTermUrns = + gmsTermsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); - result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); - result.setCount(glossaryTermUrns.size()); - result.setStart(gmsTermsResult.getFrom()); - result.setTotal(gmsTermsResult.getNumEntities()); + final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); + result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); + result.setCount(glossaryTermUrns.size()); + result.setStart(gmsTermsResult.getFrom()); + result.setTotal(gmsTermsResult.getNumEntities()); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); - } - }); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index d513d70f39f58..850469f996515 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -14,18 +18,13 @@ import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; 
-import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; - -public class ParentNodesResolver implements DataFetcher> { +public class ParentNodesResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -36,19 +35,23 @@ public ParentNodesResolver(final EntityClient entityClient) { private void aggregateParentNodes(List nodes, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(dataMap); if (nodeInfo.hasParentNode()) { Urn parentNodeUrn = nodeInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); nodes.add(mappedNode); @@ -64,19 +67,23 @@ private void aggregateParentNodes(List nodes, String urn, QueryCon private GlossaryNode getTermParentNode(String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); if (termInfo.hasParentNode()) { Urn parentNodeUrn = termInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); return mappedNode; @@ -95,27 +102,28 @@ public CompletableFuture get(DataFetchingEnvironment environm final String urn = ((Entity) environment.getSource()).getUrn(); final List nodes = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - final String type = 
Urn.createFromString(urn).getEntityType();
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            final String type = Urn.createFromString(urn).getEntityType();
 
-      if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) {
-        final GlossaryNode parentNode = getTermParentNode(urn, context);
-        if (parentNode != null) {
-          nodes.add(parentNode);
-          aggregateParentNodes(nodes, parentNode.getUrn(), context);
-        }
-      } else {
-        aggregateParentNodes(nodes, urn, context);
-      }
+            if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) {
+              final GlossaryNode parentNode = getTermParentNode(urn, context);
+              if (parentNode != null) {
+                nodes.add(parentNode);
+                aggregateParentNodes(nodes, parentNode.getUrn(), context);
+              }
+            } else {
+              aggregateParentNodes(nodes, urn, context);
+            }
 
-      final ParentNodesResult result = new ParentNodesResult();
-      result.setCount(nodes.size());
-      result.setNodes(nodes);
-      return result;
-    } catch (DataHubGraphQLException | URISyntaxException e) {
-      throw new RuntimeException("Failed to load parent nodes", e);
-    }
-    });
+            final ParentNodesResult result = new ParentNodesResult();
+            result.setCount(nodes.size());
+            result.setNodes(nodes);
+            return result;
+          } catch (DataHubGraphQLException | URISyntaxException e) {
+            throw new RuntimeException("Failed to load parent nodes", e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java
index 417ef4292d0f7..8c9b792b74e0d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java
@@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect;
+
 import com.linkedin.common.GlossaryTermUrnArray;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -14,15 +17,11 @@ import com.linkedin.metadata.entity.EntityUtils;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
-import lombok.RequiredArgsConstructor;
-import lombok.extern.slf4j.Slf4j;
-
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
-
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 @RequiredArgsConstructor
@@ -34,57 +33,82 @@ public class RemoveRelatedTermsResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class);
+    final RelatedTermsInput input =
+        bindArgument(environment.getArgument("input"), RelatedTermsInput.class);
 
-    return CompletableFuture.supplyAsync(() -> {
-      if (GlossaryUtils.canManageGlossaries(context)) {
-        try {
-          final TermRelationshipType relationshipType = input.getRelationshipType();
-          final Urn urn = Urn.createFromString(input.getUrn());
-          final List<Urn> termUrnsToRemove = input.getTermUrns().stream()
-              .map(UrnUtils::getUrn)
-              
.collect(Collectors.toList()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List termUrnsToRemove = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); - } + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", + urn, urn)); + } - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - throw new RuntimeException(String.format("Related Terms for this Urn do not exist: %s", urn)); - } + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + throw new RuntimeException( + String.format("Related Terms for this Urn do not exist: %s", urn)); + } - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, 
glossaryRelatedTerms, actor, _entityService);
-        return true;
+            existingTermUrns.removeIf(
+                termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals));
+            persistAspect(
+                urn,
+                Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME,
+                glossaryRelatedTerms,
+                actor,
+                _entityService);
+            return true;
+          }
+        } catch (Exception e) {
+          throw new RuntimeException(
+              String.format("Failed to remove related terms from %s", input.getUrn()), e);
+        }
       }
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to remove related terms from %s", input.getUrn()), e);
-      }
-    }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
+    });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java
index daff0962bc2e8..acfc2cd14f8d4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java
@@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group;
+import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.group.GroupService;
 import com.linkedin.common.Origin;
@@ -17,13 +20,7 @@
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;
 
-import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver that adds a set of native members to a group, if the user and group both exist.
- */
+/** Resolver that adds a set of native members to a group, if the user and group both exist. */
 public class AddGroupMembersResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
   private final GroupService _groupService;
@@ -33,9 +30,11 @@ public AddGroupMembersResolver(final GroupService groupService) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
 
-    final AddGroupMembersInput input = bindArgument(environment.getArgument("input"), AddGroupMembersInput.class);
+    final AddGroupMembersInput input =
+        bindArgument(environment.getArgument("input"), AddGroupMembersInput.class);
     final String groupUrnStr = input.getGroupUrn();
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();
@@ -52,30 +51,37 @@ public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
           String.format("Failed to add members to group %s. 
Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership for group %s when adding group members", groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually added to it", - groupUrnStr)); - } + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership for group %s when adding group members", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually added to it", + groupUrnStr)); + } - try { - // Add each user to the group - final List userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - userUrnList.forEach(userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add group members to group %s", groupUrnStr)); - } - }); + try { + // Add each user to the group + final List userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + userUrnList.forEach( + userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add group members to group %s", groupUrnStr)); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index 75f2a61287ecc..e487ee00608d4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,10 +14,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -// Currently, this resolver will override the group details, but not group membership, if a group with the same name already exists. +// Currently, this resolver will override the group details, but not group membership, if a group +// with the same name already exists. 
public class CreateGroupResolver implements DataFetcher> { private final GroupService _groupService; @@ -33,19 +33,22 @@ public CompletableFuture get(final DataFetchingEnvironment environment) throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - final CreateGroupInput input = bindArgument(environment.getArgument("input"), CreateGroupInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, check if the group already exists. - // Create the Group key. - final CorpGroupKey key = new CorpGroupKey(); - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". - return _groupService.createNativeGroup(key, input.getName(), input.getDescription(), authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create group", e); - } - }); + final CreateGroupInput input = + bindArgument(environment.getArgument("input"), CreateGroupInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, check if the group already exists. + // Create the Group key. + final CorpGroupKey key = new CorpGroupKey(); + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". + return _groupService.createNativeGroup( + key, input.getName(), input.getDescription(), authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create group", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index d0874b21fb106..93582fb956bd8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class EntityCountsResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -27,31 +26,42 @@ public EntityCountsResolver(final EntityClient entityClient) { @Override @WithSpan - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final EntityCountInput input = bindArgument(environment.getArgument("input"), EntityCountInput.class); - final EntityCountResults results = new EntityCountResults(); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all counts - Map gmsResult = _entityClient.batchGetTotalEntityCount( - input.getTypes().stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), context.getAuthentication()); - - // 
bind to a result. - List resultList = gmsResult.entrySet().stream().map(entry -> { - EntityCountResult result = new EntityCountResult(); - result.setCount(Math.toIntExact(entry.getValue())); - result.setEntityType(EntityTypeMapper.getType(entry.getKey())); - return result; - }).collect(Collectors.toList()); - results.setCounts(resultList); - return results; - } catch (Exception e) { - throw new RuntimeException("Failed to get entity counts", e); - } - }); + final EntityCountInput input = + bindArgument(environment.getArgument("input"), EntityCountInput.class); + final EntityCountResults results = new EntityCountResults(); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all counts + Map gmsResult = + _entityClient.batchGetTotalEntityCount( + input.getTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + context.getAuthentication()); + + // bind to a result. + List resultList = + gmsResult.entrySet().stream() + .map( + entry -> { + EntityCountResult result = new EntityCountResult(); + result.setCount(Math.toIntExact(entry.getValue())); + result.setEntityType(EntityTypeMapper.getType(entry.getKey())); + return result; + }) + .collect(Collectors.toList()); + results.setCounts(resultList); + return results; + } catch (Exception e) { + throw new RuntimeException("Failed to get entity counts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index 67cc84a33a954..a6ad8698679f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -24,10 +27,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListGroupsResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -41,51 +40,68 @@ public ListGroupsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListGroupsInput input = bindArgument(environment.getArgument("input"), ListGroupsInput.class); + final ListGroupsInput input = + bindArgument(environment.getArgument("input"), ListGroupsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all group Urns. 
-        final SearchResult gmsResult =
-            _entityClient.search(CORP_GROUP_ENTITY_NAME,
+      return CompletableFuture.supplyAsync(
+          () -> {
+            try {
+              // First, get all group Urns.
+              final SearchResult gmsResult =
+                  _entityClient.search(
+                      CORP_GROUP_ENTITY_NAME,
                query,
                null,
-              new SortCriterion().setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING),
-              start, count, context.getAuthentication(),
+                      new SortCriterion()
+                          .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME)
+                          .setOrder(SortOrder.DESCENDING),
+                      start,
+                      count,
+                      context.getAuthentication(),
                new SearchFlags().setFulltext(true));
 
-        // Then, hydrate all groups.
-        final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME,
-            new HashSet<>(gmsResult.getEntities().stream()
-                .map(SearchEntity::getEntity)
-                .collect(Collectors.toList())), null, context.getAuthentication());
+              // Then, hydrate all groups.
+              final Map<Urn, EntityResponse> entities =
+                  _entityClient.batchGetV2(
+                      CORP_GROUP_ENTITY_NAME,
+                      new HashSet<>(
+                          gmsResult.getEntities().stream()
+                              .map(SearchEntity::getEntity)
+                              .collect(Collectors.toList())),
+                      null,
+                      context.getAuthentication());
 
-        // Now that we have entities we can bind this to a result.
-        final ListGroupsResult result = new ListGroupsResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setGroups(mapUnresolvedGroups(gmsResult.getEntities().stream()
-            .map(SearchEntity::getEntity)
-            .collect(Collectors.toList())));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list groups", e);
-      }
-    });
+              // Now that we have entities we can bind this to a result.
+              final ListGroupsResult result = new ListGroupsResult();
+              result.setStart(gmsResult.getFrom());
+              result.setCount(gmsResult.getPageSize());
+              result.setTotal(gmsResult.getNumEntities());
+              result.setGroups(
+                  mapUnresolvedGroups(
+                      gmsResult.getEntities().stream()
+                          .map(SearchEntity::getEntity)
+                          .collect(Collectors.toList())));
+              return result;
+            } catch (Exception e) {
+              throw new RuntimeException("Failed to list groups", e);
+            }
+          });
     }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
   }
 
-  // This method maps urns returned from the list endpoint into Partial Group objects which will be resolved by a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial Group objects which will be
+  // resolved by a separate Batch resolver. 
private List mapUnresolvedGroups(final List entityUrns) {
 final List results = new ArrayList<>();
 for (final Urn urn : entityUrns) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java
index 287b4aa7b5dbd..9fb63b3eb463d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.group;

+import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.group.GroupService;
 import com.linkedin.common.Origin;
@@ -17,10 +20,6 @@
 import java.util.concurrent.CompletableFuture;
 import java.util.stream.Collectors;

-import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 public class RemoveGroupMembersResolver implements DataFetcher> {

 private final GroupService _groupService;
@@ -30,9 +29,11 @@ public RemoveGroupMembersResolver(final GroupService groupService) {
 }

 @Override
- public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception {
+ public CompletableFuture get(final DataFetchingEnvironment environment)
+ throws Exception {

- final RemoveGroupMembersInput input = bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class);
+ final RemoveGroupMembersInput input =
+ bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class);
 final String groupUrnStr = input.getGroupUrn();
 final QueryContext context = environment.getContext();
 final Authentication authentication = context.getAuthentication();
@@ -43,37 +44,42 @@ public CompletableFuture get(final DataFetchingEnvironment environment)
 }

 final Urn groupUrn = Urn.createFromString(groupUrnStr);
- final List userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+ final List userUrnList =
+ input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList());

 if (!_groupService.groupExists(groupUrn)) {
 // The group doesn't exist.
 throw new DataHubGraphQLException(
- String.format("Failed to add remove members from group %s. Group does not exist.", groupUrnStr),
+ String.format(
+ "Failed to remove members from group %s. 
Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership when removing group members from group %s", - groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually removed from it", - groupUrnStr)); - } - try { - _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership when removing group members from group %s", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually removed from it", + groupUrnStr)); + } + try { + _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java index 99481868e30ce..e69d6b471f3c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java @@ -10,10 +10,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class RemoveGroupResolver implements DataFetcher> { @@ -24,30 +21,39 @@ public RemoveGroupResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String groupUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(groupUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { try { - _entityClient.deleteEntityReferences(urn, 
context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for group with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for group with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against group with urn %s", groupUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against group with urn %s", groupUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 6a4af7563a8d8..036780d446701 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,25 +1,30 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), + authorizer); } public static boolean canManageSecrets(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); } - private IngestionAuthUtils() { } + private IngestionAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index 1140c031f1d35..ffa9dcf42d176 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ 
-25,11 +25,11 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IngestionResolverUtils { - public static List mapExecutionRequests(final Collection requests) { + public static List mapExecutionRequests( + final Collection requests) { List result = new ArrayList<>(); for (final EntityResponse request : requests) { result.add(mapExecutionRequest(request)); @@ -46,10 +46,13 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe result.setId(entityUrn.getId()); // Map input aspect. Must be present. - final EnvelopedAspect envelopedInput = aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); + final EnvelopedAspect envelopedInput = + aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); if (envelopedInput != null) { - final ExecutionRequestInput executionRequestInput = new ExecutionRequestInput(envelopedInput.getValue().data()); - final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); + final ExecutionRequestInput executionRequestInput = + new ExecutionRequestInput(envelopedInput.getValue().data()); + final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = + new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); inputResult.setTask(executionRequestInput.getTask()); if (executionRequestInput.hasSource()) { @@ -63,23 +66,29 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe } // Map result aspect. Optional. - final EnvelopedAspect envelopedResult = aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + final EnvelopedAspect envelopedResult = + aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (envelopedResult != null) { - final ExecutionRequestResult executionRequestResult = new ExecutionRequestResult(envelopedResult.getValue().data()); + final ExecutionRequestResult executionRequestResult = + new ExecutionRequestResult(envelopedResult.getValue().data()); result.setResult(mapExecutionRequestResult(executionRequestResult)); } return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource + mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); result.setType(execRequestSource.getType()); return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult + mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); result.setStatus(execRequestResult.getStatus()); result.setStartTimeMs(execRequestResult.getStartTimeMs()); 
result.setDurationMs(execRequestResult.getDurationMs()); @@ -90,7 +99,8 @@ public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapE return result; } - public static StructuredReport mapStructuredReport(final StructuredExecutionReport structuredReport) { + public static StructuredReport mapStructuredReport( + final StructuredExecutionReport structuredReport) { StructuredReport structuredReportResult = new StructuredReport(); structuredReportResult.setType(structuredReport.getType()); structuredReportResult.setSerializedValue(structuredReport.getSerializedValue()); @@ -98,7 +108,8 @@ public static StructuredReport mapStructuredReport(final StructuredExecutionRepo return structuredReportResult; } - public static List mapIngestionSources(final Collection entities) { + public static List mapIngestionSources( + final Collection entities) { final List results = new ArrayList<>(); for (EntityResponse response : entities) { try { @@ -118,16 +129,19 @@ public static IngestionSource mapIngestionSource(final EntityResponse ingestionS final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); if (envelopedInfo == null) { - throw new IllegalStateException("No ingestion source info aspect exists for urn: " + entityUrn); + throw new IllegalStateException( + "No ingestion source info aspect exists for urn: " + entityUrn); } // Bind into a strongly typed object. - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); return mapIngestionSourceInfo(entityUrn, ingestionSourceInfo); } - public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHubIngestionSourceInfo info) { + public static IngestionSource mapIngestionSourceInfo( + final Urn urn, final DataHubIngestionSourceInfo info) { final IngestionSource result = new IngestionSource(); result.setUrn(urn.toString()); result.setName(info.getName()); @@ -139,29 +153,30 @@ public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHu return result; } - public static IngestionConfig mapIngestionSourceConfig(final DataHubIngestionSourceConfig config) { + public static IngestionConfig mapIngestionSourceConfig( + final DataHubIngestionSourceConfig config) { final IngestionConfig result = new IngestionConfig(); result.setRecipe(config.getRecipe()); result.setVersion(config.getVersion()); result.setExecutorId(config.getExecutorId()); result.setDebugMode(config.isDebugMode()); if (config.getExtraArgs() != null) { - List extraArgs = config.getExtraArgs() - .keySet() - .stream() - .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) - .collect(Collectors.toList()); + List extraArgs = + config.getExtraArgs().keySet().stream() + .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) + .collect(Collectors.toList()); result.setExtraArgs(extraArgs); } return result; } - public static IngestionSchedule mapIngestionSourceSchedule(final DataHubIngestionSourceSchedule schedule) { + public static IngestionSchedule mapIngestionSourceSchedule( + final DataHubIngestionSourceSchedule schedule) { final IngestionSchedule result = new IngestionSchedule(); result.setInterval(schedule.getInterval()); result.setTimezone(schedule.getTimezone()); return result; } - private IngestionResolverUtils() { } + private IngestionResolverUtils() {} } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java index 7f9cb6176989f..e346f2b077c98 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -22,15 +26,9 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Cancels a requested ingestion execution by emitting a KILL signal. - */ -public class CancelIngestionExecutionRequestResolver implements DataFetcher> { +/** Cancels a requested ingestion execution by emitting a KILL signal. */ +public class CancelIngestionExecutionRequestResolver + implements DataFetcher> { private static final String KILL_EXECUTION_REQUEST_SIGNAL = "KILL"; @@ -44,45 +42,58 @@ public CancelIngestionExecutionRequestResolver(final EntityClient entityClient) public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { - if (IngestionAuthUtils.canManageIngestion(context)) { + final CancelIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); - final CancelIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); + try { + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); - try { - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", ingestionSourceUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = - 
response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - // Build the arguments map. - final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); - execSignal.setSignal(KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. - execSignal.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execSignal.setCreatedAt(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(Urn.createFromString(context.getActorUrn())) - ); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn( - input.getExecutionRequestUrn()), EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, execSignal); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to submit cancel signal %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Build the arguments map. + final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); + execSignal.setSignal( + KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. + execSignal.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execSignal.setCreatedAt( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(input.getExecutionRequestUrn()), + EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, + execSignal); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to submit cancel signal %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java index ea20b837e0a1f..8ef5447cd9433 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; @@ -30,15 +34,9 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ -public class CreateIngestionExecutionRequestResolver implements DataFetcher> { +/** Creates an on-demand ingestion execution request. */ +public class CreateIngestionExecutionRequestResolver + implements DataFetcher> { private static final String RUN_INGEST_TASK_NAME = "RUN_INGEST"; private static final String MANUAL_EXECUTION_SOURCE_NAME = "MANUAL_INGESTION_SOURCE"; @@ -49,7 +47,8 @@ public class CreateIngestionExecutionRequestResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final CreateIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - // Fetch the original ingestion source - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); - - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - - if (!ingestionSourceInfo.getConfig().hasRecipe()) { - throw new DataHubGraphQLException( - String.format("Failed to find valid ingestion source with urn %s. 
Missing recipe", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - // Build the arguments map. - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task - execInput.setSource( - new ExecutionRequestSource().setType(MANUAL_EXECUTION_SOURCE_NAME).setIngestionSource(ingestionSourceUrn)); - execInput.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map arguments = new HashMap<>(); - String recipe = ingestionSourceInfo.getConfig().getRecipe(); - recipe = injectRunId(recipe, executionRequestUrn.toString()); - recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); - arguments.put(RECIPE_ARG_NAME, recipe); - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().hasVersion() - ? ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion() - ); - if (ingestionSourceInfo.getConfig().hasVersion()) { - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); - } - String debugMode = "false"; - if (ingestionSourceInfo.getConfig().hasDebugMode()) { - debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; - } - if (ingestionSourceInfo.getConfig().hasExtraArgs()) { - arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final CreateIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + // Fetch the original ingestion source + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + + if (!ingestionSourceInfo.getConfig().hasRecipe()) { + throw new DataHubGraphQLException( + String.format( + "Failed to find valid ingestion source with urn %s. Missing recipe", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + // Build the arguments map. 
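As a reader aid for the restructured block below, a hedged sketch of what the executor arguments end up holding for a typical manual run. The literal keys and the version value are assumptions for illustration; the code itself only ever references the RECIPE_ARG_NAME, VERSION_ARG_NAME, and DEBUG_MODE_ARG_NAME constants:

    // Assumed example contents of the args map handed to the RUN_INGEST task:
    Map<String, String> args = new HashMap<>();
    args.put("recipe", recipeWithRunId); // hypothetical: recipe JSON after injectRunId/injectPipelineName
    args.put("version", "0.12.1");       // assumed example: source-pinned version, else the default CLI version
    args.put("debug_mode", "false");     // stringified debug flag from the source config
    args.putAll(extraArgs);              // hypothetical: extra args configured on the ingestion source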
+ final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task + execInput.setSource( + new ExecutionRequestSource() + .setType(MANUAL_EXECUTION_SOURCE_NAME) + .setIngestionSource(ingestionSourceUrn)); + execInput.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map arguments = new HashMap<>(); + String recipe = ingestionSourceInfo.getConfig().getRecipe(); + recipe = injectRunId(recipe, executionRequestUrn.toString()); + recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); + arguments.put(RECIPE_ARG_NAME, recipe); + arguments.put( + VERSION_ARG_NAME, + ingestionSourceInfo.getConfig().hasVersion() + ? ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); + if (ingestionSourceInfo.getConfig().hasVersion()) { + arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); + } + String debugMode = "false"; + if (ingestionSourceInfo.getConfig().hasDebugMode()) { + debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; + } + if (ingestionSourceInfo.getConfig().hasExtraArgs()) { + arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + } + arguments.put(DEBUG_MODE_ARG_NAME, debugMode); + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new ingestion execution request %s", input), e); + } } - arguments.put(DEBUG_MODE_ARG_NAME, debugMode); - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, - EXECUTION_REQUEST_ENTITY_NAME, EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new ingestion execution request %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Injects an override run id into a recipe for tracking purposes. Any existing run id will be overwritten. + * Injects an override run id into a recipe for tracking purposes. Any existing run id will be + * overwritten. * - * TODO: Determine if this should be handled in the executor itself. + *
TODO: Determine if this should be handled in the executor itself. * * @param runId the run id to place into the recipe * @return a modified recipe JSON string @@ -149,7 +170,8 @@ private String injectRunId(final String originalJson, final String runId) { return obj.toString(); } catch (JSONException e) { // This should ideally never be hit. - throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided."); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided."); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java index 1886db62ae450..2505ce28c5c2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -10,26 +13,19 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.execution.ExecutionRequestInput; import com.linkedin.execution.ExecutionRequestSource; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.IngestionUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ +/** Creates an on-demand ingestion execution request. */ public class CreateTestConnectionRequestResolver implements DataFetcher> { private static final String TEST_CONNECTION_TASK_NAME = "TEST_CONNECTION"; @@ -41,7 +37,8 @@ public class CreateTestConnectionRequestResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - final CreateTestConnectionRequestInput input = - bindArgument(environment.getArgument("input"), CreateTestConnectionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(TEST_CONNECTION_TASK_NAME); - execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); - execInput.setExecutorId(DEFAULT_EXECUTOR_ID); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map arguments = new HashMap<>(); - arguments.put(RECIPE_ARG_NAME, IngestionUtils.injectPipelineName(input.getRecipe(), executionRequestUrn.toString())); - if (input.getVersion() != null) { - arguments.put(VERSION_ARG_NAME, input.getVersion()); - } - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, EXECUTION_REQUEST_ENTITY_NAME, - EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new test ingestion connection request %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + final CreateTestConnectionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateTestConnectionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(TEST_CONNECTION_TASK_NAME); + execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); + execInput.setExecutorId(DEFAULT_EXECUTOR_ID); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map arguments = new HashMap<>(); + arguments.put( + RECIPE_ARG_NAME, + IngestionUtils.injectPipelineName( + input.getRecipe(), executionRequestUrn.toString())); + if (input.getVersion() != null) { + arguments.put(VERSION_ARG_NAME, input.getVersion()); + } + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to create new test ingestion connection request %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 8880330d63495..722ffe3aba6b8 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -19,12 +19,10 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Retrieves an Ingestion Execution Request by primary key (urn). - */ +/** Retrieves an Ingestion Execution Request by primary key (urn). */ @Slf4j -public class GetIngestionExecutionRequestResolver implements DataFetcher> { +public class GetIngestionExecutionRequestResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -33,32 +31,40 @@ public GetIngestionExecutionRequestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch specific execution request - final Urn urn = Urn.createFromString(urnStr); - final Map entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No execution request found - throw new DataHubGraphQLException(String.format("Failed to find Execution Request with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve execution request", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch specific execution request + final Urn urn = Urn.createFromString(urnStr); + final Map entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No execution request found + throw new DataHubGraphQLException( + String.format("Failed to find Execution Request with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Execution request found + return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve execution request", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index c72f273a9027e..01100a24d6b15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -29,11 +29,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. */ @Slf4j -public class IngestionSourceExecutionRequestsResolver implements DataFetcher> { +public class IngestionSourceExecutionRequestsResolver + implements DataFetcher> { private static final String INGESTION_SOURCE_FIELD_NAME = "ingestionSource"; private static final String REQUEST_TIME_MS_FIELD_NAME = "requestTimeMs"; @@ -45,64 +44,77 @@ public IngestionSourceExecutionRequestsResolver(final EntityClient entityClient) } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final String urn = ((IngestionSource) environment.getSource()).getUrn(); - final Integer start = environment.getArgument("start") != null ? environment.getArgument("start") : 0; - final Integer count = environment.getArgument("count") != null ? environment.getArgument("count") : 10; + final Integer start = + environment.getArgument("start") != null ? environment.getArgument("start") : 0; + final Integer count = + environment.getArgument("count") != null ? environment.getArgument("count") : 10; - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + try { - try { + // 1. Fetch the related edges + final Criterion filterCriterion = + new Criterion() + .setField(INGESTION_SOURCE_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(urn); - // 1. Fetch the related edges - final Criterion filterCriterion = new Criterion() - .setField(INGESTION_SOURCE_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(urn); + final SearchResult executionsSearchResult = + _entityClient.filter( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + new SortCriterion() + .setField(REQUEST_TIME_MS_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication()); - final SearchResult executionsSearchResult = _entityClient.filter( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - new SortCriterion().setField(REQUEST_TIME_MS_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication() - ); + // 2. 
Batch fetch the related ExecutionRequests + final Set relatedExecRequests = + executionsSearchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); - // 2. Batch fetch the related ExecutionRequests - final Set relatedExecRequests = executionsSearchResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Map entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + relatedExecRequests, + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); - final Map entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - relatedExecRequests, - ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - - // 3. Map the GMS ExecutionRequests into GraphQL Execution Requests - final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); - result.setStart(executionsSearchResult.getFrom()); - result.setCount(executionsSearchResult.getPageSize()); - result.setTotal(executionsSearchResult.getNumEntities()); - result.setExecutionRequests(IngestionResolverUtils.mapExecutionRequests( - executionsSearchResult.getEntities() - .stream() - .map(searchResult -> entities.get(searchResult.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - )); - return result; - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve executions associated with ingestion source with urn %s", urn), e); - } - }); + // 3. Map the GMS ExecutionRequests into GraphQL Execution Requests + final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); + result.setStart(executionsSearchResult.getFrom()); + result.setCount(executionsSearchResult.getPageSize()); + result.setTotal(executionsSearchResult.getNumEntities()); + result.setExecutionRequests( + IngestionResolverUtils.mapExecutionRequests( + executionsSearchResult.getEntities().stream() + .map(searchResult -> entities.get(searchResult.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve executions associated with ingestion source with urn %s", + urn), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 05fcacf7c0946..0b909dee51374 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -7,11 +9,8 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import 
java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class RollbackIngestionResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -20,33 +19,36 @@ public RollbackIngestionResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - final RollbackIngestionInput input = bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); - final String runId = input.getRunId(); + final RollbackIngestionInput input = + bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); + final String runId = input.getRunId(); - rollbackIngestion(runId, context); - return true; - }); + rollbackIngestion(runId, context); + return true; + }); } - public CompletableFuture rollbackIngestion(final String runId, final QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to rollback ingestion execution", e); - } - }); - + public CompletableFuture rollbackIngestion( + final String runId, final QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.rollbackIngestion(runId, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to rollback ingestion execution", e); + } + }); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index e1745031d9dae..577780e53ce86 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -17,23 +21,16 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the MANAGE_SECRETS privilege. 
+ * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. */ public class CreateSecretResolver implements DataFetcher> { private final EntityClient _entityClient; private final SecretService _secretService; - public CreateSecretResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + public CreateSecretResolver(final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @@ -41,36 +38,46 @@ public CreateSecretResolver( @Override public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateSecretInput input = bindArgument(environment.getArgument("input"), CreateSecretInput.class); + final CreateSecretInput input = + bindArgument(environment.getArgument("input"), CreateSecretInput.class); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { - if (IngestionAuthUtils.canManageSecrets(context)) { + try { + // Create the Ingestion source key --> use the display name as a unique id to ensure + // it's not duplicated. + final DataHubSecretKey key = new DataHubSecretKey(); + key.setId(input.getName()); - try { - // Create the Ingestion source key --> use the display name as a unique id to ensure it's not duplicated. - final DataHubSecretKey key = new DataHubSecretKey(); - key.setId(input.getName()); + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Secret already exists!"); + } - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Secret already exists!"); - } - - // Create the secret value. - final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + // Create the secret value. + final DataHubSecretValue value = new DataHubSecretValue(); + value.setName(input.getName()); + value.setValue(_secretService.encrypt(input.getValue())); + value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + value.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, SECRETS_ENTITY_NAME, - SECRET_VALUE_ASPECT_NAME, value); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new secret with name %s", input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, SECRETS_ENTITY_NAME, SECRET_VALUE_ASPECT_NAME, value); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new secret with name %s", input.getName()), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java index b35931420c078..228d5a094cdef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. - */ +/** Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. */ public class DeleteSecretResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -27,15 +24,19 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageSecrets(context)) { final String secretUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(secretUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return secretUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against secret with urn %s", secretUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return secretUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against secret with urn %s", secretUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 85c6c6754470d..67564aa721bda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -23,11 +25,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / decryption. - * Requires the MANAGE_SECRETS privilege. + * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / + * decryption. Requires the MANAGE_SECRETS privilege. */ public class GetSecretValuesResolver implements DataFetcher>> { @@ -35,60 +35,67 @@ public class GetSecretValuesResolver implements DataFetcher> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final GetSecretValuesInput input = bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); + final GetSecretValuesInput input = + bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch secrets - final Set urns = input.getSecrets() - .stream() - .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) - .collect(Collectors.toSet()); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch secrets + final Set urns = + input.getSecrets().stream() + .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) + .collect(Collectors.toSet()); - final Map entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(urns), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>(urns), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); - // Now for each secret, decrypt and return the value. If no secret was found, then we will simply omit it from the list. - // There is no ordering guarantee for the list. - return entities.values() - .stream() - .map(entity -> { - EnvelopedAspect aspect = entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); - if (aspect != null) { - // Aspect is present. - final DataHubSecretValue secretValue = new DataHubSecretValue(aspect.getValue().data()); - // Now decrypt the encrypted secret. 
- final String decryptedSecretValue = decryptSecret(secretValue.getValue()); - return new SecretValue(secretValue.getName(), decryptedSecretValue); - } else { - // No secret exists - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + // Now for each secret, decrypt and return the value. If no secret was found, then we + // will simply omit it from the list. + // There is no ordering guarantee for the list. + return entities.values().stream() + .map( + entity -> { + EnvelopedAspect aspect = + entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); + if (aspect != null) { + // Aspect is present. + final DataHubSecretValue secretValue = + new DataHubSecretValue(aspect.getValue().data()); + // Now decrypt the encrypted secret. + final String decryptedSecretValue = decryptSecret(secretValue.getValue()); + return new SecretValue(secretValue.getName(), decryptedSecretValue); + } else { + // No secret exists + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private String decryptSecret(final String encryptedSecret) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index b0d8c9fd34303..eb054295af09b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; @@ -31,13 +34,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. - */ +/** Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. 
*/ @Slf4j public class ListSecretsResolver implements DataFetcher> { @@ -52,55 +49,66 @@ public ListSecretsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final ListSecretsInput input = bindArgument(environment.getArgument("input"), ListSecretsInput.class); + final ListSecretsInput input = + bindArgument(environment.getArgument("input"), ListSecretsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all secrets - final SearchResult gmsResult = _entityClient.search( - Constants.SECRETS_ENTITY_NAME, - query, - null, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - // Then, resolve all secrets - final Map entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); - - // Now that we have entities we can bind this to a result. - final ListSecretsResult result = new ListSecretsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setSecrets(mapEntities(gmsResult.getEntities().stream() - .map(entity -> entities.get(entity.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()))); - return result; - - } catch (Exception e) { - throw new RuntimeException("Failed to list secrets", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all secrets + final SearchResult gmsResult = + _entityClient.search( + Constants.SECRETS_ENTITY_NAME, + query, + null, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + + // Then, resolve all secrets + final Map entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); + + // Now that we have entities we can bind this to a result. + final ListSecretsResult result = new ListSecretsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setSecrets( + mapEntities( + gmsResult.getEntities().stream() + .map(entity -> entities.get(entity.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + + } catch (Exception e) { + throw new RuntimeException("Failed to list secrets", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private List mapEntities(final List entities) { @@ -113,7 +121,8 @@ private List mapEntities(final List entities) { final EnvelopedAspect envelopedInfo = aspects.get(Constants.SECRET_VALUE_ASPECT_NAME); // Bind into a strongly typed object. - final DataHubSecretValue secretValue = new DataHubSecretValue(envelopedInfo.getValue().data()); + final DataHubSecretValue secretValue = + new DataHubSecretValue(envelopedInfo.getValue().data()); // Map using the strongly typed object. results.add(mapSecretValue(entityUrn, secretValue)); @@ -128,4 +137,4 @@ private Secret mapSecretValue(final Urn urn, final DataHubSecretValue value) { result.setDescription(value.getDescription(GetMode.NULL)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java index e510a9fff80aa..225a5801adec9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java @@ -8,10 +8,7 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - -/** - * Utility methods to encrypt and decrypt DataHub secrets. - */ +/** Utility methods to encrypt and decrypt DataHub secrets. */ public class SecretUtils { static String encrypt(String value, String secret) { @@ -30,7 +27,8 @@ static String encrypt(String value, String secret) { } Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding"); cipher.init(Cipher.ENCRYPT_MODE, secretKey); - return Base64.getEncoder().encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); + return Base64.getEncoder() + .encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); } catch (Exception e) { throw new RuntimeException("Failed to encrypt value using provided secret!"); } @@ -59,6 +57,5 @@ static String decrypt(String encryptedValue, String secret) { return null; } - private SecretUtils() { - } + private SecretUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java index 38050331318ca..0666fab52dd4e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java @@ -9,10 +9,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - /** - * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires MANAGE_INGESTION - * privilege. + * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires + * MANAGE_INGESTION privilege. 
*/ public class DeleteIngestionSourceResolver implements DataFetcher> { @@ -28,15 +27,21 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageIngestion(context)) { final String ingestionSourceUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(ingestionSourceUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return ingestionSourceUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against ingestion source with urn %s", ingestionSourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return ingestionSourceUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against ingestion source with urn %s", + ingestionSourceUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java index 562d06b79d2c7..3b6790212ba23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java @@ -19,9 +19,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -/** - * Gets a particular Ingestion Source by urn. - */ +/** Gets a particular Ingestion Source by urn. 
*/ @Slf4j public class GetIngestionSourceResolver implements DataFetcher> { @@ -32,31 +30,37 @@ public GetIngestionSourceResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = Urn.createFromString(urnStr); - final Map entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No ingestion source found - throw new DataHubGraphQLException(String.format("Failed to find Ingestion Source with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Ingestion source found - return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve ingestion source", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = Urn.createFromString(urnStr); + final Map entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No ingestion source found + throw new DataHubGraphQLException( + String.format("Failed to find Ingestion Source with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Ingestion source found + return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve ingestion source", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index d019473606e58..51c9e30aadcce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -26,12 +28,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. - */ -public class ListIngestionSourcesResolver implements DataFetcher> { +/** Lists all ingestion sources stored within DataHub. 
Requires the MANAGE_INGESTION privilege. */ +public class ListIngestionSourcesResolver + implements DataFetcher> { private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,57 +43,74 @@ public ListIngestionSourcesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { - final ListIngestionSourcesInput input = bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); + final ListIngestionSourcesInput input = + bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + final List filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all ingestion sources Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.INGESTION_SOURCE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - null, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all ingestion sources Urns. + final SearchResult gmsResult = + _entityClient.search( + Constants.INGESTION_SOURCE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + null, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, resolve all ingestion sources - final Map entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), - context.getAuthentication()); + // Then, resolve all ingestion sources + final Map entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), + context.getAuthentication()); - final Collection sortedEntities = entities.values() - .stream() - .sorted(Comparator.comparingLong(s -> -s.getAspects().get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME).getCreated().getTime())) - .collect(Collectors.toList()); + final Collection sortedEntities = + entities.values().stream() + .sorted( + Comparator.comparingLong( + s -> + -s.getAspects() + .get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME) + .getCreated() + .getTime())) + .collect(Collectors.toList()); - // Now that we have entities we can bind this to a result. 
- final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setIngestionSources(IngestionResolverUtils.mapIngestionSources(sortedEntities)); - return result; + // Now that we have entities we can bind this to a result. + final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setIngestionSources( + IngestionResolverUtils.mapIngestionSources(sortedEntities)); + return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list ingestion sources", e); - } - }); + } catch (Exception e) { + throw new RuntimeException("Failed to list ingestion sources", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java index 68e334bd976f8..6194452e4b6fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -19,23 +23,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; +import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. - */ +/** Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. 
*/ @Slf4j public class UpsertIngestionSourceResolver implements DataFetcher> { @@ -49,46 +45,60 @@ public UpsertIngestionSourceResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final Optional ingestionSourceUrn = Optional.ofNullable(environment.getArgument("urn")); - final UpdateIngestionSourceInput input = bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); - - // Create the policy info. - final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); - final MetadataChangeProposal proposal; - if (ingestionSourceUrn.isPresent()) { - // Update existing ingestion source - try { - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(ingestionSourceUrn.get()), INGESTION_INFO_ASPECT_NAME, info); - } catch (URISyntaxException e) { - throw new DataHubGraphQLException( - String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), - DataHubGraphQLErrorCode.BAD_REQUEST); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final Optional ingestionSourceUrn = + Optional.ofNullable(environment.getArgument("urn")); + final UpdateIngestionSourceInput input = + bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); + + // Create the policy info. + final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); + final MetadataChangeProposal proposal; + if (ingestionSourceUrn.isPresent()) { + // Update existing ingestion source + try { + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(ingestionSourceUrn.get()), + INGESTION_INFO_ASPECT_NAME, + info); + } catch (URISyntaxException e) { + throw new DataHubGraphQLException( + String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } else { + // Create new ingestion source + // Since we are creating a new Ingestion Source, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); + key.setId(uuidStr); + proposal = + buildMetadataChangeProposalWithKey( + key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); + } + + try { + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform update against ingestion source with urn %s", + input.toString()), + e); + } } - } else { - // Create new ingestion source - // Since we are creating a new Ingestion Source, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); - key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); - } - - try { - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against ingestion source with urn %s", input.toString()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private DataHubIngestionSourceInfo mapIngestionSourceInfo(final UpdateIngestionSourceInput input) { + private DataHubIngestionSourceInfo mapIngestionSourceInfo( + final UpdateIngestionSourceInput input) { final DataHubIngestionSourceInfo result = new DataHubIngestionSourceInfo(); result.setType(input.getType()); result.setName(input.getName()); @@ -113,15 +123,17 @@ private DataHubIngestionSourceConfig mapConfig(final UpdateIngestionSourceConfig result.setDebugMode(input.getDebugMode()); } if (input.getExtraArgs() != null) { - Map extraArgs = input.getExtraArgs() - .stream() - .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + Map extraArgs = + input.getExtraArgs().stream() + .collect( + Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); result.setExtraArgs(new StringMap(extraArgs)); } return result; } - private DataHubIngestionSourceSchedule mapSchedule(final UpdateIngestionSourceScheduleInput input) { + private DataHubIngestionSourceSchedule mapSchedule( + final UpdateIngestionSourceScheduleInput input) { final DataHubIngestionSourceSchedule result = new DataHubIngestionSourceSchedule(); result.setInterval(input.getInterval()); result.setTimezone(input.getTimezone()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index ea61b5e258d8b..06bad27e27062 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -32,10 +32,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -/** - * GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job - */ -public class DataJobRunsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job */ +public class DataJobRunsResolver + implements DataFetcher> { private static final String PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME = "parentTemplate"; private static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; @@ -48,74 +47,76 @@ public DataJobRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildTaskRunsEntityFilter(entityUrn); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 2: Hydrate the incident entities - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 3: Map GMS incident model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 3: Map GMS incident model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); - - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } private Filter buildTaskRunsEntityFilter(final String entityUrn) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) + 
.setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 3ecf396f808b3..d595b1e513d75 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -33,11 +33,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of task runs associated with a Dataset. - */ -public class EntityRunsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching the list of task runs associated with a Dataset. */ +public class EntityRunsResolver + implements DataFetcher> { private static final String INPUT_FIELD_NAME = "inputs.keyword"; private static final String OUTPUT_FIELD_NAME = "outputs.keyword"; @@ -51,76 +49,84 @@ public EntityRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); - final RelationshipDirection direction = RelationshipDirection.valueOf(environment.getArgumentOrDefault("direction", - RelationshipDirection.INCOMING.toString())); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final RelationshipDirection direction = + RelationshipDirection.valueOf( + environment.getArgumentOrDefault( + "direction", RelationshipDirection.INCOMING.toString())); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the incident entities - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS instance model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS instance model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } - private Filter buildTaskRunsEntityFilter(final String entityUrn, final RelationshipDirection direction) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(direction.equals(RelationshipDirection.INCOMING) ? INPUT_FIELD_NAME : OUTPUT_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + private Filter buildTaskRunsEntityFilter( + final String entityUrn, final RelationshipDirection direction) { + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField( + direction.equals(RelationshipDirection.INCOMING) + ? 
INPUT_FIELD_NAME + : OUTPUT_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index 8fc3a60900662..a0caef20a4755 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.lineage; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -16,10 +18,6 @@ import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -29,8 +27,9 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,11 +42,13 @@ public class UpdateLineageResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - final UpdateLineageInput input = bindArgument(environment.getArgument("input"), UpdateLineageInput.class); + final UpdateLineageInput input = + bindArgument(environment.getArgument("input"), UpdateLineageInput.class); final List edgesToAdd = input.getEdgesToAdd(); final List edgesToRemove = input.getEdgesToRemove(); - // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage for each entity + // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage + // for each entity checkPrivileges(context, edgesToAdd, edgesToRemove); // organize data to make updating lineage cleaner @@ -57,77 +58,118 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw downstreamUrns.addAll(downstreamToUpstreamsToAdd.keySet()); downstreamUrns.addAll(downstreamToUpstreamsToRemove.keySet()); - return CompletableFuture.supplyAsync(() -> { - // build MCP for every downstreamUrn - for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); - } - - final List upstreamUrnsToAdd = downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); - final List upstreamUrnsToRemove = downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new 
ArrayList<>()); - try { - switch (downstreamUrn.getEntityType()) { - case Constants.DATASET_ENTITY_NAME: - // need to filter out dataJobs since this is a valid lineage edge, but will be handled in the downstream direction for DataJobInputOutputs - final List filteredUpstreamUrnsToAdd = filterOutDataJobUrns(upstreamUrnsToAdd); - final List filteredUpstreamUrnsToRemove = filterOutDataJobUrns(upstreamUrnsToRemove); - - _lineageService.updateDatasetLineage(downstreamUrn, filteredUpstreamUrnsToAdd, filteredUpstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.CHART_ENTITY_NAME: - _lineageService.updateChartLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DASHBOARD_ENTITY_NAME: - _lineageService.updateDashboardLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DATA_JOB_ENTITY_NAME: - _lineageService.updateDataJobUpstreamLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - default: + return CompletableFuture.supplyAsync( + () -> { + // build MCP for every downstreamUrn + for (Urn downstreamUrn : downstreamUrns) { + if (!_entityService.exists(downstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); + } + + final List upstreamUrnsToAdd = + downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); + final List upstreamUrnsToRemove = + downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); + try { + switch (downstreamUrn.getEntityType()) { + case Constants.DATASET_ENTITY_NAME: + // need to filter out dataJobs since this is a valid lineage edge, but will be + // handled in the downstream direction for DataJobInputOutputs + final List filteredUpstreamUrnsToAdd = + filterOutDataJobUrns(upstreamUrnsToAdd); + final List filteredUpstreamUrnsToRemove = + filterOutDataJobUrns(upstreamUrnsToRemove); + + _lineageService.updateDatasetLineage( + downstreamUrn, + filteredUpstreamUrnsToAdd, + filteredUpstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.CHART_ENTITY_NAME: + _lineageService.updateChartLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DASHBOARD_ENTITY_NAME: + _lineageService.updateDashboardLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DATA_JOB_ENTITY_NAME: + _lineageService.updateDataJobUpstreamLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + default: + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", downstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", downstreamUrn), e); - } - } - - Map> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); - Map> upstreamToDownstreamsToRemove = getUpstreamToDownstreamMap(edgesToRemove); - Set upstreamUrns = new HashSet<>(); - upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); - upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); - - // build MCP for upstreamUrn if necessary - for (Urn upstreamUrn : upstreamUrns) { - if 
(!_entityService.exists(upstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); - } - - final List downstreamUrnsToAdd = upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); - final List downstreamUrnsToRemove = upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); - try { - if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - // need to filter out dataJobs since this is a valid lineage edge, but is handled in the upstream direction for DataJobs - final List filteredDownstreamUrnsToAdd = filterOutDataJobUrns(downstreamUrnsToAdd); - final List filteredDownstreamUrnsToRemove = filterOutDataJobUrns(downstreamUrnsToRemove); - - _lineageService.updateDataJobDownstreamLineage( - upstreamUrn, filteredDownstreamUrnsToAdd, filteredDownstreamUrnsToRemove, actor, context.getAuthentication() - ); + + Map> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); + Map> upstreamToDownstreamsToRemove = + getUpstreamToDownstreamMap(edgesToRemove); + Set upstreamUrns = new HashSet<>(); + upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); + upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + + // build MCP for upstreamUrn if necessary + for (Urn upstreamUrn : upstreamUrns) { + if (!_entityService.exists(upstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); + } + + final List downstreamUrnsToAdd = + upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); + final List downstreamUrnsToRemove = + upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); + try { + if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + // need to filter out dataJobs since this is a valid lineage edge, but is handled in + // the upstream direction for DataJobs + final List filteredDownstreamUrnsToAdd = + filterOutDataJobUrns(downstreamUrnsToAdd); + final List filteredDownstreamUrnsToRemove = + filterOutDataJobUrns(downstreamUrnsToRemove); + + _lineageService.updateDataJobDownstreamLineage( + upstreamUrn, + filteredDownstreamUrnsToAdd, + filteredDownstreamUrnsToRemove, + actor, + context.getAuthentication()); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", upstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", upstreamUrn), e); - } - } - return true; - }); + return true; + }); } private List filterOutDataJobUrns(@Nonnull final List urns) { - return urns.stream().filter( - upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME) - ).collect(Collectors.toList()); + return urns.stream() + .filter(upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); } private Map> getDownstreamToUpstreamsMap(@Nonnull final List edges) { @@ -156,7 +198,10 @@ private Map> getUpstreamToDownstreamMap(@Nonnull final List edgesToAdd, - @Nonnull final List edgesToRemove - ) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup editLineagePrivileges = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new 
ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + @Nonnull final List edgesToRemove) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup editLineagePrivileges = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java index 023686b1d10c9..7f031cb481852 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java @@ -8,21 +8,19 @@ import java.util.concurrent.CompletableFuture; import org.dataloader.DataLoader; - /** * Generic GraphQL resolver responsible for * - * 1. Generating a single input AspectLoadKey. - * 2. Resolving a single {@link Aspect}. - * + *
<p>
1. Generating a single input AspectLoadKey. 2. Resolving a single {@link Aspect}. */ public class AspectResolver implements DataFetcher> { - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader("Aspect"); - final String fieldName = environment.getField().getName(); - final Long version = environment.getArgument("version"); - final String urn = ((Entity) environment.getSource()).getUrn(); - return loader.load(new VersionedAspectKey(urn, fieldName, version)); - } + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader("Aspect"); + final String fieldName = environment.getField().getName(); + final Long version = environment.getArgument("version"); + final String urn = ((Entity) environment.getSource()).getUrn(); + return loader.load(new VersionedAspectKey(urn, fieldName, version)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index 20e0e4ae1c22a..ecf36769dfa9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -5,7 +5,6 @@ import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -21,8 +20,7 @@ public class BatchGetEntitiesResolver implements DataFetcher> entityTypes, - final Function> entitiesProvider - ) { + final Function> entitiesProvider) { _entityTypes = entityTypes; _entitiesProvider = entitiesProvider; } @@ -32,22 +30,28 @@ public CompletableFuture> get(DataFetchingEnvironment environment) final List entities = _entitiesProvider.apply(environment); Map> entityTypeToEntities = new HashMap<>(); - entities.forEach((entity) -> { - EntityType type = entity.getType(); - List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); - entitiesList.add(entity); - entityTypeToEntities.put(type, entitiesList); - }); + entities.forEach( + (entity) -> { + EntityType type = entity.getType(); + List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); + entitiesList.add(entity); + entityTypeToEntities.put(type, entitiesList); + }); List>> entitiesFutures = new ArrayList<>(); for (Map.Entry> entry : entityTypeToEntities.entrySet()) { - CompletableFuture> entitiesFuture = BatchLoadUtils - .batchLoadEntitiesOfSameType(entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); + CompletableFuture> entitiesFuture = + BatchLoadUtils.batchLoadEntitiesOfSameType( + entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); entitiesFutures.add(entitiesFuture); } return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0])) - .thenApply(v -> entitiesFutures.stream().flatMap(future -> future.join().stream()).collect(Collectors.toList())); + .thenApply( + v -> + entitiesFutures.stream() + .flatMap(future -> future.join().stream()) + .collect(Collectors.toList())); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index d44f2b77029f3..c63ec819e8f6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; @@ -17,15 +19,14 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub + * graph. Lineage relationship denotes whether an entity is directly upstream or downstream of + * another entity */ @Slf4j -public class EntityLineageResultResolver implements DataFetcher> { +public class EntityLineageResultResolver + implements DataFetcher> { private final SiblingGraphService _siblingGraphService; @@ -39,38 +40,34 @@ public CompletableFuture get(DataFetchingEnvironment enviro final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); final LineageDirection lineageDirection = input.getDirection(); - @Nullable - final Integer start = input.getStart(); // Optional! - @Nullable - final Integer count = input.getCount(); // Optional! - @Nullable - final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! - @Nullable - final Long startTimeMillis = input.getStartTimeMillis(); // Optional! - @Nullable - final Long endTimeMillis = input.getEndTimeMillis(); // Optional! + @Nullable final Integer start = input.getStart(); // Optional! + @Nullable final Integer count = input.getCount(); // Optional! + @Nullable final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! + @Nullable final Long startTimeMillis = input.getStartTimeMillis(); // Optional! + @Nullable final Long endTimeMillis = input.getEndTimeMillis(); // Optional! com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - return mapEntityRelationships( - _siblingGraphService.getLineage( - Urn.createFromString(urn), - resolvedDirection, - start != null ? start : 0, - count != null ? count : 100, - 1, - separateSiblings != null ? input.getSeparateSiblings() : false, - new HashSet<>(), - startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return mapEntityRelationships( + _siblingGraphService.getLineage( + Urn.createFromString(urn), + resolvedDirection, + start != null ? start : 0, + count != null ? 
count : 100, + 1, + separateSiblings != null ? input.getSeparateSiblings() : false, + new HashSet<>(), + startTimeMillis, + endTimeMillis)); + } catch (URISyntaxException e) { + log.error("Failed to fetch lineage for {}", urn); + throw new RuntimeException(String.format("Failed to fetch lineage for %s", urn), e); + } + }); } private EntityLineageResult mapEntityRelationships( @@ -80,10 +77,10 @@ private EntityLineageResult mapEntityRelationships( result.setCount(entityLineageResult.getCount()); result.setTotal(entityLineageResult.getTotal()); result.setFiltered(entityLineageResult.getFiltered()); - result.setRelationships(entityLineageResult.getRelationships() - .stream() - .map(this::mapEntityRelationship) - .collect(Collectors.toList())); + result.setRelationships( + entityLineageResult.getRelationships().stream() + .map(this::mapEntityRelationship) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 43b28ef85f78a..223548d5d6242 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.load; -import com.linkedin.common.EntityRelationship; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; @@ -17,13 +18,11 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * GraphQL Resolver responsible for fetching relationships between entities in the DataHub graph. */ -public class EntityRelationshipsResultResolver implements DataFetcher> { +public class EntityRelationshipsResultResolver + implements DataFetcher> { private final GraphClient _graphClient; @@ -35,24 +34,22 @@ public EntityRelationshipsResultResolver(final GraphClient graphClient) { public CompletableFuture get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - final RelationshipsInput input = bindArgument(environment.getArgument("input"), RelationshipsInput.class); + final RelationshipsInput input = + bindArgument(environment.getArgument("input"), RelationshipsInput.class); final List relationshipTypes = input.getTypes(); - final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = input.getDirection(); + final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = + input.getDirection(); final Integer start = input.getStart(); // Optional! final Integer count = input.getCount(); // Optional!
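A subtlety worth noting in the lineage resolver's catch block above: the two calls look similar but use different placeholder syntaxes. The SLF4J log.error call substitutes "{}" markers itself, while String.format only expands '%' conversions such as %s; a "{}" handed to String.format is printed literally and its argument is silently dropped. A standalone snippet (plain JDK, no DataHub or SLF4J dependency needed) showing the difference:

    // PlaceholderDemo.java -- why "%s", not "{}", belongs in a String.format string.
    public final class PlaceholderDemo {
      public static void main(String[] args) {
        String urn = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users,PROD)";

        // String.format does not understand SLF4J-style markers; "{}" survives
        // verbatim and the trailing argument is ignored:
        System.out.println(String.format("Failed to fetch lineage for {}", urn));
        // prints: Failed to fetch lineage for {}

        // The %s conversion is what String.format actually substitutes:
        System.out.println(String.format("Failed to fetch lineage for %s", urn));
        // prints: Failed to fetch lineage for urn:li:dataset:(urn:li:dataPlatform:hive,fct_users,PROD)
      }
    }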
- final RelationshipDirection resolvedDirection = RelationshipDirection.valueOf(relationshipDirection.toString()); - return CompletableFuture.supplyAsync(() -> mapEntityRelationships( - fetchEntityRelationships( - urn, - relationshipTypes, - resolvedDirection, - start, - count, - context.getActorUrn() - ), - resolvedDirection - )); + final RelationshipDirection resolvedDirection = + RelationshipDirection.valueOf(relationshipDirection.toString()); + return CompletableFuture.supplyAsync( + () -> + mapEntityRelationships( + fetchEntityRelationships( + urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), + resolvedDirection)); } private EntityRelationships fetchEntityRelationships( @@ -68,23 +65,28 @@ private EntityRelationships fetchEntityRelationships( private EntityRelationshipsResult mapEntityRelationships( final EntityRelationships entityRelationships, - final RelationshipDirection relationshipDirection - ) { + final RelationshipDirection relationshipDirection) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); result.setStart(entityRelationships.getStart()); result.setCount(entityRelationships.getCount()); result.setTotal(entityRelationships.getTotal()); - result.setRelationships(entityRelationships.getRelationships().stream().map(entityRelationship -> mapEntityRelationship( - com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf(relationshipDirection.name()), - entityRelationship) - ).collect(Collectors.toList())); + result.setRelationships( + entityRelationships.getRelationships().stream() + .map( + entityRelationship -> + mapEntityRelationship( + com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( + relationshipDirection.name()), + entityRelationship)) + .collect(Collectors.toList())); return result; } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final EntityRelationship entityRelationship) { - final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); + final com.linkedin.datahub.graphql.generated.EntityRelationship result = + new com.linkedin.datahub.graphql.generated.EntityRelationship(); final Entity partialEntity = UrnToEntityMapper.map(entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java index 6a32e0b14e313..d298c344240c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java @@ -8,31 +8,27 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; - /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + *
<p>
1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeBatchResolver implements DataFetcher>> { - private final List> _entityTypes; - private final Function> _entitiesProvider; + private final List> _entityTypes; + private final Function> _entitiesProvider; - public EntityTypeBatchResolver( - final List> entityTypes, - final Function> entitiesProvider - ) { - _entityTypes = entityTypes; - _entitiesProvider = entitiesProvider; - } + public EntityTypeBatchResolver( + final List> entityTypes, + final Function> entitiesProvider) { + _entityTypes = entityTypes; + _entitiesProvider = entitiesProvider; + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final List entities = _entitiesProvider.apply(environment); - return BatchLoadUtils.batchLoadEntitiesOfSameType(entities, _entityTypes, environment.getDataLoaderRegistry()); - } + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final List entities = _entitiesProvider.apply(environment); + return BatchLoadUtils.batchLoadEntitiesOfSameType( + entities, _entityTypes, environment.getDataLoaderRegistry()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java index 29d5d78e0ea96..3c285f30661bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java @@ -5,64 +5,65 @@ import com.linkedin.datahub.graphql.generated.Entity; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import org.dataloader.DataLoader; /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + *
<p>
1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeResolver implements DataFetcher> { - private static final List IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); - private final List> _entityTypes; - private final Function _entityProvider; + private static final List IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); + private final List> _entityTypes; + private final Function _entityProvider; - public EntityTypeResolver( - final List> entityTypes, - final Function entity - ) { - _entityTypes = entityTypes; - _entityProvider = entity; - } + public EntityTypeResolver( + final List> entityTypes, + final Function entity) { + _entityTypes = entityTypes; + _entityProvider = entity; + } + private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { + return environment.getField().getSelectionSet().getSelections().stream() + .filter( + selection -> { + if (!(selection instanceof graphql.language.Field)) { + return true; + } + return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); + }) + .count() + == 0; + } - private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { - return environment.getField().getSelectionSet().getSelections().stream().filter(selection -> { - if (!(selection instanceof graphql.language.Field)) { - return true; - } - return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); - }).count() == 0; + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final Entity resolvedEntity = _entityProvider.apply(environment); + if (resolvedEntity == null) { + return CompletableFuture.completedFuture(null); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final Entity resolvedEntity = _entityProvider.apply(environment); - if (resolvedEntity == null) { - return CompletableFuture.completedFuture(null); - } - - final Object javaObject = _entityProvider.apply(environment); + final Object javaObject = _entityProvider.apply(environment); - if (isOnlySelectingIdentityFields(environment)) { - return CompletableFuture.completedFuture(javaObject); - } + if (isOnlySelectingIdentityFields(environment)) { + return CompletableFuture.completedFuture(javaObject); + } - final com.linkedin.datahub.graphql.types.EntityType filteredEntity = Iterables.getOnlyElement(_entityTypes.stream() + final com.linkedin.datahub.graphql.types.EntityType filteredEntity = + Iterables.getOnlyElement( + _entityTypes.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); - return loader.load(key); - } + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java index 02a92544855a3..ee2f7c3abe97d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java @@ -3,41 +3,42 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a batch of urns. - * 2. Resolving a single {@link LoadableType}. + *
<p>
1. Retrieving a batch of urns. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *
<p>
Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. * @param the key type for the DataLoader */ public class LoadableTypeBatchResolver implements DataFetcher>> { - private final LoadableType _loadableType; - private final Function> _keyProvider; + private final LoadableType _loadableType; + private final Function> _keyProvider; - public LoadableTypeBatchResolver(final LoadableType loadableType, final Function> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeBatchResolver( + final LoadableType loadableType, + final Function> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final List keys = _keyProvider.apply(environment); - if (keys == null) { - return null; - } - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.loadMany(keys); + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final List keys = _keyProvider.apply(environment); + if (keys == null) { + return null; } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.loadMany(keys); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java index 53702f9cafe8b..3868b1a35b64f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java @@ -3,40 +3,41 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + *
<p>
1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *
<p>
Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. * @param the key type for the DataLoader */ public class LoadableTypeResolver implements DataFetcher> { - private final LoadableType _loadableType; - private final Function _keyProvider; + private final LoadableType _loadableType; + private final Function _keyProvider; - public LoadableTypeResolver(final LoadableType loadableType, final Function keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeResolver( + final LoadableType loadableType, + final Function keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final K key = _keyProvider.apply(environment); - if (key == null) { - return null; - } - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.load(key); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final K key = _keyProvider.apply(environment); + if (key == null) { + return null; } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java index a4867819a2401..e85eaca127d62 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.OwnerType; import com.linkedin.datahub.graphql.types.LoadableType; @@ -8,38 +9,41 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import org.dataloader.DataLoader; import java.util.stream.Collectors; -import com.google.common.collect.Iterables; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + *
<p>
1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *
<p>
Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. */ public class OwnerTypeResolver implements DataFetcher> { - private final List> _loadableTypes; - private final Function _urnProvider; + private final List> _loadableTypes; + private final Function _urnProvider; - public OwnerTypeResolver(final List> loadableTypes, final Function urnProvider) { - _loadableTypes = loadableTypes; - _urnProvider = urnProvider; - } + public OwnerTypeResolver( + final List> loadableTypes, + final Function urnProvider) { + _loadableTypes = loadableTypes; + _urnProvider = urnProvider; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final OwnerType ownerType = _urnProvider.apply(environment); - final LoadableType filteredEntity = Iterables.getOnlyElement(_loadableTypes.stream() + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final OwnerType ownerType = _urnProvider.apply(environment); + final LoadableType filteredEntity = + Iterables.getOnlyElement( + _loadableTypes.stream() .filter(entity -> ownerType.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - return loader.load(((Entity) ownerType).getUrn()); - } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + return loader.load(((Entity) ownerType).getUrn()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index f13ebf8373e91..0d00823697c25 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -27,24 +29,21 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. - * The purpose of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" API - * to a single place. - * - * It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and limit arguments - * used for filtering the specific TimeSeries Aspects to be fetched. + * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. The purpose + * of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" + * API to a single place. * - * On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping - * a generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. 
This will - be invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. + *
<p>
It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and + * limit arguments used for filtering the specific TimeSeries Aspects to be fetched. * + *
<p>
On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping a + generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This will be + invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. */ @Slf4j -public class TimeSeriesAspectResolver implements DataFetcher>> { +public class TimeSeriesAspectResolver + implements DataFetcher>> { private final EntityClient _client; private final String _entityName; @@ -73,13 +72,13 @@ public TimeSeriesAspectResolver( _sort = sort; } - /** - * Check whether the actor is authorized to fetch the timeseries aspect given the resource urn - */ + /** Check whether the actor is authorized to fetch the timeseries aspect given the resource urn */ private boolean isAuthorized(QueryContext context, String urn) { - if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals( - Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized(context, Optional.of(new EntitySpec(_entityName, urn)), + if (_entityName.equals(Constants.DATASET_ENTITY_NAME) + && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(_entityName, urn)), PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); } return true; @@ -87,46 +86,62 @@ private boolean isAuthorized(QueryContext context, String urn) { @Override public CompletableFuture> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); - // Fetch the urn, assuming the parent has an urn field. - // todo: what if the parent urn isn't projected? - final String urn = ((Entity) environment.getSource()).getUrn(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + // Fetch the urn, assuming the parent has an urn field. + // todo: what if the parent urn isn't projected? + final String urn = ((Entity) environment.getSource()).getUrn(); - if (!isAuthorized(context, urn)) { - return Collections.emptyList(); - } + if (!isAuthorized(context, urn)) { + return Collections.emptyList(); + } - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - // Max number of aspects to return. - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; - final SortCriterion maybeSort = _sort; + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + // Max number of aspects to return. + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; + final SortCriterion maybeSort = _sort; - try { - // Step 1: Get aspects. - List aspects = - _client.getTimeseriesAspectValues(urn, _entityName, _aspectName, maybeStartTimeMillis, maybeEndTimeMillis, - maybeLimit, buildFilters(maybeFilters), maybeSort, context.getAuthentication()); + try { + // Step 1: Get aspects.
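The get() method being reflowed here follows the control-flow shape used by most resolvers in this package: read optional GraphQL arguments and fall back to defaults, short-circuit on the authorization check (returning an empty result rather than an error), then run the remote fetch inside CompletableFuture.supplyAsync, rethrowing checked failures as RuntimeException. A minimal, dependency-free sketch of that shape; ProfileClient, resolve, and fetch are hypothetical stand-ins, not DataHub classes:

    import java.util.List;
    import java.util.concurrent.CompletableFuture;

    // Hypothetical sketch of the resolver control flow shown above; none of
    // these names exist in DataHub.
    public final class ResolverShapeSketch {

      /** Stand-in for the remote GMS client call (declares a checked exception). */
      interface ProfileClient {
        List<String> fetch(String urn, long startMillis, long endMillis) throws Exception;
      }

      static CompletableFuture<List<String>> resolve(
          ProfileClient client, String urn, Long maybeStart, Long maybeEnd, boolean authorized) {
        return CompletableFuture.supplyAsync(
            () -> {
              // Authorization short-circuits to an empty result, not an error.
              if (!authorized) {
                return List.of();
              }
              // Optional arguments are defaulted, mirroring "start != null ? start : 0".
              long start = maybeStart != null ? maybeStart : 0L;
              long end = maybeEnd != null ? maybeEnd : Long.MAX_VALUE;
              try {
                // Fetch from the remote service.
                return client.fetch(urn, start, end);
              } catch (Exception e) {
                // Wrap checked failures so the lambda remains a Supplier.
                throw new RuntimeException(
                    String.format("Failed to fetch aspects for %s", urn), e);
              }
            });
      }
    }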
+ List aspects = + _client.getTimeseriesAspectValues( + urn, + _entityName, + _aspectName, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilters(maybeFilters), + maybeSort, + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve aspects from GMS", e); - } - }); + // Step 2: Bind profiles into GraphQL strong types. + return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve aspects from GMS", e); + } + }); } private Filter buildFilters(@Nullable FilterInput maybeFilters) { if (maybeFilters == null) { return null; } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(maybeFilters.getAnd().stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + maybeFilters.getAnd().stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java index 619ca95e7d9ed..bee46f8a18cf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.linkedin.common.urn.CorpuserUrn; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddLinkResolver implements DataFetcher> { @@ -35,41 +33,42 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw String linkLabel = input.getLabel(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) + && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { - log.debug("Adding Link. input: {}", input.toString()); + log.debug("Adding Link. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.addLink( - linkUrl, - linkLabel, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add link to resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to add link to resource with input %s", input.toString()), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.addLink(linkUrl, linkLabel, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to add link to resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to add link to resource with input %s", input.toString()), e); + } + }); } - // Returns whether this is a glossary entity and whether you can edit this glossary entity with the + // Returns whether this is a glossary entity and whether you can edit this glossary entity with + // the // Manage all children or Manage direct children privileges private boolean canUpdateGlossaryEntityLinks(Urn targetUrn, QueryContext context) { - final boolean isGlossaryEntity = targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); + final boolean isGlossaryEntity = + targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); if (!isGlossaryEntity) { return false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java index 3f2dab0a5ba71..9c0d009ff9b0e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnerResolver implements DataFetcher> { @@ -42,28 +41,32 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw OwnerInput ownerInput = ownerInputBuilder.build(); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); - try { + try { - log.debug("Adding Owner. input: {}", input); + log.debug("Adding Owner. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - ImmutableList.of(ownerInput), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owner to resource with input %s", input), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + ImmutableList.of(ownerInput), + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owner to resource with input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java index 4e5b5bdb2a651..c64b2403364c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnersResolver implements DataFetcher> { @@ -29,37 +27,37 @@ public class AddOwnersResolver implements DataFetcher @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final AddOwnersInput input = bindArgument(environment.getArgument("input"), AddOwnersInput.class); + final AddOwnersInput input = + bindArgument(environment.getArgument("input"), AddOwnersInput.class); List owners = input.getOwners(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - OwnerUtils.validateAddOwnerInput( - owners, - targetUrn, - _entityService - ); - try { - - log.debug("Adding Owners. 
input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - owners, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owners to resource with input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + OwnerUtils.validateAddOwnerInput(owners, targetUrn, _entityService); + try { + + log.debug("Adding Owners. input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + owners, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owners to resource with input %s", input), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java index 78d2341492b39..f4e3f7ed49056 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagResolver implements DataFetcher> { @@ -27,44 +25,54 @@ public class AddTagResolver implements DataFetcher> { @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input = + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { - if (!tagUrn.getEntityType().equals("tag")) { - log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals("tag")) { + log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.info("Adding Tag. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info("Adding Tag. input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java index 7174f3edffee6..4135e774172c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,9 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagsResolver implements DataFetcher> { @@ -32,40 +30,47 @@ public class AddTagsResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final AddTagsInput input = bindArgument(environment.getArgument("input"), AddTagsInput.class); - List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List tagUrns = + 
input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - tagUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { - log.info("Adding Tags. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - tagUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + LabelUtils.validateResourceAndLabel( + tagUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { + log.info("Adding Tags. input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + tagUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java index 056b5db4324c3..a776fda558a42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,8 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermResolver implements DataFetcher> { @@ -25,39 +25,49 @@ public class AddTermResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = 
bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. 
input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java index 2f58b6b09e681..4fbe74a0349b4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -19,8 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermsResolver implements DataFetcher> { @@ -29,41 +29,48 @@ public class AddTermsResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final AddTermsInput input = bindArgument(environment.getArgument("input"), AddTermsInput.class); - List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - termUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + LabelUtils.validateResourceAndLabel( + termUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - termUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + termUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java index 5beaeecae673f..94182835de159 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddOwnersResolver implements DataFetcher> { @@ -29,26 +28,30 @@ public class BatchAddOwnersResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final BatchAddOwnersInput input = bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); + final BatchAddOwnersInput input = + bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); final List owners = input.getOwners(); final List resources = input.getResources(); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateOwners(owners); - validateInputResources(resources, context); + // First, validate the batch + validateOwners(owners); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchAddOwners(owners, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + try { + // Then execute the bulk add + batchAddOwners(owners, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against 
input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } private void validateOwners(List owners) { @@ -67,23 +70,32 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be applied to subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be applied to subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddOwners(List owners, List resources, QueryContext context) { + private void batchAddOwners( + List owners, List resources, QueryContext context) { log.debug("Batch adding owners. owners: {}, resources: {}", owners, resources); try { - OwnerUtils.addOwnersToResources(owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.addOwnersToResources( + owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - owners, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + owners, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java index 9c5cddb3c50bc..239ada1653695 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTagsResolver implements DataFetcher> { @@ -36,62 
+33,64 @@ public class BatchAddTagsResolver implements DataFetcher<CompletableFuture<Boolean>>
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final BatchAddTagsInput input = bindArgument(environment.getArgument("input"), BatchAddTagsInput.class);
-    final List<Urn> tagUrns = input.getTagUrns().stream()
-        .map(UrnUtils::getUrn)
-        .collect(Collectors.toList());
+    final BatchAddTagsInput input =
+        bindArgument(environment.getArgument("input"), BatchAddTagsInput.class);
+    final List<Urn> tagUrns =
+        input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList());
     final List<ResourceRefInput> resources = input.getResources();
-    return CompletableFuture.supplyAsync(() -> {
-
-      // First, validate the batch
-      validateTags(tagUrns);
-
-      if (resources.size() == 1 && resources.get(0).getSubResource() != null) {
-        return handleAddTagsToSingleSchemaField(context, resources, tagUrns);
-      }
-
-      validateInputResources(resources, context);
-
-      try {
-        // Then execute the bulk add
-        batchAddTags(tagUrns, resources, context);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+
+          // First, validate the batch
+          validateTags(tagUrns);
+
+          if (resources.size() == 1 && resources.get(0).getSubResource() != null) {
+            return handleAddTagsToSingleSchemaField(context, resources, tagUrns);
+          }
+
+          validateInputResources(resources, context);
+
+          try {
+            // Then execute the bulk add
+            batchAddTags(tagUrns, resources, context);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }

   /**
    * When adding tags to a schema field in the UI, there's a chance the parent entity has siblings.
-   * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the
-   * tag to one of its siblings. If that fails, keep trying all siblings until one passes or all fail.
-   * Then we throw if none succeed.
+   * If the given urn doesn't have a schema or doesn't have the given column, we should try to add
+   * the tag to one of its siblings. If that fails, keep trying all siblings until one passes or all
+   * fail. Then we throw if none succeed.
    */
   private Boolean handleAddTagsToSingleSchemaField(
       @Nonnull final QueryContext context,
       @Nonnull final List<ResourceRefInput> resources,
-      @Nonnull final List<Urn> tagUrns
-  ) {
+      @Nonnull final List<Urn> tagUrns) {
     final ResourceRefInput resource = resources.get(0);
     final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn());
     final List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService);
-    return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, new HashSet<>(), siblingUrns);
+    return attemptBatchAddTagsWithSiblings(
+        tagUrns, resource, context, new HashSet<>(), siblingUrns);
   }

   /**
-   * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings.
-   * Try adding until we attempt all siblings or one passes. Throw if none pass.
+   * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. Try
+   * adding until we attempt all siblings or one passes. Throw if none pass.
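+   * <p>Illustrative summary, inferred from the code below rather than stated by this change: each
+   * attempt records its urn in {@code attemptedUrns}, the next entry of {@code siblingUrns} that
+   * has not yet been attempted becomes the new target, and the original exception is rethrown
+   * only once every sibling has been tried.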
*/ private Boolean attemptBatchAddTagsWithSiblings( @Nonnull final List tagUrns, @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTagsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add tags for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTags(List tagUrns, List resources, QueryContext context) { - log.debug("Batch adding Tags. tags: {}, resources: {}", resources, tagUrns); - try { - LabelUtils.addTagsToResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + private void batchAddTags( + List tagUrns, List resources, QueryContext context) { + log.debug("Batch adding Tags. 
tags: {}, resources: {}", resources, tagUrns); + try { + LabelUtils.addTagsToResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); - } + } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java index a46f37b110f4e..b6d799c13345d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTermsResolver implements DataFetcher> { @@ -36,49 +33,52 @@ public class BatchAddTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTermsInput input = bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTermsInput input = + bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTerms(termUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - return handleAddTermsToSingleSchemaField(context, resources, termUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // First, validate the batch + validateTerms(termUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTermsToSingleSchemaField(context, resources, termUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute 
the bulk add + batchAddTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding terms to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * term to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the term to one of its siblings. If that fails, keep trying all siblings until one passes or + * all fail. Then we throw if none succeed. */ private Boolean handleAddTermsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List resources, - @Nonnull final List termUrns - ) { + @Nonnull final List termUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, new HashSet<>(), siblingUrns); } /** @@ -90,8 +90,7 @@ private Boolean attemptBatchAddTermsWithSiblings( @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTermsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add terms for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTerms(List termUrns, List resources, QueryContext context) { + private void batchAddTerms( + List termUrns, List resources, QueryContext context) { log.debug("Batch adding Terms. terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.addTermsToResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.addTermsToResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java index debd68646910f..30e04ac36ee0f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveOwnersResolver implements DataFetcher> { @@ -29,27 +28,33 @@ public class BatchRemoveOwnersResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final BatchRemoveOwnersInput input = bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); + final BatchRemoveOwnersInput input = + bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); final List owners = input.getOwnerUrns(); final List resources = input.getResources(); - final Optional maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + final Optional maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? 
Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk remove - batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk remove + batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -62,26 +67,40 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be removed from subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be removed from subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveOwners(List ownerUrns, Optional maybeOwnershipTypeUrn, - List resources, QueryContext context) { + private void batchRemoveOwners( + List ownerUrns, + Optional maybeOwnershipTypeUrn, + List resources, + QueryContext context) { log.debug("Batch removing owners. 
owners: {}, resources: {}", ownerUrns, resources);
     try {
-      OwnerUtils.removeOwnersFromResources(ownerUrns.stream().map(UrnUtils::getUrn).collect(
-          Collectors.toList()), maybeOwnershipTypeUrn, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService);
+      OwnerUtils.removeOwnersFromResources(
+          ownerUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()),
+          maybeOwnershipTypeUrn,
+          resources,
+          UrnUtils.getUrn(context.getActorUrn()),
+          _entityService);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to batch remove Owners %s to resources with urns %s!",
-          ownerUrns,
-          resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())),
+      throw new RuntimeException(
+          String.format(
+              "Failed to batch remove Owners %s from resources with urns %s!",
+              ownerUrns,
+              resources.stream()
+                  .map(ResourceRefInput::getResourceUrn)
+                  .collect(Collectors.toList())),
           e);
     }
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java
index ab432f0afcaec..7500f29a0c67f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -16,9 +18,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class BatchRemoveTagsResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -28,26 +27,29 @@ public class BatchRemoveTagsResolver implements DataFetcher<CompletableFuture<Boolean>>
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final BatchRemoveTagsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class);
-    final List<Urn> tagUrns = input.getTagUrns().stream()
-        .map(UrnUtils::getUrn)
-        .collect(Collectors.toList());
+    final BatchRemoveTagsInput input =
+        bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class);
+    final List<Urn> tagUrns =
+        input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList());
     final List<ResourceRefInput> resources = input.getResources();
-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
 
-      // First, validate the batch
-      validateInputResources(resources, context);
+          // First, validate the batch
+          validateInputResources(resources, context);
 
-      try {
-        // Then execute the bulk add
-        batchRemoveTags(tagUrns, resources, context);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+          try {
+            // Then execute the bulk add
+            batchRemoveTags(tagUrns, resources, context);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTags(List tagUrns, List resources, QueryContext context) { + private void batchRemoveTags( + List tagUrns, List resources, QueryContext context) { log.debug("Batch removing Tags. tags: {}, resources: {}", resources, tagUrns); try { - LabelUtils.removeTagsFromResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTagsFromResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java index c8870cc44bf9e..3706e4e911b17 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTermsResolver implements DataFetcher> { @@ -28,26 +27,29 @@ public class BatchRemoveTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTermsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTermsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); + final List 
termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTerms(List termUrns, List resources, QueryContext context) { + private void batchRemoveTerms( + List termUrns, List resources, QueryContext context) { log.debug("Batch removing Terms. 
terms: {}, resources: {}", resources, termUrns);
     try {
-      LabelUtils.removeTermsFromResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService);
+      LabelUtils.removeTermsFromResources(
+          termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to remove Terms %s to resources with urns %s!",
-          termUrns,
-          resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())),
+      throw new RuntimeException(
+          String.format(
+              "Failed to remove Terms %s from resources with urns %s!",
+              termUrns,
+              resources.stream()
+                  .map(ResourceRefInput::getResourceUrn)
+                  .collect(Collectors.toList())),
           e);
     }
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java
index 9b6167c673d8d..551878371b489 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -18,9 +20,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class BatchSetDomainResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -30,25 +29,29 @@ public class BatchSetDomainResolver implements DataFetcher<CompletableFuture<Boolean>>
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
     final QueryContext context = environment.getContext();
-    final BatchSetDomainInput input = bindArgument(environment.getArgument("input"), BatchSetDomainInput.class);
+    final BatchSetDomainInput input =
+        bindArgument(environment.getArgument("input"), BatchSetDomainInput.class);
     final String maybeDomainUrn = input.getDomainUrn();
     final List<ResourceRefInput> resources = input.getResources();
-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
 
-      // First, validate the domain
-      validateDomain(maybeDomainUrn);
-      validateInputResources(resources, context);
+          // First, validate the domain
+          validateDomain(maybeDomainUrn);
+          validateInputResources(resources, context);
 
-      try {
-        // Then execute the bulk add
-        batchSetDomains(maybeDomainUrn, resources, context);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+          try {
+            // Then execute the bulk add
+            batchSetDomains(maybeDomainUrn, resources, context);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }

   private void validateDomain(@Nullable String maybeDomainUrn) {
@@ -66,23 +69,31 @@ private void validateInputResources(List<ResourceRefInput> resources, QueryConte
   private void validateInputResource(ResourceRefInput
resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchSetDomains(String maybeDomainUrn, List resources, QueryContext context) { + private void batchSetDomains( + String maybeDomainUrn, List resources, QueryContext context) { log.debug("Batch adding Domains. domainUrn: {}, resources: {}", maybeDomainUrn, resources); try { - DomainUtils.setDomainForResources(maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), + DomainUtils.setDomainForResources( + maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - maybeDomainUrn, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + maybeDomainUrn, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java index 5961dc9087a63..e76617d119621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateDeprecationResolver implements DataFetcher> { @@ -30,23 +29,32 @@ public class BatchUpdateDeprecationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateDeprecationInput input = bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); + final BatchUpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the resources - validateInputResources(resources, context); + // First, validate the resources + validateInputResources(resources, 
context); - try { - // Then execute the bulk update - batchUpdateDeprecation(input.getDeprecated(), input.getNote(), input.getDecommissionTime(), resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk update + batchUpdateDeprecation( + input.getDeprecated(), + input.getNote(), + input.getDecommissionTime(), + resources, + context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -58,17 +66,25 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DeprecationUtils.isAuthorizedToUpdateDeprecationForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchUpdateDeprecation(boolean deprecated, + private void batchUpdateDeprecation( + boolean deprecated, @Nullable String note, @Nullable Long decommissionTime, List resources, QueryContext context) { - log.debug("Batch updating deprecation. deprecated: {}, note: {}, decommissionTime: {}, resources: {}", deprecated, note, decommissionTime, resources); + log.debug( + "Batch updating deprecation. 
deprecated: {}, note: {}, decommissionTime: {}, resources: {}", + deprecated, + note, + decommissionTime, + resources); try { DeprecationUtils.updateDeprecationForResources( deprecated, @@ -78,10 +94,14 @@ private void batchUpdateDeprecation(boolean deprecated, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch update deprecated to %s for resources with urns %s!", - deprecated, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch update deprecated to %s for resources with urns %s!", + deprecated, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 69b2b92fb9cca..5a25e6d83e648 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -14,9 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher> { @@ -26,24 +25,32 @@ public class BatchUpdateSoftDeletedResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateSoftDeletedInput input = bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); + final BatchUpdateSoftDeletedInput input = + bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); final List urns = input.getUrns(); final boolean deleted = input.getDeleted(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the entities exist - validateInputUrns(urns, context); + // First, validate the entities exist + validateInputUrns(urns, context); - try { - // Then execute the bulk soft delete - batchUpdateSoftDeleted(deleted, urns, context); - return true; - } catch (Exception e) { - log.error("Failed to perform batch soft delete against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform batch soft delete against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk soft delete + batchUpdateSoftDeleted(deleted, urns, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform batch soft delete against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform batch soft delete against input %s", input.toString()), + e); + } + }); } private void validateInputUrns(List urnStrs, QueryContext context) { @@ -55,10 +62,12 @@ 
private void validateInputUrns(List urnStrs, QueryContext context) { private void validateInputUrn(String urnStr, QueryContext context) { final Urn urn = UrnUtils.getUrn(urnStr); if (!DeleteUtils.isAuthorizedToDeleteEntity(context, urn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } if (!_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); } } @@ -66,14 +75,12 @@ private void batchUpdateSoftDeleted(boolean removed, List urnStrs, Query log.debug("Batch soft deleting assets. urns: {}", urnStrs); try { DeleteUtils.updateStatusForResources( - removed, - urnStrs, - UrnUtils.getUrn(context.getActorUrn()), - _entityService); + removed, urnStrs, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to batch update soft deleted status entities with urns %s!", urnStrs), + String.format( + "Failed to batch update soft deleted status entities with urns %s!", urnStrs), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index 59d5d6939c04c..d0796389d2280 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -1,13 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.container.EditableContainerProperties; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.DomainProperties; @@ -30,148 +31,191 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DescriptionUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DescriptionUtils() { } + private DescriptionUtils() {} public static void updateFieldDescription( String newDescription, Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService - ) { - 
EditableSchemaMetadata editableSchemaMetadata = - (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); - - editableFieldInfo.setDescription(newDescription); - - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + EntityService entityService) { + EditableSchemaMetadata editableSchemaMetadata = + (EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); + + editableFieldInfo.setDescription(newDescription); + + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } public static void updateContainerDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = - (EditableContainerProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableContainerProperties()); + (EditableContainerProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableContainerProperties()); containerProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, containerProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + containerProperties, + actor, + entityService); } public static void updateDomainDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = - (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, entityService, null); + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + entityService, + null); if (domainProperties == null) { - // If there are no properties for the domain already, then we should throw since the properties model also requires a name. + // If there are no properties for the domain already, then we should throw since the + // properties model also requires a name. 
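+      // Inferred note, not stated by the original change: unlike the editable aspects above,
+      // which fall back to a freshly constructed default instance, DomainProperties is fetched
+      // with a null default because its required name field cannot be synthesized here, hence
+      // the explicit null check and failure below.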
throw new IllegalArgumentException("Properties for this Domain do not yet exist!"); } domainProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + entityService); } public static void updateTagDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = - (TagProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); + (TagProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); if (tagProperties == null) { - // If there are no properties for the tag already, then we should throw since the properties model also requires a name. + // If there are no properties for the tag already, then we should throw since the properties + // model also requires a name. throw new IllegalArgumentException("Properties for this Tag do not yet exist!"); } tagProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); + persistAspect( + resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); } public static void updateCorpGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = - (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, entityService, new CorpGroupEditableInfo()); + (CorpGroupEditableInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + entityService, + new CorpGroupEditableInfo()); if (corpGroupEditableInfo != null) { corpGroupEditableInfo.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, corpGroupEditableInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + corpGroupEditableInfo, + actor, + entityService); } public static void updateGlossaryTermDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + entityService, + null); if (glossaryTermInfo == null) { - // If there are no properties for the term already, then we should throw since the properties model also requires a name. + // If there are no properties for the term already, then we should throw since the properties + // model also requires a name. 
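+      // Inferred note, not stated by the original change: same fail-fast pattern as the Domain
+      // and Tag cases above, since GlossaryTermInfo also carries a required name and no default
+      // instance can be constructed.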
throw new IllegalArgumentException("Properties for this Glossary Term do not yet exist!"); } - glossaryTermInfo.setDefinition(newDescription); // We call description 'definition' for glossary terms. Not great, we know. :( - persistAspect(resourceUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, entityService); + glossaryTermInfo.setDefinition( + newDescription); // We call description 'definition' for glossary terms. Not great, we know. + // :( + persistAspect( + resourceUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + entityService); } public static void updateGlossaryNodeDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setDefinition(newDescription); - persistAspect(resourceUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + entityService); } public static void updateNotebookDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableNotebookProperties notebookProperties = + (EditableNotebookProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + entityService, + null); if (notebookProperties != null) { notebookProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, notebookProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + notebookProperties, + actor, + entityService); } public static Boolean validateFieldDescriptionInput( Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); @@ -179,51 +223,41 @@ public static Boolean validateFieldDescriptionInput( return true; } - public static Boolean validateDomainInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. 
%s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateContainerInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateLabelInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateCorpGroupInput( - Urn corpUserUrn, - EntityService entityService - ) { + public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { if (!entityService.exists(corpUserUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); } return true; } - public static Boolean validateNotebookInput( - Urn notebookUrn, - EntityService entityService) { + public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { if (!entityService.exists(notebookUrn)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", notebookUrn, notebookUrn)); @@ -231,11 +265,15 @@ public static Boolean validateNotebookInput( return true; } - public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateFieldDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -245,11 +283,14 @@ public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -259,25 +300,31 @@ public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContex orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateContainerDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - targetUrn.getEntityType(), - targetUrn.toString(), - orPrivilegeGroups); - } + public static boolean isAuthorizedToUpdateContainerDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); + } - public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + 
ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -288,79 +335,122 @@ public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext cont } public static void updateMlModelDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelProperties editableProperties = + (EditableMLModelProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlModelGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelGroupProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelGroupProperties editableProperties = + (EditableMLModelGroupProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelGroupProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } + public static void updateMlFeatureDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureProperties editableProperties = + (EditableMLFeatureProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static 
void updateMlFeatureTableDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureTableProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureTableProperties editableProperties = + (EditableMLFeatureTableProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureTableProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlPrimaryKeyDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLPrimaryKeyProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLPrimaryKeyProperties editableProperties = + (EditableMLPrimaryKeyProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLPrimaryKeyProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateDataProductDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, entityService, new DataProductProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + DataProductProperties properties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + entityService, + new DataProductProperties()); if (properties != null) { properties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + properties, + actor, + entityService); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e5e3a5a0ee42e..e4c5c132be4f7 100644 --- 
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java
@@ -19,11 +19,10 @@
 import com.linkedin.metadata.entity.EntityUtils;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
+import java.util.concurrent.CompletableFuture;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
-import java.util.concurrent.CompletableFuture;
-
 @Slf4j
 @RequiredArgsConstructor
 public class MoveDomainResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -33,57 +32,78 @@ public class MoveDomainResolver implements DataFetcher<CompletableFuture<Boolean>>
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
-    final MoveDomainInput input = ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class);
+    final MoveDomainInput input =
+        ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class);
     final QueryContext context = environment.getContext();
     final Urn resourceUrn = UrnUtils.getUrn(input.getResourceUrn());
-    final Urn newParentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null;
+    final Urn newParentDomainUrn =
+        input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null;
 
-    return CompletableFuture.supplyAsync(() -> {
-      if (!AuthorizationUtils.canManageDomains(context)) {
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!AuthorizationUtils.canManageDomains(context)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
 
-      try {
-        if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) {
-          throw new IllegalArgumentException("Resource is not a domain.");
-        }
+          try {
+            if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) {
+              throw new IllegalArgumentException("Resource is not a domain.");
+            }
 
-        DomainProperties properties = (DomainProperties) EntityUtils.getAspectFromEntity(
-            resourceUrn.toString(),
-            Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService,
-            null
-        );
+            DomainProperties properties =
+                (DomainProperties)
+                    EntityUtils.getAspectFromEntity(
+                        resourceUrn.toString(),
+                        Constants.DOMAIN_PROPERTIES_ASPECT_NAME,
+                        _entityService,
+                        null);
 
-        if (properties == null) {
-          throw new IllegalArgumentException("Domain properties do not exist.");
-        }
+            if (properties == null) {
+              throw new IllegalArgumentException("Domain properties do not exist.");
+            }
 
-        if (newParentDomainUrn != null) {
-          if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) {
-            throw new IllegalArgumentException("Parent entity is not a domain.");
-          }
-          if (!_entityService.exists(newParentDomainUrn)) {
-            throw new IllegalArgumentException("Parent entity does not exist.");
-          }
-        }
+            if (newParentDomainUrn != null) {
+              if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) {
+                throw new IllegalArgumentException("Parent entity is not a domain.");
+              }
+              if (!_entityService.exists(newParentDomainUrn)) {
+                throw new IllegalArgumentException("Parent entity does not exist.");
+              }
+            }
 
-        if (DomainUtils.hasNameConflict(properties.getName(), newParentDomainUrn, context, _entityClient)) {
-          throw new DataHubGraphQLException(
-              String.format("\"%s\" already exists in the destination domain. Please pick a unique name.", properties.getName()),
-              DataHubGraphQLErrorCode.CONFLICT
-          );
-        }
+            if (DomainUtils.hasNameConflict(
+                properties.getName(), newParentDomainUrn, context, _entityClient)) {
+              throw new DataHubGraphQLException(
+                  String.format(
+                      "\"%s\" already exists in the destination domain. Please pick a unique name.",
+                      properties.getName()),
+                  DataHubGraphQLErrorCode.CONFLICT);
+            }
 
-        properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL);
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        MutationUtils.persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, properties, actor, _entityService);
-        return true;
-      } catch (DataHubGraphQLException e) {
-        throw e;
-      } catch (Exception e) {
-        log.error("Failed to move domain {} to parent {} : {}", input.getResourceUrn(), input.getParentDomain(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to move domain %s to %s", input.getResourceUrn(), input.getParentDomain()), e);
-      }
-    });
+            properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL);
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            MutationUtils.persistAspect(
+                resourceUrn,
+                Constants.DOMAIN_PROPERTIES_ASPECT_NAME,
+                properties,
+                actor,
+                _entityService);
+            return true;
+          } catch (DataHubGraphQLException e) {
+            throw e;
+          } catch (Exception e) {
+            log.error(
+                "Failed to move domain {} to parent {} : {}",
+                input.getResourceUrn(),
+                input.getParentDomain(),
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to move domain %s to %s",
+                    input.getResourceUrn(), input.getParentDomain()),
+                e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java
index 30bd940a7dfed..064b532a792c1 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java
@@ -1,20 +1,18 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.codahale.metrics.Timer;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.types.BatchMutableType;
 import com.linkedin.metadata.utils.metrics.MetricUtils;
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 /**
  * Generic GraphQL resolver responsible for performing updates against particular types.
  *
@@ -23,7 +21,8 @@
  */
 public class MutableTypeBatchResolver<I, B, T> implements DataFetcher<CompletableFuture<List<T>>> {
 
-  private static final Logger _logger = LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName());
+  private static final Logger _logger =
+      LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName());
 
   private final BatchMutableType<I, B, T> _batchMutableType;
 
@@ -33,21 +32,23 @@ public MutableTypeBatchResolver(final BatchMutableType<I, B, T> batchMutableType
   @Override
   public CompletableFuture<List<T>> get(DataFetchingEnvironment environment) throws Exception {
-    final B[] input = bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass());
-
-    return CompletableFuture.supplyAsync(() -> {
-      Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time();
-
-      try {
-        return _batchMutableType.batchUpdate(input, environment.getContext());
-      } catch (AuthorizationException e) {
-        throw e;
-      } catch (Exception e) {
-        _logger.error("Failed to perform batchUpdate", e);
-        throw new IllegalArgumentException(e);
-      } finally {
-        timer.stop();
-      }
-    });
+    final B[] input =
+        bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass());
+
+    return CompletableFuture.supplyAsync(
+        () -> {
+          Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time();
+
+          try {
+            return _batchMutableType.batchUpdate(input, environment.getContext());
+          } catch (AuthorizationException e) {
+            throw e;
+          } catch (Exception e) {
+            _logger.error("Failed to perform batchUpdate", e);
+            throw new IllegalArgumentException(e);
+          } finally {
+            timer.stop();
+          }
+        });
   }
 }
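The timing above uses an explicit finally block around timer.stop(). Since com.codahale.metrics.Timer.Context implements Closeable, the same bookkeeping can be expressed with try-with-resources; this is just an alternative sketch, not something the patch changes.

    import com.codahale.metrics.MetricRegistry;
    import com.codahale.metrics.Timer;

    public class TimedCall {
      private static final MetricRegistry REGISTRY = new MetricRegistry();

      static String timedWork() {
        Timer timer = REGISTRY.timer("batchMutate");
        // Timer.Context implements Closeable, so the sample is recorded on close,
        // mirroring the explicit timer.stop() in the finally block above.
        try (Timer.Context ignored = timer.time()) {
          return "done";
        }
      }
    }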
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java
index 115a68e808de6..c62282c906597 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.types.MutableType;
 import graphql.schema.DataFetcher;
@@ -8,8 +10,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
 /**
  * Generic GraphQL resolver responsible for performing updates against particular types.
  *
@@ -18,28 +18,34 @@
  */
 public class MutableTypeResolver<I, T> implements DataFetcher<CompletableFuture<T>> {
 
-  private static final Logger _logger = LoggerFactory.getLogger(MutableTypeResolver.class.getName());
+  private static final Logger _logger =
+      LoggerFactory.getLogger(MutableTypeResolver.class.getName());
 
-  private final MutableType<I, T> _mutableType;
+  private final MutableType<I, T> _mutableType;
 
-  public MutableTypeResolver(final MutableType<I, T> mutableType) {
-    _mutableType = mutableType;
-  }
+  public MutableTypeResolver(final MutableType<I, T> mutableType) {
+    _mutableType = mutableType;
+  }
 
-  @Override
-  public CompletableFuture<T> get(DataFetchingEnvironment environment) throws Exception {
-    final String urn = environment.getArgument("urn");
-    final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass());
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        _logger.debug(String.format("Mutating entity. input: %s", input));
-        return _mutableType.update(urn, input, environment.getContext());
-      } catch (AuthorizationException e) {
-        throw e;
-      } catch (Exception e) {
-        _logger.error(String.format("Failed to perform update against input %s", input) + " " + e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input), e);
-      }
+  @Override
+  public CompletableFuture<T> get(DataFetchingEnvironment environment) throws Exception {
+    final String urn = environment.getArgument("urn");
+    final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass());
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            _logger.debug(String.format("Mutating entity. input: %s", input));
+            return _mutableType.update(urn, input, environment.getContext());
+          } catch (AuthorizationException e) {
+            throw e;
+          } catch (Exception e) {
+            _logger.error(
+                String.format("Failed to perform update against input %s", input)
+                    + " "
+                    + e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input), e);
+          }
         });
-  }
+  }
 }
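All of these resolvers call the single-argument CompletableFuture.supplyAsync, which schedules work on ForkJoinPool.commonPool() by default. For reference, the two-argument overload takes an explicit Executor; the sketch below assumes a dedicated pool would ever be wanted, which is not something this patch changes.

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class ExecutorChoice {
      public static void main(String[] args) {
        // A dedicated pool instead of ForkJoinPool.commonPool(); size chosen arbitrarily.
        ExecutorService pool = Executors.newFixedThreadPool(4);
        CompletableFuture<Boolean> result = CompletableFuture.supplyAsync(() -> true, pool);
        System.out.println(result.join());
        pool.shutdown();
      }
    }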
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java
index c862fcfa83594..4a915b2a477cb 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.data.template.StringMap;
@@ -19,49 +21,56 @@
 import java.util.Optional;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 public class MutationUtils {
-  private MutationUtils() { }
+  private MutationUtils() {}
 
-  public static void persistAspect(Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) {
-    final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(urn, aspectName, aspect);
+  public static void persistAspect(
+      Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) {
+    final MetadataChangeProposal proposal =
+        buildMetadataChangeProposalWithUrn(urn, aspectName, aspect);
     entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false);
   }
 
   /**
-   * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI
+   * Only intended for use from GraphQL mutations, executes a different flow indicating a request
+   * sourced from the UI
+   *
    * @param urn
   * @param aspectName
   * @param aspect
   * @return
   */
-  public static MetadataChangeProposal buildMetadataChangeProposalWithUrn(Urn urn, String aspectName, RecordTemplate aspect) {
+  public static MetadataChangeProposal buildMetadataChangeProposalWithUrn(
+      Urn urn, String aspectName, RecordTemplate aspect) {
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(urn);
     return setProposalProperties(proposal, urn.getEntityType(), aspectName, aspect);
   }
 
   /**
-   * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI
+   * Only intended for use from GraphQL mutations, executes a different flow indicating a request
+   * sourced from the UI
+   *
    * @param entityKey
    * @param entityType
    * @param aspectName
    * @param aspect
    * @return
    */
-  public static MetadataChangeProposal buildMetadataChangeProposalWithKey(RecordTemplate entityKey, String entityType,
-      String aspectName, RecordTemplate aspect) {
+  public static MetadataChangeProposal buildMetadataChangeProposalWithKey(
+      RecordTemplate entityKey, String entityType, String aspectName, RecordTemplate aspect) {
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(entityKey));
     return setProposalProperties(proposal, entityType, aspectName, aspect);
   }
 
-  private static MetadataChangeProposal setProposalProperties(MetadataChangeProposal proposal,
-      String entityType, String aspectName, RecordTemplate aspect) {
+  private static MetadataChangeProposal setProposalProperties(
+      MetadataChangeProposal proposal,
+      String entityType,
+      String aspectName,
+      RecordTemplate aspect) {
     proposal.setEntityType(entityType);
     proposal.setAspectName(aspectName);
     proposal.setAspect(GenericRecordUtils.serializeAspect(aspect));
@@ -77,18 +86,16 @@ private static MetadataChangeProposal setProposalProperties(MetadataChangePropos
   }
 
   public static EditableSchemaFieldInfo getFieldInfoFromSchema(
-      EditableSchemaMetadata editableSchemaMetadata,
-      String fieldPath
-  ) {
+      EditableSchemaMetadata editableSchemaMetadata, String fieldPath) {
     if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) {
       editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray());
     }
     EditableSchemaFieldInfoArray editableSchemaMetadataArray =
         editableSchemaMetadata.getEditableSchemaFieldInfo();
-    Optional<EditableSchemaFieldInfo> fieldMetadata = editableSchemaMetadataArray
-        .stream()
-        .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath))
-        .findFirst();
+    Optional<EditableSchemaFieldInfo> fieldMetadata =
+        editableSchemaMetadataArray.stream()
+            .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath))
+            .findFirst();
 
     if (fieldMetadata.isPresent()) {
       return fieldMetadata.get();
@@ -104,34 +111,37 @@ public static Boolean validateSubresourceExists(
       Urn targetUrn,
       String subResource,
       SubResourceType subResourceType,
-      EntityService entityService
-  ) {
+      EntityService entityService) {
     if (subResourceType.equals(SubResourceType.DATASET_FIELD)) {
-      SchemaMetadata schemaMetadata = (SchemaMetadata) entityService.getAspect(targetUrn,
-          Constants.SCHEMA_METADATA_ASPECT_NAME, 0);
+      SchemaMetadata schemaMetadata =
+          (SchemaMetadata)
+              entityService.getAspect(targetUrn, Constants.SCHEMA_METADATA_ASPECT_NAME, 0);
       if (schemaMetadata == null) {
         throw new IllegalArgumentException(
-            String.format("Failed to update %s & field %s. %s has no schema.", targetUrn, subResource, targetUrn)
-        );
+            String.format(
+                "Failed to update %s & field %s. %s has no schema.",
+                targetUrn, subResource, targetUrn));
       }
       Optional<SchemaField> fieldMatch =
-          schemaMetadata.getFields().stream().filter(field -> field.getFieldPath().equals(subResource)).findFirst();
+          schemaMetadata.getFields().stream()
+              .filter(field -> field.getFieldPath().equals(subResource))
+              .findFirst();
       if (!fieldMatch.isPresent()) {
-        throw new IllegalArgumentException(String.format(
-            "Failed to update %s & field %s. Field %s does not exist in the datasets schema.",
-            targetUrn, subResource, subResource));
+        throw new IllegalArgumentException(
+            String.format(
+                "Failed to update %s & field %s. Field %s does not exist in the datasets schema.",
+                targetUrn, subResource, subResource));
       }
       return true;
     }
-    throw new IllegalArgumentException(String.format(
-        "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.",
-        targetUrn, subResource, SubResourceType.values()
-    ));
+    throw new IllegalArgumentException(
+        String.format(
+            "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.",
+            targetUrn, subResource, SubResourceType.values()));
   }
-
 }
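For readers unfamiliar with the flow: buildMetadataChangeProposalWithUrn fills in the target URN, setProposalProperties attaches the entity type, aspect name, and serialized aspect, and persistAspect hands the finished proposal to the EntityService. A rough caller-side sketch follows; the URN strings, the tag values, and the fluent setters on the generated TagProperties model are illustrative assumptions, not code from this patch.

    // Hypothetical caller of the helpers above; values are illustrative only.
    void updateTagDocs(EntityService entityService) throws URISyntaxException {
      Urn tagUrn = Urn.createFromString("urn:li:tag:pii");
      Urn actorUrn = Urn.createFromString("urn:li:corpuser:datahub");
      // TagProperties requires a name, per the guard earlier in this patch.
      TagProperties properties =
          new TagProperties().setName("pii").setDescription("Tags personally identifiable data");
      // Builds a MetadataChangeProposal for (urn, aspect name, aspect) and ingests it.
      MutationUtils.persistAspect(
          tagUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, properties, actorUrn, entityService);
    }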
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java
index 23c08043af5d3..f84d1b3a66f6f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.CorpuserUrn;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -13,9 +15,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class RemoveLinkResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -24,36 +23,38 @@ public class RemoveLinkResolver implements DataFetcher<CompletableFuture<Boolean
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
-    final RemoveLinkInput input = bindArgument(environment.getArgument("input"), RemoveLinkInput.class);
+    final RemoveLinkInput input =
+        bindArgument(environment.getArgument("input"), RemoveLinkInput.class);
     String linkUrl = input.getLinkUrl();
     Urn targetUrn = Urn.createFromString(input.getResourceUrn());
 
     if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn)) {
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
 
-    return CompletableFuture.supplyAsync(() -> {
-      LinkUtils.validateAddRemoveInput(
-          linkUrl,
-          targetUrn,
-          _entityService
-      );
-      try {
-        log.debug("Removing Link input: {}", input);
-
-        Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn());
-        LinkUtils.removeLink(
-            linkUrl,
-            targetUrn,
-            actor,
-            _entityService
-        );
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to remove link from resource with input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to remove link from resource with input %s", input.toString()), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService);
+          try {
+            log.debug("Removing Link input: {}", input);
+
+            Urn actor =
+                CorpuserUrn.createFromString(
+                    ((QueryContext) environment.getContext()).getActorUrn());
+            LinkUtils.removeLink(linkUrl, targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to remove link from resource with input {}, {}",
+                input.toString(),
+                e.getMessage());
+            throw new RuntimeException(
+                String.format(
+                    "Failed to remove link from resource with input %s", input.toString()),
+                e);
+          }
+        });
  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java
index 2d5faaab44458..9827aa0666d19 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.CorpuserUrn;
 import com.linkedin.common.urn.Urn;
@@ -16,9 +18,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class RemoveOwnerResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -27,36 +26,42 @@ public class RemoveOwnerResolver implements DataFetcher<CompletableFuture<Boolea
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
-    final RemoveOwnerInput input = bindArgument(environment.getArgument("input"), RemoveOwnerInput.class);
+    final RemoveOwnerInput input =
+        bindArgument(environment.getArgument("input"), RemoveOwnerInput.class);
 
     Urn ownerUrn = Urn.createFromString(input.getOwnerUrn());
     Urn targetUrn = Urn.createFromString(input.getResourceUrn());
-    Optional<Urn> maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty()
-        : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn()));
+    Optional<Urn> maybeOwnershipTypeUrn =
+        input.getOwnershipTypeUrn() == null
+            ? Optional.empty()
+            : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn()));
 
     if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) {
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
 
-    return CompletableFuture.supplyAsync(() -> {
-      OwnerUtils.validateRemoveInput(
-          targetUrn,
-          _entityService
-      );
-      try {
-        Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn());
-        OwnerUtils.removeOwnersFromResources(
-            ImmutableList.of(ownerUrn),
-            maybeOwnershipTypeUrn,
-            ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)),
-            actor,
-            _entityService
-        );
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to remove owner from resource with input {}", input);
-        throw new RuntimeException(String.format("Failed to remove owner from resource with input %s", input.toString()), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          OwnerUtils.validateRemoveInput(targetUrn, _entityService);
+          try {
+            Urn actor =
+                CorpuserUrn.createFromString(
+                    ((QueryContext) environment.getContext()).getActorUrn());
+            OwnerUtils.removeOwnersFromResources(
+                ImmutableList.of(ownerUrn),
+                maybeOwnershipTypeUrn,
+                ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)),
+                actor,
+                _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error("Failed to remove owner from resource with input {}", input);
+            throw new RuntimeException(
+                String.format(
+                    "Failed to remove owner from resource with input %s", input.toString()),
+                e);
+          }
+        });
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java
index 33a95c3576061..7e2919e0ca1f6 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.CorpuserUrn;
 import com.linkedin.common.urn.Urn;
@@ -16,9 +18,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class RemoveTagResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -26,44 +25,54 @@ public class RemoveTagResolver implements DataFetcher<CompletableFuture<Boolean>
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
-    final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class);
+    final TagAssociationInput input =
+        bindArgument(environment.getArgument("input"), TagAssociationInput.class);
     Urn tagUrn = Urn.createFromString(input.getTagUrn());
     Urn targetUrn = Urn.createFromString(input.getResourceUrn());
 
-    if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) {
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    if (!LabelUtils.isAuthorizedToUpdateTags(
+        environment.getContext(), targetUrn, input.getSubResource())) {
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
-    return CompletableFuture.supplyAsync(() -> {
-      LabelUtils.validateResourceAndLabel(
-          tagUrn,
-          targetUrn,
-          input.getSubResource(),
-          input.getSubResourceType(),
-          Constants.TAG_ENTITY_NAME,
-          _entityService,
-          true
-      );
-      try {
-
-        if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) {
-          log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString());
-          return false;
-        }
-
-        log.debug("Removing Tag. input: %s", input);
-        Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn());
-        LabelUtils.removeTagsFromResources(
-            ImmutableList.of(tagUrn),
-            ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())),
-            actor,
-            _entityService
-        );
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          LabelUtils.validateResourceAndLabel(
+              tagUrn,
+              targetUrn,
+              input.getSubResource(),
+              input.getSubResourceType(),
+              Constants.TAG_ENTITY_NAME,
+              _entityService,
+              true);
+          try {
+
+            if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) {
+              log.error("Failed to remove {}. It is not a tag urn.", tagUrn.toString());
+              return false;
+            }
+
+            log.debug("Removing Tag. input: {}", input);
+            Urn actor =
+                CorpuserUrn.createFromString(
+                    ((QueryContext) environment.getContext()).getActorUrn());
+            LabelUtils.removeTagsFromResources(
+                ImmutableList.of(tagUrn),
+                ImmutableList.of(
+                    new ResourceRefInput(
+                        input.getResourceUrn(),
+                        input.getSubResourceType(),
+                        input.getSubResource())),
+                actor,
+                _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 }
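A note on the logging convention in these resolvers: SLF4J substitutes {} placeholders and leaves printf-style %s specifiers untouched, so a message written with %s is logged verbatim (the removed lines above show that mistake). A minimal illustration:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class PlaceholderDemo {
      private static final Logger log = LoggerFactory.getLogger(PlaceholderDemo.class);

      public static void main(String[] args) {
        log.error("Failed to remove %s. It is not a tag urn.", "urn:li:tag:x"); // prints literal %s
        log.error("Failed to remove {}. It is not a tag urn.", "urn:li:tag:x"); // substitutes the urn
      }
    }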
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java
index 8f18b0ecd6198..ec38360df6d8e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.CorpuserUrn;
 import com.linkedin.common.urn.Urn;
@@ -16,9 +18,6 @@
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class RemoveTermResolver implements DataFetcher<CompletableFuture<Boolean>> {
@@ -26,45 +25,55 @@ public class RemoveTermResolver implements DataFetcher<CompletableFuture<Boolean
 
   @Override
   public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception {
-    final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class);
+    final TermAssociationInput input =
+        bindArgument(environment.getArgument("input"), TermAssociationInput.class);
     Urn termUrn = Urn.createFromString(input.getTermUrn());
     Urn targetUrn = Urn.createFromString(input.getResourceUrn());
 
-    if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) {
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    if (!LabelUtils.isAuthorizedToUpdateTerms(
+        environment.getContext(), targetUrn, input.getSubResource())) {
+      throw new AuthorizationException(
+          "Unauthorized to perform this action. Please contact your DataHub administrator.");
     }
 
-    return CompletableFuture.supplyAsync(() -> {
-      LabelUtils.validateResourceAndLabel(
-          termUrn,
-          targetUrn,
-          input.getSubResource(),
-          input.getSubResourceType(),
-          Constants.GLOSSARY_TERM_ENTITY_NAME,
-          _entityService,
-          true
-      );
+    return CompletableFuture.supplyAsync(
+        () -> {
+          LabelUtils.validateResourceAndLabel(
+              termUrn,
+              targetUrn,
+              input.getSubResource(),
+              input.getSubResourceType(),
+              Constants.GLOSSARY_TERM_ENTITY_NAME,
+              _entityService,
+              true);
 
-      try {
-
-        if (!termUrn.getEntityType().equals("glossaryTerm")) {
-          log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString());
-          return false;
-        }
+          try {
+
+            if (!termUrn.getEntityType().equals("glossaryTerm")) {
+              log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString());
+              return false;
+            }
 
-        log.info(String.format("Removing Term. input: {}", input));
-        Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn());
-        LabelUtils.removeTermsFromResources(
-            ImmutableList.of(termUrn),
-            ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())),
-            actor,
-            _entityService
-        );
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
input: {}", input)); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTermsFromResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java index d6e6e5610da56..13a8427633cae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -12,19 +14,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class UpdateDescriptionResolver implements DataFetcher> { @@ -33,7 +30,8 @@ public class UpdateDescriptionResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final DescriptionUpdateInput input = bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); + final DescriptionUpdateInput input = + bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating description. input: {}", input.toString()); switch (targetUrn.getEntityType()) { @@ -67,380 +65,383 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return updateDataProductDescription(targetUrn, input, environment.getContext()); default: throw new RuntimeException( - String.format("Failed to update description. Unsupported resource type %s provided.", targetUrn)); + String.format( + "Failed to update description. Unsupported resource type %s provided.", targetUrn)); } } - private CompletableFuture updateContainerDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
-  private CompletableFuture<Boolean> updateContainerDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-
-      DescriptionUtils.validateContainerInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateContainerDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateContainerDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+
+          DescriptionUtils.validateContainerInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateContainerDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  private CompletableFuture<Boolean> updateDomainDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateDomainInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateDomainDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateDomainDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          DescriptionUtils.validateDomainInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateDomainDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  // If updating schema field description fails, try again on a sibling until there are no more siblings to try. Then throw if necessary.
+  // If updating schema field description fails, try again on a sibling until there are no more
+  // siblings to try. Then throw if necessary.
   private Boolean attemptUpdateDatasetSchemaFieldDescription(
       @Nonnull final Urn targetUrn,
       @Nonnull final DescriptionUpdateInput input,
       @Nonnull final QueryContext context,
       @Nonnull final HashSet<Urn> attemptedUrns,
-      @Nonnull final List<Urn> siblingUrns
-  ) {
+      @Nonnull final List<Urn> siblingUrns) {
     attemptedUrns.add(targetUrn);
     try {
-      DescriptionUtils.validateFieldDescriptionInput(targetUrn, input.getSubResource(), input.getSubResourceType(),
-          _entityService);
+      DescriptionUtils.validateFieldDescriptionInput(
+          targetUrn, input.getSubResource(), input.getSubResourceType(), _entityService);
 
       final Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-      DescriptionUtils.updateFieldDescription(input.getDescription(), targetUrn, input.getSubResource(), actor,
-          _entityService);
+      DescriptionUtils.updateFieldDescription(
+          input.getDescription(), targetUrn, input.getSubResource(), actor, _entityService);
       return true;
     } catch (Exception e) {
       final Optional<Urn> siblingUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, attemptedUrns);
       if (siblingUrn.isPresent()) {
-        log.warn("Failed to update description for input {}, trying sibling urn {} now.", input.toString(), siblingUrn.get());
-        return attemptUpdateDatasetSchemaFieldDescription(siblingUrn.get(), input, context, attemptedUrns, siblingUrns);
+        log.warn(
+            "Failed to update description for input {}, trying sibling urn {} now.",
+            input.toString(),
+            siblingUrn.get());
+        return attemptUpdateDatasetSchemaFieldDescription(
+            siblingUrn.get(), input, context, attemptedUrns, siblingUrns);
       } else {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
+        log.error(
+            "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+        throw new RuntimeException(
+            String.format("Failed to perform update against input %s", input.toString()), e);
      }
     }
   }
 
-  private CompletableFuture<Boolean> updateDatasetSchemaFieldDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+  private CompletableFuture<Boolean> updateDatasetSchemaFieldDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
 
-    return CompletableFuture.supplyAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
 
-      if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-
-      if (input.getSubResourceType() == null) {
-        throw new IllegalArgumentException("Update description without subresource is not currently supported");
-      }
+          if (input.getSubResourceType() == null) {
+            throw new IllegalArgumentException(
+                "Update description without subresource is not currently supported");
+          }
 
-      List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService);
+          List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService);
 
-      return attemptUpdateDatasetSchemaFieldDescription(targetUrn, input, context, new HashSet<>(), siblingUrns);
-    });
+          return attemptUpdateDatasetSchemaFieldDescription(
+              targetUrn, input, context, new HashSet<>(), siblingUrns);
+        });
   }
 
-  private CompletableFuture<Boolean> updateTagDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateLabelInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateTagDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateTagDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          DescriptionUtils.validateLabelInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateTagDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  private CompletableFuture<Boolean> updateGlossaryTermDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-      final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient);
-      if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)
-          && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)
-      ) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateLabelInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateGlossaryTermDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateGlossaryTermDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient);
+          if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)
+              && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          DescriptionUtils.validateLabelInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateGlossaryTermDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  private CompletableFuture<Boolean> updateGlossaryNodeDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-      final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient);
-      if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)
-          && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)
-      ) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateLabelInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateGlossaryNodeDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateGlossaryNodeDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient);
+          if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)
+              && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          DescriptionUtils.validateLabelInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateGlossaryNodeDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  private CompletableFuture<Boolean> updateCorpGroupDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateCorpGroupDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateCorpGroupDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateCorpGroupDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  private CompletableFuture<Boolean> updateNotebookDescription(Urn targetUrn, DescriptionUpdateInput input,
-      QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateNotebookInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateNotebookDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateNotebookDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
+            throw new AuthorizationException(
+                "Unauthorized to perform this action. Please contact your DataHub administrator.");
+          }
+          DescriptionUtils.validateNotebookInput(targetUrn, _entityService);
+
+          try {
+            Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
+            DescriptionUtils.updateNotebookDescription(
+                input.getDescription(), targetUrn, actor, _entityService);
+            return true;
+          } catch (Exception e) {
+            log.error(
+                "Failed to perform update against input {}, {}", input.toString(), e.getMessage());
+            throw new RuntimeException(
+                String.format("Failed to perform update against input %s", input.toString()), e);
+          }
+        });
   }
 
-  private CompletableFuture<Boolean> updateMlModelDescription(Urn targetUrn, DescriptionUpdateInput input,
-      QueryContext context) {
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      }
-      DescriptionUtils.validateLabelInput(targetUrn, _entityService);
-
-      try {
-        Urn actor = CorpuserUrn.createFromString(context.getActorUrn());
-        DescriptionUtils.updateMlModelDescription(
-            input.getDescription(),
-            targetUrn,
-            actor,
-            _entityService);
-        return true;
-      } catch (Exception e) {
-        log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage());
-        throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e);
-      }
-    });
+  private CompletableFuture<Boolean> updateMlModelDescription(
+      Urn targetUrn, DescriptionUpdateInput input, QueryContext context) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) {
+            throw new AuthorizationException(
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlModelGroupDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlModelGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlFeatureDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlFeatureDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlPrimaryKeyDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlPrimaryKeyDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlPrimaryKeyDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlPrimaryKeyDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlFeatureTableDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureTableDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlFeatureTableDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureTableDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateDataProductDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDataProductDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateDataProductDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDataProductDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 0e316ac1296ee..dd44c2718b3a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -16,22 +19,18 @@ import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher> { @@ -41,178 +40,232 @@ public class UpdateNameResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final UpdateNameInput input = bindArgument(environment.getArgument("input"), 
UpdateNameInput.class); + final UpdateNameInput input = + bindArgument(environment.getArgument("input"), UpdateNameInput.class); Urn targetUrn = Urn.createFromString(input.getUrn()); log.info("Updating name. input: {}", input); - return CompletableFuture.supplyAsync(() -> { - if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); - } + return CompletableFuture.supplyAsync( + () -> { + if (!_entityService.exists(targetUrn)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + } - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermName(targetUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeName(targetUrn, input, environment.getContext()); - case Constants.DOMAIN_ENTITY_NAME: - return updateDomainName(targetUrn, input, environment.getContext()); - case Constants.CORP_GROUP_ENTITY_NAME: - return updateGroupName(targetUrn, input, environment.getContext()); - case Constants.DATA_PRODUCT_ENTITY_NAME: - return updateDataProductName(targetUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update name. Unsupported resource type %s provided.", targetUrn)); - } - }); + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermName(targetUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeName(targetUrn, input, environment.getContext()); + case Constants.DOMAIN_ENTITY_NAME: + return updateDomainName(targetUrn, input, environment.getContext()); + case Constants.CORP_GROUP_ENTITY_NAME: + return updateGroupName(targetUrn, input, environment.getContext()); + case Constants.DATA_PRODUCT_ENTITY_NAME: + return updateDataProductName(targetUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update name. 
Unsupported resource type %s provided.", targetUrn)); + } + }); } private Boolean updateGlossaryTermName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { throw new IllegalArgumentException("Glossary Term does not exist"); } glossaryTermInfo.setName(input.getName()); Urn actor = UrnUtils.getUrn(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateGlossaryNodeName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateDomainName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateDomainName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageDomains(context)) { try { - DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, null); + DomainProperties domainProperties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (domainProperties == null) { throw new IllegalArgumentException("Domain does not exist"); } - if (DomainUtils.hasNameConflict(input.getName(), DomainUtils.getParentDomainSafely(domainProperties), context, _entityClient)) { + if (DomainUtils.hasNameConflict( + input.getName(), + DomainUtils.getParentDomainSafely(domainProperties), + context, + _entityClient)) { throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); + String.format( + "\"%s\" already exists in this domain. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); } domainProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + _entityService); return true; } catch (DataHubGraphQLException e) { throw e; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateGroupName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageUsersAndGroups(context)) { try { - CorpGroupInfo corpGroupInfo = (CorpGroupInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.CORP_GROUP_INFO_ASPECT_NAME, _entityService, null); + CorpGroupInfo corpGroupInfo = + (CorpGroupInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.CORP_GROUP_INFO_ASPECT_NAME, + _entityService, + null); if (corpGroupInfo == null) { throw new IllegalArgumentException("Group does not exist"); } corpGroupInfo.setDisplayName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); + persistAspect( + targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateDataProductName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { try { - DataProductProperties dataProductProperties = (DataProductProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, _entityService, null); + DataProductProperties dataProductProperties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (dataProductProperties == null) { throw new IllegalArgumentException("Data Product does not exist"); } - Domains dataProductDomains = (Domains) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); - if (dataProductDomains != null && dataProductDomains.hasDomains() && dataProductDomains.getDomains().size() > 0) { + Domains dataProductDomains = + (Domains) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); + if (dataProductDomains != null + && dataProductDomains.hasDomains() + && dataProductDomains.getDomains().size() > 0) { // get first domain since we only allow one domain right now Urn domainUrn = UrnUtils.getUrn(dataProductDomains.getDomains().get(0).toString()); - // if they can't edit a data product from either the parent domain permission or from permission on the data product itself, throw error + // if they can't edit a data product from either the parent domain permission or from + // permission on the data product itself, throw error if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn) && !DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } else { // should not happen since data products need to have a domain if (!DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } dataProductProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + dataProductProperties, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 5d78bc38eafe8..848118e6cc0f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -8,21 +11,17 @@ import com.linkedin.datahub.graphql.generated.UpdateParentNodeInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher> { @@ -32,54 +31,72 @@ public class UpdateParentNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final UpdateParentNodeInput input = bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); + final UpdateParentNodeInput input = + bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); final QueryContext context = environment.getContext(); Urn targetUrn = 
Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryNode.", targetUrn, parentNodeUrn)); + if (!_entityService.exists(parentNodeUrn) + || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryNode.", + targetUrn, parentNodeUrn)); } } GlossaryNodeUrn finalParentNodeUrn = parentNodeUrn; - return CompletableFuture.supplyAsync(() -> { - Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - // need to be able to manage current parent node and new parent node - if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) - && GlossaryUtils.canManageChildrenEntities(context, finalParentNodeUrn, _entityClient)) { - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update parentNode. Unsupported resource type %s provided.", targetUrn)); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + // need to be able to manage current parent node and new parent node + if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) + && GlossaryUtils.canManageChildrenEntities( + context, finalParentNodeUrn, _entityClient)) { + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update parentNode. Unsupported resource type %s provided.", + targetUrn)); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private Boolean updateGlossaryTermParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { - // If there is no info aspect for the term already, then we should throw since the model also requires a name. + // If there is no info aspect for the term already, then we should throw since the model + // also requires a name. throw new IllegalArgumentException("Info for this Glossary Term does not yet exist!"); } @@ -89,12 +106,18 @@ private Boolean updateGlossaryTermParentNode( glossaryTermInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } @@ -102,11 +125,15 @@ private Boolean updateGlossaryNodeParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Info for this Glossary Node does not yet exist!"); } @@ -117,12 +144,18 @@ private Boolean updateGlossaryNodeParentNode( glossaryNodeInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java index 875bc43e7c100..53b215bce7746 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,15 +21,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} - * instead. - */ +/** Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} instead. */ @Slf4j @RequiredArgsConstructor public class UpdateUserSettingResolver implements DataFetcher> { @@ -35,35 +31,46 @@ public class UpdateUserSettingResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateUserSettingInput input = bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); + final UpdateUserSettingInput input = + bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); UserSetting name = input.getName(); final boolean value = input.getValue(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - return CompletableFuture.supplyAsync(() -> { - try { - // In the future with more settings, we'll need to do a read-modify-write - // for now though, we can just write since there is only 1 setting - CorpUserSettings newSettings = new CorpUserSettings(); - newSettings.setAppearance(new CorpUserAppearanceSettings()); - if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { - newSettings.setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); - } else { - log.error("User Setting name {} not currently supported", name); - throw new RuntimeException(String.format("User Setting name %s not currently supported", name)); - } + return CompletableFuture.supplyAsync( + () -> { + try { + // In the future with more settings, we'll need to do a read-modify-write + // for now though, we can just write since there is only 1 setting + CorpUserSettings newSettings = new CorpUserSettings(); + newSettings.setAppearance(new CorpUserAppearanceSettings()); + if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { + newSettings.setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); + } else { + log.error("User Setting name {} not currently supported", name); + throw new RuntimeException( + String.format("User Setting name %s not currently supported", name)); + } - MetadataChangeProposal proposal = - buildMetadataChangeProposalWithUrn(actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); - _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); + _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); - return true; - } catch (Exception e) { - 
log.error("Failed to perform user settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform user settings update against input %s", input.toString()), e); - } - }); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform user settings update against input %s", input.toString()), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 7d4c5bee61e19..3fffe9fa019e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -19,22 +20,21 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DeleteUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeleteUtils() { } + private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -45,11 +45,7 @@ public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, } public static void updateStatusForResources( - boolean removed, - List urnStrs, - Urn actor, - EntityService entityService - ) { + boolean removed, List urnStrs, Urn actor, EntityService entityService) { final List changes = new ArrayList<>(); for (String urnStr : urnStrs) 
{ changes.add(buildSoftDeleteProposal(removed, urnStr, actor, entityService)); @@ -58,17 +54,13 @@ public static void updateStatusForResources( } private static MetadataChangeProposal buildSoftDeleteProposal( - boolean removed, - String urnStr, - Urn actor, - EntityService entityService - ) { - Status status = (Status) EntityUtils.getAspectFromEntity( - urnStr, - Constants.STATUS_ASPECT_NAME, - entityService, - new Status()); + boolean removed, String urnStr, Urn actor, EntityService entityService) { + Status status = + (Status) + EntityUtils.getAspectFromEntity( + urnStr, Constants.STATUS_ASPECT_NAME, entityService, new Status()); status.setRemoved(removed); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java index bd82bbb8e514f..3114e5241711c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; @@ -22,22 +23,22 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; - - @Slf4j public class DeprecationUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeprecationUtils() { } + private DeprecationUtils() {} - public static boolean isAuthorizedToUpdateDeprecationForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDeprecationForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new 
ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -53,11 +54,12 @@ public static void updateDeprecationForResources( @Nullable Long decommissionTime, List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildUpdateDeprecationProposal(deprecated, note, decommissionTime, resource, actor, entityService)); + changes.add( + buildUpdateDeprecationProposal( + deprecated, note, decommissionTime, resource, actor, entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } @@ -68,21 +70,11 @@ private static MetadataChangeProposal buildUpdateDeprecationProposal( @Nullable Long decommissionTime, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { String resourceUrn = resource.getResourceUrn(); - Deprecation deprecation = getDeprecation( - entityService, - resourceUrn, - actor, - note, - deprecated, - decommissionTime - ); + Deprecation deprecation = + getDeprecation(entityService, resourceUrn, actor, note, deprecated, decommissionTime); return MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn(resourceUrn), - Constants.DEPRECATION_ASPECT_NAME, - deprecation - ); + UrnUtils.getUrn(resourceUrn), Constants.DEPRECATION_ASPECT_NAME, deprecation); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 585fbdf53a2ba..fb88d6c29f662 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -1,15 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; @@ -30,7 +32,6 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; - import com.linkedin.r2.RemoteInvocationException; import java.util.ArrayList; import java.util.Collections; @@ -40,13 +41,8 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; -import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - // TODO: Move to consuming from DomainService. @Slf4j public class DomainUtils { @@ -54,17 +50,20 @@ public class DomainUtils { private static final String HAS_PARENT_DOMAIN_INDEX_FIELD_NAME = "hasParentDomain"; private static final String NAME_INDEX_FIELD_NAME = "name"; - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DomainUtils() { } + private DomainUtils() {} - public static boolean isAuthorizedToUpdateDomainsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -78,8 +77,8 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildSetDomainProposal(domainUrn, resource, actor, entityService)); @@ -88,27 +87,27 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.DOMAINS_ASPECT_NAME, - entityService, - new Domains()); + @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.DOMAINS_ASPECT_NAME, + entityService, + new Domains()); final UrnArray newDomains = new UrnArray(); if (domainUrn != null) { newDomains.add(domainUrn); } domains.setDomains(newDomains); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } public static void validateDomain(Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate Domain with urn %s. 
Urn does not exist.", domainUrn)); } } @@ -119,14 +118,12 @@ private static List buildRootDomainCriteria() { new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("false") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("") - .setCondition(Condition.IS_NULL) - ); + .setCondition(Condition.IS_NULL)); return criteria; } @@ -138,14 +135,12 @@ private static List buildParentDomainCriteria(@Nonnull final Urn pare new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("true") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue(parentDomainUrn.toString()) - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); return criteria; } @@ -158,36 +153,38 @@ private static Criterion buildNameCriterion(@Nonnull final String name) { } /** - * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain criterion. - * The reason for the OR on root is elastic can have a null|false value to represent an root domain in the index. + * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain + * criterion. The reason for the OR on root is that elastic can have a null|false value to + * represent a root domain in the index. + * * @param name an optional name to AND in to each condition of the filter * @param parentDomainUrn the parent domain (null means root). * @return the Filter */ - public static Filter buildNameAndParentDomainFilter(@Nullable final String name, @Nullable final Urn parentDomainUrn) { + public static Filter buildNameAndParentDomainFilter( + @Nullable final String name, @Nullable final Urn parentDomainUrn) { if (parentDomainUrn == null) { - return new Filter().setOr( - new ConjunctiveCriterionArray( - buildRootDomainCriteria().stream().map(parentCriterion -> { - final CriterionArray array = new CriterionArray(parentCriterion); - if (name != null) { - array.add(buildNameCriterion(name)); - } - return new ConjunctiveCriterion().setAnd(array); - }).collect(Collectors.toList()) - ) - ); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + buildRootDomainCriteria().stream() + .map( + parentCriterion -> { + final CriterionArray array = new CriterionArray(parentCriterion); + if (name != null) { + array.add(buildNameCriterion(name)); + } + return new ConjunctiveCriterion().setAnd(array); + }) + .collect(Collectors.toList()))); } final CriterionArray andArray = new CriterionArray(buildParentDomainCriteria(parentDomainUrn)); if (name != null) { andArray.add(buildNameCriterion(name)); } - return new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(andArray) - ) - ); + return new Filter() + .setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(andArray))); } public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn) { @@ -196,6 +193,7 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn /** * Check if a domain has any child domains + * * @param domainUrn the URN of the domain to check * @param context query context (includes authorization context to authorize the request) * @param entityClient client used to perform the check * @return @@ -204,18 +202,14 @@ public static boolean 
hasChildDomains( @Nonnull final Urn domainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) throws RemoteInvocationException { + @Nonnull final EntityClient entityClient) + throws RemoteInvocationException { Filter parentDomainFilter = buildParentDomainFilter(domainUrn); // Search for entities matching parent domain // Limit count to 1 for existence check - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - parentDomainFilter, - null, - 0, - 1, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication()); return (searchResult.getNumEntities() > 0); } @@ -223,23 +217,18 @@ private static Map getDomainsByNameAndParent( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { final Filter filter = buildNameAndParentDomainFilter(name, parentDomainUrn); - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final Set domainUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Set domainUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); return entityClient.batchGetV2( DOMAIN_ENTITY_NAME, @@ -255,51 +244,63 @@ public static boolean hasNameConflict( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { - final Map entities = getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); + @Nonnull final EntityClient entityClient) { + final Map entities = + getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); // Even though we searched by name, do one more pass to check the name is unique - return entities.values().stream().anyMatch(entityResponse -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data(); - DomainProperties domainProperties = new DomainProperties(dataMap); - return (domainProperties.hasName() && domainProperties.getName().equals(name)); - } - return false; - }); + return entities.values().stream() + .anyMatch( + entityResponse -> { + if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); + DomainProperties domainProperties = new DomainProperties(dataMap); + return (domainProperties.hasName() && domainProperties.getName().equals(name)); + } + return false; + }); } @Nullable public static Entity getParentDomain( @Nonnull final Urn urn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { - final EntityResponse entityResponse = entityClient.getV2( - DOMAIN_ENTITY_NAME, - urn, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - - if (entityResponse != null && 
entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); + final EntityResponse entityResponse = + entityClient.getV2( + DOMAIN_ENTITY_NAME, + urn, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new DomainProperties( + entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve parent domain for entity %s", urn), e); + throw new RuntimeException( + String.format("Failed to retrieve parent domain for entity %s", urn), e); } return null; } /** - * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where moving a domain - * to the root leaves the parentDomain field set but makes hasParentDomain false. This helper makes sure that queries - * to elastic where hasParentDomain=false and parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where + * moving a domain to the root leaves the parentDomain field set but makes hasParentDomain false. + * This helper makes sure that queries to elastic where hasParentDomain=false and + * parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * * @param properties the domain properties aspect * @return the parentDomain or null */ @@ -307,4 +308,4 @@ public static Entity getParentDomain( public static Urn getParentDomainSafely(@Nonnull final DomainProperties properties) { return properties.hasParentDomain() ? 
properties.getParentDomain() : null; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java index 8aa4a8d756bea..15c93904fc3bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java @@ -3,7 +3,6 @@ import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -11,20 +10,22 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EmbedUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private EmbedUtils() { } + private EmbedUtils() {} - public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateEmbedForEntity( + @Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -33,4 +34,4 @@ public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn enti entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 655e5333cb34e..996bd3da120d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.glossary.GlossaryNodeInfo; @@ -15,32 +15,36 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class GlossaryUtils { - private GlossaryUtils() { } + private GlossaryUtils() {} /** - * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the user has global control - * of their Business Glossary to create, edit, move, and delete Terms and Nodes. + * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the + * user has global control of their Business Glossary to create, edit, move, and delete Terms and + * Nodes. */ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** - * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes under a parent Node. - * They can do this with either the global MANAGE_GLOSSARIES privilege, or if they have the MANAGE_GLOSSARY_CHILDREN privilege - * on the relevant parent node in the Glossary. + * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes + * under a parent Node. They can do this with either the global MANAGE_GLOSSARIES privilege, or if + * they have the MANAGE_GLOSSARY_CHILDREN privilege on the relevant parent node in the Glossary. */ - public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, @Nonnull EntityClient entityClient) { + public static boolean canManageChildrenEntities( + @Nonnull QueryContext context, + @Nullable Urn parentNodeUrn, + @Nonnull EntityClient entityClient) { if (canManageGlossaries(context)) { return true; } @@ -48,28 +52,31 @@ public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @ return false; // if no parent node, we must rely on the canManageGlossaries method above } - //Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege - if (hasManagePrivilege(context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { + // Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege + if (hasManagePrivilege( + context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } - //Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no parent associated. + // Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no + // parent associated. 
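 (Illustrative aside, not part of the patch: the while-loop that follows implements a
 simple ancestor walk. Under an assumed hierarchy of glossary nodes
 Finance -> Revenue -> parentNodeUrn, a user granted MANAGE_ALL_GLOSSARY_CHILDREN on
 Finance is authorized, because getParentUrn(...) is followed until it returns null at
 the root. An equivalent for-loop sketch, using only the helpers already shown here:

   for (Urn node = parentNodeUrn;
       node != null;
       node = getParentUrn(node, context, entityClient)) {
     if (hasManagePrivilege(
         context, node, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) {
       return true;
     }
   }

 The patch's while-form below behaves identically.)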
Urn currentParentNodeUrn = parentNodeUrn; while (currentParentNodeUrn != null) { - if (hasManagePrivilege(context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { + if (hasManagePrivilege( + context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } currentParentNodeUrn = getParentUrn(currentParentNodeUrn, context, entityClient); } return false; - } - public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())) - )); + public static boolean hasManagePrivilege( + @Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -83,13 +90,24 @@ public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullabl * Returns the urn of the parent node for a given Glossary Term. Returns null if it doesn't exist. */ @Nullable - private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getTermParentUrn( + @Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_TERM_ENTITY_NAME, termUrn, - ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { - GlossaryTermInfo termInfo = new GlossaryTermInfo(response.getAspects() - .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + termUrn, + ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { + GlossaryTermInfo termInfo = + new GlossaryTermInfo( + response + .getAspects() + .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) + .getValue() + .data()); return termInfo.getParentNode(); } return null; @@ -102,13 +120,24 @@ private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext * Returns the urn of the parent node for a given Glossary Node. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getNodeParentUrn( + @Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_NODE_ENTITY_NAME, nodeUrn, - ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { - GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(response.getAspects() - .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + nodeUrn, + ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { + GlossaryNodeInfo nodeInfo = + new GlossaryNodeInfo( + response + .getAspects() + .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) + .getValue() + .data()); return nodeInfo.getParentNode(); } return null; @@ -118,17 +147,21 @@ private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext } /** - * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null otherwise. + * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null + * otherwise. */ @Nullable - public static Urn getParentUrn(@Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + public static Urn getParentUrn( + @Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { switch (urn.getEntityType()) { case Constants.GLOSSARY_TERM_ENTITY_NAME: return getTermParentUrn(urn, context, entityClient); case Constants.GLOSSARY_NODE_ENTITY_NAME: return getNodeParentUrn(urn, context, entityClient); default: - log.warn("Tried to get the parent node urn of a non-glossary entity type: {}", urn.getEntityType()); + log.warn( + "Tried to get the parent node urn of a non-glossary entity type: {}", + urn.getEntityType()); return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index a93c7d5b333da..8765b91f65d9d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -13,8 +17,6 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import 
com.linkedin.datahub.graphql.generated.SubResourceType;
 import com.linkedin.metadata.Constants;
@@ -30,53 +32,56 @@ import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-
-
 // TODO: Move to consuming GlossaryTermService, TagService.
 @Slf4j
 public class LabelUtils {
-  private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of(
-      PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-  ));
+  private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP =
+      new ConjunctivePrivilegeGroup(
+          ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()));
 
-  private LabelUtils() { }
+  private LabelUtils() {}
 
   public static void removeTermFromResource(
-      Urn labelUrn,
-      Urn resourceUrn,
-      String subResource,
-      Urn actor,
-      EntityService entityService
-  ) {
+      Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) {
     if (subResource == null || subResource.equals("")) {
       com.linkedin.common.GlossaryTerms terms =
-          (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(
-              resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms());
+          (com.linkedin.common.GlossaryTerms)
+              EntityUtils.getAspectFromEntity(
+                  resourceUrn.toString(),
+                  Constants.GLOSSARY_TERMS_ASPECT_NAME,
+                  entityService,
+                  new GlossaryTerms());
       terms.setAuditStamp(EntityUtils.getAuditStamp(actor));
 
       removeTermIfExists(terms, labelUrn);
       persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService);
     } else {
       com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata =
-          (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity(
-              resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata());
-      EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource);
+          (com.linkedin.schema.EditableSchemaMetadata)
+              EntityUtils.getAspectFromEntity(
+                  resourceUrn.toString(),
+                  Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+                  entityService,
+                  new EditableSchemaMetadata());
+      EditableSchemaFieldInfo editableFieldInfo =
+          getFieldInfoFromSchema(editableSchemaMetadata, subResource);
       if (!editableFieldInfo.hasGlossaryTerms()) {
         editableFieldInfo.setGlossaryTerms(new GlossaryTerms());
       }
 
       removeTermIfExists(editableFieldInfo.getGlossaryTerms(), labelUrn);
-      persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService);
+      persistAspect(
+          resourceUrn,
+          Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+          editableSchemaMetadata,
+          actor,
+          entityService);
     }
   }
 
   public static void removeTagsFromResources(
-      List<Urn> tags,
-      List<ResourceRefInput> resources,
-      Urn actor,
-      EntityService entityService
-  ) throws Exception {
+      List<Urn> tags, List<ResourceRefInput> resources, Urn actor, EntityService entityService)
+      throws Exception {
     final List<MetadataChangeProposal> changes = new ArrayList<>();
     for (ResourceRefInput resource : resources) {
       changes.add(buildRemoveTagsProposal(tags, resource, actor, entityService));
@@ -85,11 +90,8 @@ public static void removeTagsFromResources(
   }
 
   public static void addTagsToResources(
-      List<Urn> tagUrns,
-      List<ResourceRefInput> resources,
-      Urn actor,
-      EntityService entityService
-  ) throws Exception {
+      List<Urn> tagUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService)
+      throws Exception {
     final List<MetadataChangeProposal> changes = new ArrayList<>();
     for (ResourceRefInput resource : resources) {
       changes.add(buildAddTagsProposal(tagUrns, resource, actor, entityService));
@@ -98,11 +100,8 @@ public static void addTagsToResources(
   }
 
   public static void removeTermsFromResources(
-      List<Urn> termUrns,
-      List<ResourceRefInput> resources,
-      Urn actor,
-      EntityService entityService
-  ) throws Exception {
+      List<Urn> termUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService)
+      throws Exception {
     final List<MetadataChangeProposal> changes = new ArrayList<>();
     for (ResourceRefInput resource : resources) {
       changes.add(buildRemoveTermsProposal(termUrns, resource, actor, entityService));
@@ -111,11 +110,8 @@ public static void removeTermsFromResources(
   }
 
   public static void addTermsToResources(
-      List<Urn> termUrns,
-      List<ResourceRefInput> resources,
-      Urn actor,
-      EntityService entityService
-  ) throws Exception {
+      List<Urn> termUrns, List<ResourceRefInput> resources, Urn actor, EntityService entityService)
+      throws Exception {
     final List<MetadataChangeProposal> changes = new ArrayList<>();
     for (ResourceRefInput resource : resources) {
       changes.add(buildAddTermsProposal(termUrns, resource, actor, entityService));
@@ -128,12 +124,16 @@ public static void addTermsToResource(
       Urn resourceUrn,
       String subResource,
       Urn actor,
-      EntityService entityService
-  ) throws URISyntaxException {
+      EntityService entityService)
+      throws URISyntaxException {
     if (subResource == null || subResource.equals("")) {
       com.linkedin.common.GlossaryTerms terms =
-          (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME,
-              entityService, new GlossaryTerms());
+          (com.linkedin.common.GlossaryTerms)
+              EntityUtils.getAspectFromEntity(
+                  resourceUrn.toString(),
+                  Constants.GLOSSARY_TERMS_ASPECT_NAME,
+                  entityService,
+                  new GlossaryTerms());
       terms.setAuditStamp(EntityUtils.getAuditStamp(actor));
 
       if (!terms.hasTerms()) {
@@ -144,10 +144,15 @@ public static void addTermsToResource(
       persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService);
     } else {
       com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata =
-          (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity(
-              resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata());
-
-      EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource);
+          (com.linkedin.schema.EditableSchemaMetadata)
+              EntityUtils.getAspectFromEntity(
+                  resourceUrn.toString(),
+                  Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+                  entityService,
+                  new EditableSchemaMetadata());
+
+      EditableSchemaFieldInfo editableFieldInfo =
+          getFieldInfoFromSchema(editableSchemaMetadata, subResource);
       if (!editableFieldInfo.hasGlossaryTerms()) {
         editableFieldInfo.setGlossaryTerms(new GlossaryTerms());
       }
@@ -155,7 +160,12 @@ public static void addTermsToResource(
       editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor));
 
       addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), labelUrns);
-      persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService);
+      persistAspect(
+          resourceUrn,
+          Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+          editableSchemaMetadata,
+          actor,
+          entityService);
     }
   }
@@ -181,17 +191,22 @@ private static GlossaryTermAssociationArray removeTermIfExists(GlossaryTerms ter
     return termArray;
   }
 
-  public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Urn targetUrn, String subResource) {
+  public static boolean isAuthorizedToUpdateTags(
+      @Nonnull QueryContext context, Urn targetUrn,
String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -201,19 +216,23 @@ public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Ur orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateTerms(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTerms( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType() - )) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -230,37 +249,56 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { for (Urn urn : labelUrns) { - validateResourceAndLabel(urn, resourceUrn, subResource, subResourceType, labelEntityType, entityService, isRemoving); + validateResourceAndLabel( + urn, + resourceUrn, + subResource, + subResourceType, + labelEntityType, + entityService, + isRemoving); } } - public static void validateLabel(Urn labelUrn, String labelEntityType, EntityService entityService) { + public static void validateLabel( + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. 
Urn type does not match entity type %s..", - labelUrn, - labelEntityType)); + throw new IllegalArgumentException( + String.format( + "Failed to validate label with urn %s. Urn type does not match entity type %s..", + labelUrn, labelEntityType)); } if (!entityService.exists(labelUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); } } // TODO: Move this out into a separate utilities class. - public static void validateResource(Urn resourceUrn, String subResource, SubResourceType subResourceType, EntityService entityService) { + public static void validateResource( + Urn resourceUrn, + String subResource, + SubResourceType subResourceType, + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); } if ((subResource != null && subResource.length() > 0) || subResourceType != null) { if (subResource == null || subResource.length() == 0) { - throw new IllegalArgumentException(String.format( - "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", resourceUrn, subResourceType)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", + resourceUrn, subResourceType)); } if (subResourceType == null) { - throw new IllegalArgumentException(String.format( - "Failed to updates resource with urn %s. SubResource (%s) provided without a subResourceType.", resourceUrn, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to updates resource with urn %s. 
SubResource (%s) provided without a subResourceType.", + resourceUrn, subResource)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); } @@ -273,8 +311,7 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { if (!isRemoving) { validateLabel(labelUrn, labelEntityType, entityService); } @@ -282,11 +319,8 @@ public static void validateResourceAndLabel( } private static MetadataChangeProposal buildAddTagsProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildAddTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -297,11 +331,8 @@ private static MetadataChangeProposal buildAddTagsProposal( } private static MetadataChangeProposal buildRemoveTagsProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildRemoveTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -312,82 +343,90 @@ private static MetadataChangeProposal buildRemoveTagsProposal( } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } removeTagsIfExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - entityService, - new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + 
EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildAddTagsToEntityProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } addTagsIfNotExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } - private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throws URISyntaxException { + private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) + throws URISyntaxException { if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } @@ -396,7 +435,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throw List 
tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -415,11 +455,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throw } private static MetadataChangeProposal buildAddTermsProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity return buildAddTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -430,11 +467,8 @@ private static MetadataChangeProposal buildAddTermsProposal( } private static MetadataChangeProposal buildRemoveTermsProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity return buildRemoveTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -445,14 +479,15 @@ private static MetadataChangeProposal buildRemoveTermsProposal( } private static MetadataChangeProposal buildAddTermsToEntityProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -460,20 +495,23 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal( } addTermsIfNotExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildAddTermsToSubResourceProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( 
+ resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -481,42 +519,48 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermsIfExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermsIfExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static void addTermsIfNotExists(GlossaryTerms terms, List termUrns) @@ -547,7 +591,8 @@ private 
static void addTermsIfNotExists(GlossaryTerms terms, List termUrns) } } - private static GlossaryTermAssociationArray removeTermsIfExists(GlossaryTerms terms, List termUrns) { + private static GlossaryTermAssociationArray removeTermsIfExists( + GlossaryTerms terms, List termUrns) { if (!terms.hasTerms()) { terms.setTerms(new GlossaryTermAssociationArray()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index 9ec0f9b8e6070..b93c72edbcfc5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; @@ -9,59 +12,59 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.entity.EntityUtils; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class LinkUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LinkUtils() { } + private LinkUtils() {} public static void addLink( - String linkUrl, - String linkLabel, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); addLink(institutionalMemoryAspect, linkUrl, linkLabel, actor); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } public static void removeLink( - String linkUrl, - Urn 
resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); removeLink(institutionalMemoryAspect, linkUrl); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } - private static void addLink(InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { + private static void addLink( + InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { if (!institutionalMemoryAspect.hasElements()) { institutionalMemoryAspect.setElements(new InstitutionalMemoryMetadataArray()); } @@ -90,10 +93,12 @@ private static void removeLink(InstitutionalMemory institutionalMemoryAspect, St } public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -104,21 +109,22 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( - String linkUrl, - Urn resourceUrn, - EntityService entityService - ) { + String linkUrl, Urn resourceUrn, EntityService entityService) { try { new Url(linkUrl); } catch (Exception e) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Expected a corp group urn.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Expected a corp group urn.", + resourceUrn)); } if (!entityService.exists(resourceUrn)) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Resource does not exist.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. 
Resource does not exist.", + resourceUrn)); } return true; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 7233995804423..15c3c14c7b8f6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -28,104 +30,124 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming from OwnerService @Slf4j public class OwnerUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static final String SYSTEM_ID = "__system__"; - private OwnerUtils() { } + private OwnerUtils() {} public static void addOwnersToResources( List owners, List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildAddOwnersProposal(owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); + changes.add( + buildAddOwnersProposal( + owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } public static void removeOwnersFromResources( - List ownerUrns, Optional maybeOwnershipTypeUrn, List resources, + List ownerUrns, + Optional maybeOwnershipTypeUrn, + List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveOwnersProposal(ownerUrns, maybeOwnershipTypeUrn, UrnUtils.getUrn(resource.getResourceUrn()), - actor, entityService)); + changes.add( + buildRemoveOwnersProposal( + ownerUrns, + maybeOwnershipTypeUrn, + UrnUtils.getUrn(resource.getResourceUrn()), + actor, + entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } - - static MetadataChangeProposal buildAddOwnersProposal(List owners, Urn resourceUrn, EntityService entityService) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, entityService, - new Ownership()); + static MetadataChangeProposal buildAddOwnersProposal( + List owners, Urn resourceUrn, EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); for (OwnerInput input : owners) { - addOwner(ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), 
UrnUtils.getUrn(input.getOwnershipTypeUrn())); + addOwner( + ownershipAspect, + UrnUtils.getUrn(input.getOwnerUrn()), + input.getType(), + UrnUtils.getUrn(input.getOwnershipTypeUrn())); } - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } public static MetadataChangeProposal buildRemoveOwnersProposal( - List ownerUrns, Optional maybeOwnershipTypeUrn, Urn resourceUrn, + List ownerUrns, + Optional maybeOwnershipTypeUrn, + Urn resourceUrn, Urn actor, - EntityService entityService - ) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, - entityService, - new Ownership()); + EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); removeOwnersIfExists(ownershipAspect, ownerUrns, maybeOwnershipTypeUrn); - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } - private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { + private static void addOwner( + Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { if (!ownershipAspect.hasOwners()) { ownershipAspect.setOwners(new OwnerArray()); } final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); - ownerArray.removeIf(owner -> { - // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) + ownerArray.removeIf( + owner -> { + // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(ownershipUrn); - } + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(ownershipUrn); + } - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(ownershipUrn.toString()); - }); + // Fall back to mapping deprecated type to the new ownership entity, if it matches remove + return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(ownershipUrn.toString()); + }); Owner newOwner = new Owner(); // For backwards compatibility we have to always set the deprecated type. // If the type exists we assume it's an old ownership type that we can map to. // Else if it's a net new custom ownership type set old type to CUSTOM. - com.linkedin.common.OwnershipType gmsType = type != null ? com.linkedin.common.OwnershipType.valueOf(type.toString()) - : com.linkedin.common.OwnershipType.CUSTOM; + com.linkedin.common.OwnershipType gmsType = + type != null + ? 
com.linkedin.common.OwnershipType.valueOf(type.toString()) + : com.linkedin.common.OwnershipType.CUSTOM; newOwner.setType(gmsType); newOwner.setTypeUrn(ownershipUrn); @@ -135,8 +157,8 @@ private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipT ownershipAspect.setOwners(ownerArray); } - private static void removeOwnersIfExists(Ownership ownership, List ownerUrns, - Optional maybeOwnershipTypeUrn) { + private static void removeOwnersIfExists( + Ownership ownership, List ownerUrns, Optional maybeOwnershipTypeUrn) { if (!ownership.hasOwners()) { ownership.setOwners(new OwnerArray()); } @@ -144,23 +166,26 @@ private static void removeOwnersIfExists(Ownership ownership, List ownerUrn OwnerArray ownerArray = ownership.getOwners(); for (Urn ownerUrn : ownerUrns) { if (maybeOwnershipTypeUrn.isPresent()) { - ownerArray.removeIf(owner -> { - // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(maybeOwnershipTypeUrn.get().toString()); - }); + ownerArray.removeIf( + owner -> { + // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) + + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } + + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); + } + + // Fall back to mapping deprecated type to the new ownership entity, if it matches + // remove + return mapOwnershipTypeToEntity( + OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(maybeOwnershipTypeUrn.get().toString()); + }); } else { ownerArray.removeIf(owner -> owner.getOwner().equals(ownerUrn)); } @@ -168,10 +193,12 @@ private static void removeOwnersIfExists(Ownership ownership, List ownerUrn } public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -182,10 +209,7 @@ public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, } public static Boolean validateAddOwnerInput( - List owners, - Urn resourceUrn, - EntityService entityService - ) { + List owners, Urn resourceUrn, EntityService entityService) { for (OwnerInput owner : owners) { boolean result = validateAddOwnerInput(owner, resourceUrn, entityService); if (!result) { @@ -196,13 +220,12 @@ public static Boolean validateAddOwnerInput( } public static Boolean validateAddOwnerInput( - OwnerInput owner, - Urn resourceUrn, - EntityService entityService - ) { + OwnerInput owner, Urn resourceUrn, EntityService 
entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); } validateOwner(owner, entityService); @@ -210,45 +233,55 @@ public static Boolean validateAddOwnerInput( return true; } - public static void validateOwner( - OwnerInput owner, - EntityService entityService - ) { + public static void validateOwner(OwnerInput owner, EntityService entityService) { OwnerEntityType ownerEntityType = owner.getOwnerEntityType(); Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn()); - if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) + && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp group urn, found %s", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp group urn, found %s", + ownerUrn)); } - if (OwnerEntityType.CORP_USER.equals(ownerEntityType) && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_USER.equals(ownerEntityType) + && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp user urn, found %s.", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp user urn, found %s.", + ownerUrn)); } if (!entityService.exists(ownerUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Owner with urn %s does not exist.", ownerUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Owner with urn %s does not exist.", + ownerUrn)); } - if (owner.getOwnershipTypeUrn() != null && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Custom Ownership type with " - + "urn %s does not exist.", owner.getOwnershipTypeUrn())); + if (owner.getOwnershipTypeUrn() != null + && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Custom Ownership type with " + + "urn %s does not exist.", + owner.getOwnershipTypeUrn())); } if (owner.getType() == null && owner.getOwnershipTypeUrn() == null) { - throw new IllegalArgumentException("Failed to change ownership for resource(s). Expected either " - + "type or ownershipTypeUrn to be specified."); + throw new IllegalArgumentException( + "Failed to change ownership for resource(s). Expected either " + + "type or ownershipTypeUrn to be specified."); } } - public static Boolean validateRemoveInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateRemoveInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. 
Resource does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); } return true; } @@ -264,15 +297,17 @@ public static void addCreatorAsOwner( String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name()); if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) { - throw new RuntimeException(String.format("Unknown ownership type urn %s", ownershipTypeUrn)); + throw new RuntimeException( + String.format("Unknown ownership type urn %s", ownershipTypeUrn)); } addOwnersToResources( - ImmutableList.of(new OwnerInput(actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), + ImmutableList.of( + new OwnerInput( + actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), ImmutableList.of(new ResourceRefInput(urn, null, null)), actorUrn, - entityService - ); + entityService); } catch (Exception e) { log.error(String.format("Failed to add creator as owner of tag %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java index f740836694dbe..0dd737d3b2292 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java @@ -1,32 +1,35 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; + import com.linkedin.common.Siblings; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import javax.annotation.Nonnull; public class SiblingsUtils { - private SiblingsUtils() { } + private SiblingsUtils() {} - public static List<Urn> getSiblingUrns(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { - final Siblings siblingAspectOfEntity = (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + public static List<Urn> getSiblingUrns( + @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { + final Siblings siblingAspectOfEntity = + (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { return siblingAspectOfEntity.getSiblings(); } return new ArrayList<>(); } - public static Optional<Urn> getNextSiblingUrn(@Nonnull final List<Urn> siblingUrns, @Nonnull final HashSet<Urn> usedUrns) { - final List<Urn> unusedSiblingUrns = siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); + public static Optional<Urn> getNextSiblingUrn( + @Nonnull final List<Urn> siblingUrns, @Nonnull final HashSet<Urn> usedUrns) { + final List<Urn> unusedSiblingUrns = + siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); return unusedSiblingUrns.stream().findFirst(); } }
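[Editor's note] The SiblingsUtils diff above is formatting-only, but the two helpers are easiest to read together with a usage sketch. The snippet below is hypothetical caller code, not part of this patch; it assumes an Urn named entityUrn and an EntityService named entityService are in scope, as they are in the resolvers that consume these utilities.

    // Illustration only: walk through siblings without revisiting one.
    HashSet<Urn> usedUrns = new HashSet<>();
    List<Urn> siblingUrns = SiblingsUtils.getSiblingUrns(entityUrn, entityService);
    Optional<Urn> nextSibling = SiblingsUtils.getNextSiblingUrn(siblingUrns, usedUrns);
    // Record the chosen sibling so the next lookup skips it.
    nextSibling.ifPresent(usedUrns::add);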
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java index c0fe697c6654c..abc479ed18ebf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -10,8 +17,6 @@ import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -30,22 +35,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for reporting Asset Operations - */ +/** Resolver used for reporting Asset Operations */ @Slf4j @RequiredArgsConstructor public class ReportOperationResolver implements DataFetcher<CompletableFuture<Boolean>> { - private static final List<String> SUPPORTED_ENTITY_TYPES = ImmutableList.of( - DATASET_ENTITY_NAME - ); + private static final List<String> SUPPORTED_ENTITY_TYPES = ImmutableList.of(DATASET_ENTITY_NAME); private final EntityClient _entityClient; @Override @@ -53,32 +48,36 @@ public class ReportOperationResolver implements DataFetcher<CompletableFuture<Boolean>> public CompletableFuture<Boolean> get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final ReportOperationInput input = bindArgument(environment.getArgument("input"), ReportOperationInput.class); - - return CompletableFuture.supplyAsync(() -> { - - Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - - if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - validateInput(entityUrn, input); - - try { - // Create an MCP to emit the operation - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, OPERATION_ASPECT_NAME, - mapOperation(input, context)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to report operation. 
{}", e.getMessage()); - throw new RuntimeException("Failed to report operation", e); - } - }); + final ReportOperationInput input = + bindArgument(environment.getArgument("input"), ReportOperationInput.class); + + return CompletableFuture.supplyAsync( + () -> { + Urn entityUrn = UrnUtils.getUrn(input.getUrn()); + + if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + validateInput(entityUrn, input); + + try { + // Create an MCP to emit the operation + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + entityUrn, OPERATION_ASPECT_NAME, mapOperation(input, context)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error("Failed to report operation. {}", e.getMessage()); + throw new RuntimeException("Failed to report operation", e); + } + }); } - private Operation mapOperation(final ReportOperationInput input, final QueryContext context) throws URISyntaxException { + private Operation mapOperation(final ReportOperationInput input, final QueryContext context) + throws URISyntaxException { final Operation result = new Operation(); result.setActor(UrnUtils.getUrn(context.getActorUrn())); @@ -86,13 +85,17 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont result.setCustomOperationType(input.getCustomOperationType(), SetMode.IGNORE_NULL); result.setNumAffectedRows(input.getNumAffectedRows(), SetMode.IGNORE_NULL); - long timestampMillis = input.getTimestampMillis() != null ? input.getTimestampMillis() : System.currentTimeMillis(); + long timestampMillis = + input.getTimestampMillis() != null + ? input.getTimestampMillis() + : System.currentTimeMillis(); result.setLastUpdatedTimestamp(timestampMillis); result.setTimestampMillis(timestampMillis); result.setSourceType(OperationSourceType.valueOf(input.getSourceType().toString())); if (input.getPartition() != null) { - result.setPartitionSpec(new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); + result.setPartitionSpec( + new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); } if (input.getCustomProperties() != null) { @@ -102,7 +105,8 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont return result; } - private StringMap mapCustomProperties(final List properties) throws URISyntaxException { + private StringMap mapCustomProperties(final List properties) + throws URISyntaxException { final StringMap result = new StringMap(); for (StringMapEntryInput entry : properties) { result.put(entry.getKey(), entry.getValue()); @@ -113,16 +117,21 @@ private StringMap mapCustomProperties(final List properties private void validateInput(final Urn entityUrn, final ReportOperationInput input) { if (!SUPPORTED_ENTITY_TYPES.contains(entityUrn.getEntityType())) { throw new DataHubGraphQLException( - String.format("Unable to report operation. Invalid entity type %s provided.", entityUrn.getEntityType()), + String.format( + "Unable to report operation. 
Invalid entity type %s provided.", + entityUrn.getEntityType()), DataHubGraphQLErrorCode.BAD_REQUEST); } } - private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToReportOperationForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -131,4 +140,4 @@ private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, resourceUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java index 4cfe58072aae9..a0cffa5eca44c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.metadata.service.OwnershipTypeService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -16,17 +18,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class CreateOwnershipTypeResolver implements DataFetcher> { +public class CreateOwnershipTypeResolver + implements DataFetcher> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateOwnershipTypeInput input = bindArgument(environment.getArgument("input"), CreateOwnershipTypeInput.class); @@ -36,19 +37,25 @@ public CompletableFuture get(DataFetchingEnvironment enviro "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = _ownershipTypeService.createOwnershipType(input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - return createOwnershipType(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = + _ownershipTypeService.createOwnershipType( + input.getName(), + input.getDescription(), + context.getAuthentication(), + System.currentTimeMillis()); + return createOwnershipType(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } - private OwnershipTypeEntity createOwnershipType(@Nonnull final Urn urn, - @Nonnull final CreateOwnershipTypeInput input) { + private OwnershipTypeEntity createOwnershipType( + @Nonnull final Urn urn, @Nonnull final CreateOwnershipTypeInput input) { return OwnershipTypeEntity.builder() .setUrn(urn.toString()) .setType(EntityType.CUSTOM_OWNERSHIP_TYPE) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java index 87cf70193d7fd..c5bb58a7d4b2e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java @@ -12,7 +12,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteOwnershipTypeResolver implements DataFetcher> { @@ -26,21 +25,26 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn urn = UrnUtils.getUrn(ownershipTypeUrn); // By default, delete references final boolean deleteReferences = - environment.getArgument("deleteReferences") == null ? true : environment.getArgument("deleteReferences"); + environment.getArgument("deleteReferences") == null + ? true + : environment.getArgument("deleteReferences"); if (!AuthorizationUtils.canManageOwnershipTypes(context)) { throw new AuthorizationException( "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.deleteOwnershipType(urn, deleteReferences, context.getAuthentication()); - log.info(String.format("Successfully deleted ownership type %s with urn", urn)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _ownershipTypeService.deleteOwnershipType( + urn, deleteReferences, context.getAuthentication()); + log.info(String.format("Successfully deleted ownership type %s with urn", urn)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 70441815f0a74..1c8f43a490173 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -24,18 +26,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListOwnershipTypesResolver implements - DataFetcher> { +public class ListOwnershipTypesResolver + implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,43 +42,47 @@ public class ListOwnershipTypesResolver implements private final EntityClient _entityClient; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListOwnershipTypesInput input = bindArgument(environment.getArgument("input"), - ListOwnershipTypesInput.class); - - return 
CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - + final ListOwnershipTypesInput input = + bindArgument(environment.getArgument("input"), ListOwnershipTypesInput.class); - try { + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final List filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - final SearchResult gmsResult = _entityClient.search( - Constants.OWNERSHIP_TYPE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { - final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setOwnershipTypes(mapUnresolvedOwnershipTypes(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list custom ownership types", e); - } + final SearchResult gmsResult = + _entityClient.search( + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + DEFAULT_SORT_CRITERION, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - }); + final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setOwnershipTypes( + mapUnresolvedOwnershipTypes( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list custom ownership types", e); + } + }); } private List mapUnresolvedOwnershipTypes(List entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java index 43fd249304397..839121a295d9a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,17 +19,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class 
UpdateOwnershipTypeResolver implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> { +public class UpdateOwnershipTypeResolver + implements DataFetcher<CompletableFuture<OwnershipTypeEntity>> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final UpdateOwnershipTypeInput input = @@ -39,27 +40,35 @@ public CompletableFuture<OwnershipTypeEntity> get(DataFetchingEnvironment enviro "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.updateOwnershipType(urn, input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); - return getOwnershipType(urn, context.getAuthentication()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _ownershipTypeService.updateOwnershipType( + urn, + input.getName(), + input.getDescription(), + context.getAuthentication(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated Ownership Type with urn %s", urn)); + return getOwnershipType(urn, context.getAuthentication()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Ownership Type with urn %s", urn), e); + } + }); } - private OwnershipTypeEntity getOwnershipType(@Nonnull final Urn urn, - @Nonnull final Authentication authentication) { - final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); + private OwnershipTypeEntity getOwnershipType( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + final EntityResponse maybeResponse = + _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", + String.format( + "Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", urn)); } return OwnershipTypeMapper.map(maybeResponse);
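[Editor's note] The reformatting in this patch makes the shared shape of these mutation resolvers easier to see: bind the input, check authorization, then do the work inside CompletableFuture.supplyAsync with a catch-and-rethrow. A minimal sketch of that shape follows; doWork and the message are placeholders standing in for each resolver's specific logic, not names from this patch.

    // Recurring resolver pattern (sketch; doWork is a placeholder).
    return CompletableFuture.supplyAsync(
        () -> {
          try {
            return doWork();
          } catch (Exception e) {
            // Wrap checked failures so GraphQL surfaces one error type.
            throw new RuntimeException("Failed to perform the requested mutation", e);
          }
        });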
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java index 485d40e60547e..567745b894ca9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Resolver responsible for hard deleting a particular DataHub access control policy. - */ +/** Resolver responsible for hard deleting a particular DataHub access control policy. */ public class DeletePolicyResolver implements DataFetcher<CompletableFuture<String>> { private final EntityClient _entityClient; @@ -27,18 +24,24 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final String policyUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(policyUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return policyUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against policy with urn %s", policyUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return policyUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against policy with urn %s", policyUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 11f7793db82c8..3328eff2bdf45 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.EntitySpec; @@ -14,17 +16,15 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - /** - * Resolver to support the getGrantedPrivileges end point - * Fetches all privileges that are granted for the given actor for the given resource (optional) + * Resolver to support the getGrantedPrivileges end point. Fetches all privileges that are granted + * for the given actor for the given resource (optional) */ public class GetGrantedPrivilegesResolver implements DataFetcher<CompletableFuture<Privileges>> { @Override - public CompletableFuture<Privileges> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture<Privileges> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final GetGrantedPrivilegesInput input = @@ -33,22 +33,27 @@ public CompletableFuture<Privileges> get(final DataFetchingEnvironment environme if (!isAuthorized(context, actor)) { throw new AuthorizationException("Unauthorized to get privileges for the given actor."); } - final Optional<EntitySpec> resourceSpec = Optional.ofNullable(input.getResourceSpec()) - .map(spec -> new 
EntitySpec(EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); + final Optional resourceSpec = + Optional.ofNullable(input.getResourceSpec()) + .map( + spec -> + new EntitySpec( + EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); if (context.getAuthorizer() instanceof AuthorizerChain) { - DataHubAuthorizer dataHubAuthorizer = ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); + DataHubAuthorizer dataHubAuthorizer = + ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); List privileges = dataHubAuthorizer.getGrantedPrivileges(actor, resourceSpec); - return CompletableFuture.supplyAsync(() -> Privileges.builder() - .setPrivileges(privileges) - .build()); + return CompletableFuture.supplyAsync( + () -> Privileges.builder().setPrivileges(privileges).build()); } throw new UnsupportedOperationException( - String.format("GetGrantedPrivileges function is not supported on authorizer of type %s", + String.format( + "GetGrantedPrivileges function is not supported on authorizer of type %s", context.getAuthorizer().getClass().getSimpleName())); } private boolean isAuthorized(final QueryContext context, final String actor) { return actor.equals(context.getActorUrn()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index b44da1c2f832c..87832b8c3aa40 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.PolicyFetcher; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - public class ListPoliciesResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -30,18 +29,22 @@ public ListPoliciesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (PolicyAuthUtils.canManagePolicies(context)) { - final ListPoliciesInput input = bindArgument(environment.getArgument("input"), ListPoliciesInput.class); + final ListPoliciesInput input = + bindArgument(environment.getArgument("input"), ListPoliciesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery(); - return _policyFetcher.fetchPolicies(start, query, count, context.getAuthentication()) - .thenApply(policyFetchResult -> { + return _policyFetcher + .fetchPolicies(start, query, count, context.getAuthentication()) + .thenApply( + policyFetchResult -> { final ListPoliciesResult result = new ListPoliciesResult(); result.setStart(start); result.setCount(count); @@ -50,14 +53,18 @@ public CompletableFuture get(final DataFetchingEnvironment e return result; }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private List mapEntities(final List policies) { - return policies.stream().map(policy -> { - Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); - mappedPolicy.setUrn(policy.getUrn().toString()); - return mappedPolicy; - }).collect(Collectors.toList()); + return policies.stream() + .map( + policy -> { + Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); + mappedPolicy.setUrn(policy.getUrn().toString()); + return mappedPolicy; + }) + .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java index dcc5d1fd23302..d0446d218dac6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class PolicyAuthUtils { static boolean canManagePolicies(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), + authorizer); } - private PolicyAuthUtils() { } + private PolicyAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java index 6dcc143a1a3af..dcdf78ebc15bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.AuthorizerChain; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; 
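[Editor's note] The authorization checks reshaped across these files share one structure: a DisjunctivePrivilegeGroup is an OR over ConjunctivePrivilegeGroups, and a conjunctive group passes only if the actor holds every privilege in it. A sketch using constants that appear in the diffs above; the particular combination is illustrative, not a check added by this patch.

    // Authorized if the actor holds EDIT_ENTITY (via ALL_PRIVILEGES_GROUP), or
    // holds the narrower EDIT_ENTITY_OWNERS privilege on its own.
    final DisjunctivePrivilegeGroup orPrivilegeGroups =
        new DisjunctivePrivilegeGroup(
            ImmutableList.of(
                ALL_PRIVILEGES_GROUP,
                new ConjunctivePrivilegeGroup(
                    ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()))));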
@@ -16,10 +19,6 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - public class UpsertPolicyResolver implements DataFetcher> { private static final String POLICY_ENTITY_NAME = "dataHubPolicy"; @@ -38,7 +37,8 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final Optional policyUrn = Optional.ofNullable(environment.getArgument("urn")); - final PolicyUpdateInput input = bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); + final PolicyUpdateInput input = + bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); // Finally, create the MetadataChangeProposal. final MetadataChangeProposal proposal; @@ -48,7 +48,9 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (policyUrn.isPresent()) { // Update existing policy - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); } else { // Create new policy // Since we are creating a new Policy, we need to generate a unique UUID. @@ -58,21 +60,29 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Create the Policy key. final DataHubPolicyKey key = new DataHubPolicyKey(); key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithKey( + key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); } - return CompletableFuture.supplyAsync(() -> { - try { - String urn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return urn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String urn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return urn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java index b9a6bf07be8c8..a350fb91f9d3b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.Policy; import com.linkedin.datahub.graphql.generated.PolicyMatchCondition; import com.linkedin.datahub.graphql.generated.PolicyMatchCriterion; @@ -9,7 +10,6 @@ import com.linkedin.datahub.graphql.generated.PolicyMatchFilter; import com.linkedin.datahub.graphql.generated.PolicyState; import com.linkedin.datahub.graphql.generated.PolicyType; -import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.ResourceFilter; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -20,9 +20,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link com.linkedin.datahub.graphql.generated.Policy}. + * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link + * com.linkedin.datahub.graphql.generated.Policy}. */ public class PolicyInfoPolicyMapper implements ModelMapper { @@ -56,16 +56,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { result.setResourceOwners(actorFilter.isResourceOwners()); UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -87,14 +91,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - 
.collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(this::mapValue) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -102,7 +112,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java index cb323b60dd465..d82d71295d41b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java @@ -19,11 +19,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -/** - * Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. - */ -public class PolicyUpdateInputInfoMapper implements ModelMapper { +/** Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. 
*/ +public class PolicyUpdateInputInfoMapper + implements ModelMapper { public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper(); @@ -52,13 +50,21 @@ private DataHubActorFilter mapActors(final ActorFilterInput actorInput) { result.setAllUsers(actorInput.getAllUsers()); result.setResourceOwners(actorInput.getResourceOwners()); if (actorInput.getResourceOwnersTypes() != null) { - result.setResourceOwnersTypes(new UrnArray(actorInput.getResourceOwnersTypes().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setResourceOwnersTypes( + new UrnArray( + actorInput.getResourceOwnersTypes().stream() + .map(this::createUrn) + .collect(Collectors.toList()))); } if (actorInput.getGroups() != null) { - result.setGroups(new UrnArray(actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setGroups( + new UrnArray( + actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); } if (actorInput.getUsers() != null) { - result.setUsers(new UrnArray(actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setUsers( + new UrnArray( + actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); } return result; } @@ -83,19 +89,26 @@ private DataHubResourceFilter mapResources(final ResourceFilterInput resourceInp } private PolicyMatchFilter mapFilter(final PolicyMatchFilterInput filter) { - return new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray(filter.getCriteria() - .stream() - .map(criterion -> new PolicyMatchCriterion().setField(criterion.getField()) - .setValues(new StringArray(criterion.getValues())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name()))) - .collect(Collectors.toList()))); + return new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + filter.getCriteria().stream() + .map( + criterion -> + new PolicyMatchCriterion() + .setField(criterion.getField()) + .setValues(new StringArray(criterion.getValues())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name()))) + .collect(Collectors.toList()))); } private Urn createUrn(String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urnStr %s into an URN object", urnStr), e); + throw new RuntimeException( + String.format("Failed to convert urnStr %s into an URN object", urnStr), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java index 524caf14e9afe..8e0ee335e09f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -18,16 +20,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreatePostResolver implements DataFetcher> { private 
final PostService _postService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { @@ -35,7 +35,8 @@ public CompletableFuture get(final DataFetchingEnvironment environment) "Unauthorized to create posts. Please contact your DataHub administrator if this needs corrective action."); } - final CreatePostInput input = bindArgument(environment.getArgument("input"), CreatePostInput.class); + final CreatePostInput input = + bindArgument(environment.getArgument("input"), CreatePostInput.class); final PostType type = input.getPostType(); final UpdatePostContentInput content = input.getContent(); final PostContentType contentType = content.getContentType(); @@ -45,16 +46,21 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final UpdateMediaInput updateMediaInput = content.getMedia(); final Authentication authentication = context.getAuthentication(); - Media media = updateMediaInput == null ? null - : _postService.mapMedia(updateMediaInput.getType().toString(), updateMediaInput.getLocation()); - PostContent postContent = _postService.mapPostContent(contentType.toString(), title, description, link, media); - - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.createPost(type.toString(), postContent, authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create a new post", e); - } - }); + Media media = + updateMediaInput == null + ? null + : _postService.mapMedia( + updateMediaInput.getType().toString(), updateMediaInput.getLocation()); + PostContent postContent = + _postService.mapPostContent(contentType.toString(), title, description, link, media); + + return CompletableFuture.supplyAsync( + () -> { + try { + return _postService.createPost(type.toString(), postContent, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java index d3cd0126fb852..7ab5d1381a1b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java @@ -13,14 +13,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeletePostResolver implements DataFetcher> { private final PostService _postService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canManageGlobalAnnouncements(context)) { @@ -31,12 +31,13 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final Urn postUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.deletePost(postUrn, authentication); - } catch (Exception e) { - throw 
new RuntimeException("Failed to create a new post", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _postService.deletePost(postUrn, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 59f2b458fdc90..5292adbe3aac3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -22,10 +25,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListPostsResolver implements DataFetcher> { @@ -36,38 +35,58 @@ public class ListPostsResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final ListPostsInput input = bindArgument(environment.getArgument("input"), ListPostsInput.class); + final ListPostsInput input = + bindArgument(environment.getArgument("input"), ListPostsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(LAST_MODIFIED_FIELD_NAME).setOrder(SortOrder.DESCENDING); + return CompletableFuture.supplyAsync( + () -> { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); - // First, get all Post Urns. - final SearchResult gmsResult = _entityClient.search(POST_ENTITY_NAME, query, null, sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + // First, get all Post Urns. + final SearchResult gmsResult = + _entityClient.search( + POST_ENTITY_NAME, + query, + null, + sortCriterion, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all Posts. - final Map entities = _entityClient.batchGetV2(POST_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, authentication); + // Then, get and hydrate all Posts. 
+ final Map entities = + _entityClient.batchGetV2( + POST_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + authentication); - final ListPostsResult result = new ListPostsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setPosts(entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list posts", e); - } - }); + final ListPostsResult result = new ListPostsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setPosts( + entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list posts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java index 27de443bc100a..48f31fb75d371 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -7,8 +9,8 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateQueryInput; -import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.CreateQuerySubjectInput; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.types.query.QueryMapper; import com.linkedin.metadata.service.QueryService; import com.linkedin.query.QueryLanguage; @@ -22,9 +24,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateQueryResolver implements DataFetcher> { @@ -32,40 +31,49 @@ public class CreateQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateQueryInput input = bindArgument(environment.getArgument("input"), CreateQueryInput.class); + final CreateQueryInput input = + bindArgument(environment.getArgument("input"), CreateQueryInput.class); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!AuthorizationUtils.canCreateQuery(input.getSubjects() - .stream() - .map(CreateQuerySubjectInput::getDatasetUrn).map(UrnUtils::getUrn) - .collect(Collectors.toList()), context)) { - throw new AuthorizationException( - "Unauthorized to create Query. 
Please contact your DataHub administrator for more information."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateQuery( + input.getSubjects().stream() + .map(CreateQuerySubjectInput::getDatasetUrn) + .map(UrnUtils::getUrn) + .collect(Collectors.toList()), + context)) { + throw new AuthorizationException( + "Unauthorized to create Query. Please contact your DataHub administrator for more information."); + } - try { - final Urn queryUrn = _queryService.createQuery( - input.getProperties().getName(), - input.getProperties().getDescription(), - QuerySource.MANUAL, - new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())), - input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()), - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new Query from input %s", input), e); - } - }); + try { + final Urn queryUrn = + _queryService.createQuery( + input.getProperties().getName(), + input.getProperties().getDescription(), + QuerySource.MANUAL, + new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())), + input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()), + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java index 5c5bb288f32bf..4f5887c91b494 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java @@ -18,7 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteQueryResolver implements DataFetcher> { @@ -26,29 +25,34 @@ public class DeleteQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); - final List subjectUrns = existingSubjects != null - ? 
existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()) - : Collections.emptyList(); - - if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); - } - - try { - _queryService.deleteQuery(queryUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Query", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); + final List subjectUrns = + existingSubjects != null + ? existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()) + : Collections.emptyList(); + + if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); + } + + try { + _queryService.deleteQuery(queryUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Query", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index c7e70cac15bdb..fec5bb120eeba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -29,10 +32,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListQueriesResolver implements DataFetcher> { @@ -48,38 +47,52 @@ public class ListQueriesResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListQueriesInput input = bindArgument(environment.getArgument("input"), ListQueriesInput.class); + final ListQueriesInput input = + bindArgument(environment.getArgument("input"), ListQueriesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); - - // First, get all Query Urns. 
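Worth noting for reviewers: the CompletableFuture.supplyAsync reflows above are mechanical, but the idiom itself is load-bearing. Supplier.get() cannot declare checked exceptions, so every resolver wraps its body in try/catch and rethrows as an unchecked RuntimeException. A minimal sketch of the idiom; doWork() is a hypothetical stand-in for the underlying service call:

    return CompletableFuture.supplyAsync(
        () -> {
          try {
            return doWork(); // hypothetical stand-in for the service call
          } catch (Exception e) {
            // Supplier.get() cannot throw checked exceptions; wrap and rethrow.
            throw new RuntimeException("Failed to do work", e);
          }
        });

google-java-format moves each lambda onto its own indented block, which is why these hunks look much larger than the underlying change.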
- final SearchResult gmsResult = _entityClient.search(QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true).setSkipHighlighting(true)); - - final ListQueriesResult result = new ListQueriesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setQueries(mapUnresolvedQueries(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Queries", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final SortCriterion sortCriterion = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); + + // First, get all Query Urns. + final SearchResult gmsResult = + _entityClient.search( + QUERY_ENTITY_NAME, + query, + buildFilters(input), + sortCriterion, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true).setSkipHighlighting(true)); + + final ListQueriesResult result = new ListQueriesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setQueries( + mapUnresolvedQueries( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Queries", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial Query objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Query objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedQueries(final List queryUrns) { final List results = new ArrayList<>(); for (final Urn urn : queryUrns) { @@ -99,13 +112,23 @@ private Filter buildFilters(@Nonnull final ListQueriesInput input) { // Optionally add a source filter. if (input.getSource() != null) { andConditions.add( - new FacetFilterInput(QUERY_SOURCE_FIELD, null, ImmutableList.of(input.getSource().toString()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_SOURCE_FIELD, + null, + ImmutableList.of(input.getSource().toString()), + false, + FilterOperator.EQUAL)); } // Optionally add an entity type filter. 
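For reference, each condition assembled here is a FacetFilterInput whose positional arguments mirror the call above: field name, the deprecated single-value slot, the value list, a negation flag, and the comparison operator. A hedged sketch with an illustrative field name and value, not ones this resolver actually uses:

    final List<FacetFilterInput> andConditions = new ArrayList<>();
    andConditions.add(
        new FacetFilterInput(
            "origin",                      // illustrative facet field
            null,                          // deprecated single-value slot
            ImmutableList.of("EXTERNAL"),  // values to match
            false,                         // negated
            FilterOperator.EQUAL));        // comparison operator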
if (input.getDatasetUrn() != null) { andConditions.add( - new FacetFilterInput(QUERY_ENTITIES_FIELD, null, ImmutableList.of(input.getDatasetUrn()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_ENTITIES_FIELD, + null, + ImmutableList.of(input.getDatasetUrn()), + false, + FilterOperator.EQUAL)); } criteria.setAnd(andConditions); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index ef34e91d8fe77..cc284aaf7b563 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,9 +28,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class UpdateQueryResolver implements DataFetcher> { @@ -36,60 +35,72 @@ public class UpdateQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateQueryInput input = bindArgument(environment.getArgument("input"), UpdateQueryInput.class); + final UpdateQueryInput input = + bindArgument(environment.getArgument("input"), UpdateQueryInput.class); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); - if (existingSubjects == null) { - // No Query Found - throw new DataHubGraphQLException(String.format("Failed to find query with urn %s", queryUrn), DataHubGraphQLErrorCode.NOT_FOUND); - } + if (existingSubjects == null) { + // No Query Found + throw new DataHubGraphQLException( + String.format("Failed to find query with urn %s", queryUrn), + DataHubGraphQLErrorCode.NOT_FOUND); + } - final List subjectUrns = existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()); - final List newSubjectUrns = input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) - .collect(Collectors.toList()) - : Collections.emptyList(); - final List impactedSubjectUrns = new ArrayList<>(); - impactedSubjectUrns.addAll(subjectUrns); - impactedSubjectUrns.addAll(newSubjectUrns); + final List subjectUrns = + existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()); + final List newSubjectUrns = + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) + .collect(Collectors.toList()) + : Collections.emptyList(); + final List impactedSubjectUrns = new ArrayList<>(); + impactedSubjectUrns.addAll(subjectUrns); + impactedSubjectUrns.addAll(newSubjectUrns); - if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); - } + if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); + } - try { - _queryService.updateQuery( - queryUrn, - input.getProperties() != null ? input.getProperties().getName() : null, - input.getProperties() != null ? input.getProperties().getDescription() : null, - input.getProperties() != null && input.getProperties().getStatement() != null - ? new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())) - : null, - input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()) - : null, - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Query from input %s", input), e); - } - }); + try { + _queryService.updateQuery( + queryUrn, + input.getProperties() != null ? input.getProperties().getName() : null, + input.getProperties() != null ? input.getProperties().getDescription() : null, + input.getProperties() != null && input.getProperties().getStatement() != null + ? new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())) + : null, + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()) + : null, + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index df1a6d4d4b00d..ca1e01b45989d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.recommendation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ContentParams; @@ -31,12 +33,10 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListRecommendationsResolver implements DataFetcher> { +public class ListRecommendationsResolver + implements DataFetcher> { private static final ListRecommendationsResult EMPTY_RECOMMENDATIONS = new ListRecommendationsResult(Collections.emptyList()); @@ -49,24 +49,28 @@ public CompletableFuture get(DataFetchingEnvironment final ListRecommendationsInput input = bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Listing recommendations for input {}", input); - List modules = - _recommendationsService.listRecommendations(Urn.createFromString(input.getUserUrn()), - mapRequestContext(input.getRequestContext()), input.getLimit()); - return ListRecommendationsResult.builder() - .setModules(modules.stream() - .map(this::mapRecommendationModule) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList())) - .build(); - } catch (Exception e) { - log.error("Failed to get recommendations for input {}", input, e); - return EMPTY_RECOMMENDATIONS; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug("Listing recommendations for input {}", input); + List modules = + _recommendationsService.listRecommendations( + Urn.createFromString(input.getUserUrn()), + mapRequestContext(input.getRequestContext()), + input.getLimit()); + return ListRecommendationsResult.builder() + .setModules( + modules.stream() + .map(this::mapRecommendationModule) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList())) + .build(); + } catch (Exception e) { + log.error("Failed to get recommendations for input {}", input, e); + return EMPTY_RECOMMENDATIONS; + } + }); } private com.linkedin.metadata.recommendation.RecommendationRequestContext mapRequestContext( @@ -74,22 +78,24 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq com.linkedin.metadata.recommendation.ScenarioType mappedScenarioType; try { mappedScenarioType = - 
com.linkedin.metadata.recommendation.ScenarioType.valueOf(requestContext.getScenario().toString()); + com.linkedin.metadata.recommendation.ScenarioType.valueOf( + requestContext.getScenario().toString()); } catch (IllegalArgumentException e) { log.error("Failed to map scenario type: {}", requestContext.getScenario(), e); throw e; } com.linkedin.metadata.recommendation.RecommendationRequestContext mappedRequestContext = - new com.linkedin.metadata.recommendation.RecommendationRequestContext().setScenario(mappedScenarioType); + new com.linkedin.metadata.recommendation.RecommendationRequestContext() + .setScenario(mappedScenarioType); if (requestContext.getSearchRequestContext() != null) { SearchRequestContext searchRequestContext = new SearchRequestContext().setQuery(requestContext.getSearchRequestContext().getQuery()); if (requestContext.getSearchRequestContext().getFilters() != null) { - searchRequestContext.setFilters(new CriterionArray(requestContext.getSearchRequestContext() - .getFilters() - .stream() - .map(facetField -> criterionFromFilter(facetField)) - .collect(Collectors.toList()))); + searchRequestContext.setFilters( + new CriterionArray( + requestContext.getSearchRequestContext().getFilters().stream() + .map(facetField -> criterionFromFilter(facetField)) + .collect(Collectors.toList()))); } mappedRequestContext.setSearchRequestContext(searchRequestContext); } @@ -98,12 +104,17 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq try { entityUrn = Urn.createFromString(requestContext.getEntityRequestContext().getUrn()); } catch (URISyntaxException e) { - log.error("Malformed URN while mapping recommendations request: {}", - requestContext.getEntityRequestContext().getUrn(), e); + log.error( + "Malformed URN while mapping recommendations request: {}", + requestContext.getEntityRequestContext().getUrn(), + e); throw new IllegalArgumentException(e); } - EntityRequestContext entityRequestContext = new EntityRequestContext().setUrn(entityUrn) - .setType(EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); + EntityRequestContext entityRequestContext = + new EntityRequestContext() + .setUrn(entityUrn) + .setType( + EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); mappedRequestContext.setEntityRequestContext(entityRequestContext); } return mappedRequestContext; @@ -115,13 +126,16 @@ private Optional mapRecommendationModule( mappedModule.setTitle(module.getTitle()); mappedModule.setModuleId(module.getModuleId()); try { - mappedModule.setRenderType(RecommendationRenderType.valueOf(module.getRenderType().toString())); + mappedModule.setRenderType( + RecommendationRenderType.valueOf(module.getRenderType().toString())); } catch (IllegalArgumentException e) { log.error("Failed to map render type: {}", module.getRenderType(), e); throw e; } mappedModule.setContent( - module.getContent().stream().map(this::mapRecommendationContent).collect(Collectors.toList())); + module.getContent().stream() + .map(this::mapRecommendationContent) + .collect(Collectors.toList())); return Optional.of(mappedModule); } @@ -145,26 +159,31 @@ private RecommendationParams mapRecommendationParams( SearchParams searchParams = new SearchParams(); searchParams.setQuery(params.getSearchParams().getQuery()); if (!params.getSearchParams().getFilters().isEmpty()) { - searchParams.setFilters(params.getSearchParams() - .getFilters() - .stream() - .map(criterion -> FacetFilter.builder().setField(criterion.getField()).setValues( - 
ImmutableList.of(criterion.getValue())).build()) - .collect(Collectors.toList())); + searchParams.setFilters( + params.getSearchParams().getFilters().stream() + .map( + criterion -> + FacetFilter.builder() + .setField(criterion.getField()) + .setValues(ImmutableList.of(criterion.getValue())) + .build()) + .collect(Collectors.toList())); } mappedParams.setSearchParams(searchParams); } if (params.hasEntityProfileParams()) { Urn profileUrn = params.getEntityProfileParams().getUrn(); - mappedParams.setEntityProfileParams(EntityProfileParams.builder() - .setUrn(profileUrn.toString()) - .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) - .build()); + mappedParams.setEntityProfileParams( + EntityProfileParams.builder() + .setUrn(profileUrn.toString()) + .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) + .build()); } if (params.hasContentParams()) { - mappedParams.setContentParams(ContentParams.builder().setCount(params.getContentParams().getCount()).build()); + mappedParams.setContentParams( + ContentParams.builder().setCount(params.getContentParams().getCount()).build()); } return mappedParams; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java index 43d975344ba25..a71da7821f09c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.datahub.authorization.role.RoleService; @@ -13,11 +15,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j - @RequiredArgsConstructor public class AcceptRoleResolver implements DataFetcher> { private final RoleService _roleService; @@ -27,25 +25,32 @@ public class AcceptRoleResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final AcceptRoleInput input = bindArgument(environment.getArgument("input"), AcceptRoleInput.class); + final AcceptRoleInput input = + bindArgument(environment.getArgument("input"), AcceptRoleInput.class); final String inviteTokenStr = input.getInviteToken(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { - throw new RuntimeException(String.format("Invite token %s is invalid", inviteTokenStr)); - } - - final Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); - _roleService.batchAssignRoleToActors(Collections.singletonList(authentication.getActor().toUrnStr()), roleUrn, - authentication); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to accept role using invite token %s", inviteTokenStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn inviteTokenUrn = 
_inviteTokenService.getInviteTokenUrn(inviteTokenStr); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { + throw new RuntimeException( + String.format("Invite token %s is invalid", inviteTokenStr)); + } + + final Urn roleUrn = + _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); + _roleService.batchAssignRoleToActors( + Collections.singletonList(authentication.getActor().toUrnStr()), + roleUrn, + authentication); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to accept role using invite token %s", inviteTokenStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java index dc847069afae9..1997d0ac74601 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.linkedin.common.urn.Urn; @@ -13,10 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAssignRoleResolver implements DataFetcher> { @@ -30,19 +29,22 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw "Unauthorized to assign roles. Please contact your DataHub administrator if this needs corrective action."); } - final BatchAssignRoleInput input = bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); + final BatchAssignRoleInput input = + bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); final String roleUrnStr = input.getRoleUrn(); final List actors = input.getActors(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn roleUrn = roleUrnStr == null ? null : Urn.createFromString(roleUrnStr); - _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn roleUrn = roleUrnStr == null ? 
null : Urn.createFromString(roleUrnStr); + _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 6bdf52e2f89f1..61ecf09fc91a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateInviteTokenResolver implements DataFetcher> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to create invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final CreateInviteTokenInput input = bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); + final CreateInviteTokenInput input = + bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 0b0cbbb7ba473..066753c4f7559 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class GetInviteTokenResolver implements DataFetcher> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to get invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final GetInviteTokenInput input = bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); + final GetInviteTokenInput input = + bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to get invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to get invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index 4746370d8603b..a1dd9219f6549 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListRolesResolver implements DataFetcher> { @@ -38,36 +37,51 @@ public class ListRolesResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListRolesInput input = bindArgument(environment.getArgument("input"), ListRolesInput.class); + final ListRolesInput input = + bindArgument(environment.getArgument("input"), ListRolesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all role Urns. - final SearchResult gmsResult = - _entityClient.search(DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all role Urns. + final SearchResult gmsResult = + _entityClient.search( + DATAHUB_ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all users. 
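One detail that survives the reflow across all of these search calls is the SearchFlags builder: the list endpoints enable full-text matching, and ListQueriesResolver above also skips highlighting, presumably because list results are never rendered with match snippets. A one-line sketch (the variable name is illustrative):

    final SearchFlags listFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true);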
- final Map entities = _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, context.getAuthentication()); + // Then, get and hydrate all users. + final Map entities = + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - final ListRolesResult result = new ListRolesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list roles", e); - } - }); + final ListRolesResult result = new ListRolesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setRoles(mapEntitiesToRoles(entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list roles", e); + } + }); } private List mapEntitiesToRoles(final Collection entities) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index e9140441999e2..6d23456b76b4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregateAcrossEntitiesInput; @@ -14,25 +19,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** - * Executes a search query only to get a provided list of aggregations back. - * Does not resolve any entities as results. + * Executes a search query only to get a provided list of aggregations back. Does not resolve any + * entities as results. 
*/ @Slf4j @RequiredArgsConstructor -public class AggregateAcrossEntitiesResolver implements DataFetcher> { +public class AggregateAcrossEntitiesResolver + implements DataFetcher> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -48,47 +48,63 @@ public CompletableFuture get(DataFetchingEnvironment environme // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - - final List facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; - - try { - return mapAggregateResults(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - 0, - 0, // 0 entity count because we don't want resolved entities - searchFlags, - null, - ResolverUtils.getAuthentication(environment), - facets)); - } catch (Exception e) { - log.error( - "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", - input.getTypes(), input.getQuery(), input.getOrFilters()); - throw new RuntimeException( - "Failed to execute aggregate across entities: " + String.format("entity types %s, query %s, filters: %s", - input.getTypes(), input.getQuery(), input.getOrFilters()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + + final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + + final List facets = + input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; + + try { + return mapAggregateResults( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + 0, + 0, // 0 entity count because we don't want resolved entities + searchFlags, + null, + ResolverUtils.getAuthentication(environment), + facets)); + } catch (Exception e) { + log.error( + "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters()); + throw new RuntimeException( + "Failed to execute aggregate across entities: " + + String.format( + "entity types %s, query %s, filters: %s", + input.getTypes(), input.getQuery(), input.getOrFilters()), + e); + } + }); } AggregateResults mapAggregateResults(SearchResult searchResult) { final AggregateResults results = new AggregateResults(); - results.setFacets(searchResult.getMetadata().getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + results.setFacets( + searchResult.getMetadata().getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return results; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 043ecf5eb97f1..c3e843cefd5c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; @@ -13,87 +17,90 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; -import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ -public class AutoCompleteForMultipleResolver implements DataFetcher> { +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ +public class AutoCompleteForMultipleResolver + implements DataFetcher> { - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); - private final Map> _typeToEntity; - private final ViewService _viewService; + private final Map> _typeToEntity; + private final ViewService _viewService; - public 
AutoCompleteForMultipleResolver(@Nonnull final List> searchableEntities, @Nonnull final ViewService viewService) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - _viewService = viewService; - } + public AutoCompleteForMultipleResolver( + @Nonnull final List> searchableEntities, + @Nonnull final ViewService viewService) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + _viewService = viewService; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final AutoCompleteMultipleInput input = bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final AutoCompleteMultipleInput input = + bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); - if (isBlank(input.getQuery())) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) + if (isBlank(input.getQuery())) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) : null; - List types = getEntityTypes(input.getTypes(), maybeResolvedView); - if (types != null && types.size() > 0) { - return AutocompleteUtils.batchGetAutocompleteResults( - types.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); - } - - // By default, autocomplete only against the Default Set of Autocomplete entities - return AutocompleteUtils.batchGetAutocompleteResults( - AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); + List types = getEntityTypes(input.getTypes(), maybeResolvedView); + if (types != null && types.size() > 0) { + return AutocompleteUtils.batchGetAutocompleteResults( + types.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); } - /** - * Gets the intersection of provided input types and types on the view applied (if any) - */ - @Nullable - List getEntityTypes(final @Nullable List inputTypes, final @Nullable DataHubViewInfo maybeResolvedView) { - List types = inputTypes; - if (maybeResolvedView != null) { - List inputEntityTypes = types != null ? 
types : new ArrayList<>(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - List stringEntityTypes = SearchUtils.intersectEntityTypes(inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); + // By default, autocomplete only against the Default Set of Autocomplete entities + return AutocompleteUtils.batchGetAutocompleteResults( + AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); + } - types = stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); - } + /** Gets the intersection of provided input types and types on the view applied (if any) */ + @Nullable + List getEntityTypes( + final @Nullable List inputTypes, + final @Nullable DataHubViewInfo maybeResolvedView) { + List types = inputTypes; + if (maybeResolvedView != null) { + List inputEntityTypes = types != null ? types : new ArrayList<>(); + final List inputEntityNames = + inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List stringEntityTypes = + SearchUtils.intersectEntityTypes( + inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); - return types; + types = + stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); } + + return types; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java index e13545aadc516..235f5f8d27899 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java @@ -1,90 +1,94 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.linkedin.datahub.graphql.types.SearchableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AutoCompleteInput; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ public class AutoCompleteResolver implements DataFetcher> { - private static final int DEFAULT_LIMIT = 5; + private static final int DEFAULT_LIMIT = 5; - private static final Logger _logger = 
LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); - private final Map> _typeToEntity; + private final Map> _typeToEntity; - public AutoCompleteResolver(@Nonnull final List> searchableEntities) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - } + public AutoCompleteResolver(@Nonnull final List> searchableEntities) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final AutoCompleteInput input = bindArgument(environment.getArgument("input"), AutoCompleteInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final AutoCompleteInput input = + bindArgument(environment.getArgument("input"), AutoCompleteInput.class); - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - if (isBlank(sanitizedQuery)) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + if (isBlank(sanitizedQuery)) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug("Executing autocomplete. " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + "Executing autocomplete. 
" + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), input.getLimit())); - return _typeToEntity.get(input.getType()).autoComplete( - sanitizedQuery, - input.getField(), - filter, - limit, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + return _typeToEntity + .get(input.getType()) + .autoComplete( + sanitizedQuery, input.getField(), filter, limit, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), - input.getLimit()) + " " - + e.getMessage()); - throw new RuntimeException("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", - input.getType(), - input.getField(), - input.getQuery(), - input.getFilters(), - input.getLimit()), e); - } - }); - } + input.getLimit()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", + input.getType(), + input.getField(), + input.getQuery(), + input.getFilters(), + input.getLimit()), + e); + } + }); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java index 40722211de8d3..9cd860781c0d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java @@ -14,69 +14,81 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; - - public class AutocompleteUtils { private static final Logger _logger = LoggerFactory.getLogger(AutocompleteUtils.class.getName()); private static final int DEFAULT_LIMIT = 5; - private AutocompleteUtils() { } + private AutocompleteUtils() {} public static CompletableFuture batchGetAutocompleteResults( List> entities, String sanitizedQuery, AutoCompleteMultipleInput input, DataFetchingEnvironment environment, - @Nullable DataHubViewInfo view - ) { + @Nullable DataHubViewInfo view) { final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - final List> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> { - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = view != null - ? SearchUtils.combineFilters(filter, view.getDefinition().getFilter()) - : filter; + final List> autoCompletesFuture = + entities.stream() + .map( + entity -> + CompletableFuture.supplyAsync( + () -> { + final Filter filter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = + view != null + ? 
SearchUtils.combineFilters( + filter, view.getDefinition().getFilter()) + : filter; - try { - final AutoCompleteResults searchResult = entity.autoComplete( - sanitizedQuery, - input.getField(), - finalFilter, - limit, - environment.getContext() - ); - return new AutoCompleteResultForEntity( - entity.type(), - searchResult.getSuggestions(), - searchResult.getEntities() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete all: " - + String.format("field %s, query %s, filters: %s, limit: %s", - input.getField(), - input.getQuery(), - filter, - input.getLimit()), e); - return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList()); - } - })).collect(Collectors.toList()); + try { + final AutoCompleteResults searchResult = + entity.autoComplete( + sanitizedQuery, + input.getField(), + finalFilter, + limit, + environment.getContext()); + return new AutoCompleteResultForEntity( + entity.type(), + searchResult.getSuggestions(), + searchResult.getEntities()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete all: " + + String.format( + "field %s, query %s, filters: %s, limit: %s", + input.getField(), + input.getQuery(), + filter, + input.getLimit()), + e); + return new AutoCompleteResultForEntity( + entity.type(), Collections.emptyList(), Collections.emptyList()); + } + })) + .collect(Collectors.toList()); return CompletableFuture.allOf(autoCompletesFuture.toArray(new CompletableFuture[0])) - .thenApplyAsync((res) -> { - AutoCompleteMultipleResults result = new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); - List suggestions = autoCompletesFuture.stream() - .map(CompletableFuture::join) - .filter( + .thenApplyAsync( + (res) -> { + AutoCompleteMultipleResults result = + new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); + List suggestions = + autoCompletesFuture.stream() + .map(CompletableFuture::join) + .filter( autoCompleteResultForEntity -> - autoCompleteResultForEntity.getSuggestions() != null && autoCompleteResultForEntity.getSuggestions().size() > 0 - ) - .collect(Collectors.toList()); - result.setSuggestions(suggestions); - return result; - }); + autoCompleteResultForEntity.getSuggestions() != null + && autoCompleteResultForEntity.getSuggestions().size() > 0) + .collect(Collectors.toList()); + result.setSuggestions(suggestions); + return result; + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 17058fd8d7cff..e54955e1857f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.Entity; @@ -18,26 +23,20 @@ import com.linkedin.view.DataHubViewInfo; import 
graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor -public class GetQuickFiltersResolver implements DataFetcher> { +public class GetQuickFiltersResolver + implements DataFetcher> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -47,41 +46,51 @@ public class GetQuickFiltersResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { - final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); - - return CompletableFuture.supplyAsync(() -> { - final GetQuickFiltersResult result = new GetQuickFiltersResult(); - final List quickFilters = new ArrayList<>(); - - try { - final SearchResult searchResult = getSearchResults(ResolverUtils.getAuthentication(environment), input); - final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - - quickFilters.addAll(getPlatformQuickFilters(aggregations)); - quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); - } catch (Exception e) { - log.error("Failed getting quick filters", e); - throw new RuntimeException("Failed to to get quick filters", e); - } - - result.setQuickFilters(quickFilters); - return result; - }); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final GetQuickFiltersInput input = + bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final GetQuickFiltersResult result = new GetQuickFiltersResult(); + final List quickFilters = new ArrayList<>(); + + try { + final SearchResult searchResult = + getSearchResults(ResolverUtils.getAuthentication(environment), input); + final AggregationMetadataArray aggregations = + searchResult.getMetadata().getAggregations(); + + quickFilters.addAll(getPlatformQuickFilters(aggregations)); + quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); + } catch (Exception e) { + log.error("Failed getting quick filters", e); + throw new RuntimeException("Failed to to get quick filters", e); + } + + result.setQuickFilters(quickFilters); + return result; + }); } - /** - * Do a star search with view filter applied to get info about all data in this instance. - */ - private SearchResult getSearchResults(@Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) throws Exception { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? 
resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) - : null; - final List entityNames = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + /** Do a star search with view filter applied to get info about all data in this instance. */ + private SearchResult getSearchResults( + @Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) + throws Exception { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) + : null; + final List entityNames = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); return _entityClient.searchAcrossEntities( maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) : entityNames, "*", maybeResolvedView != null @@ -95,67 +104,88 @@ private SearchResult getSearchResults(@Nonnull final Authentication authenticati } /** - * Get platforms and their count from an aggregations array, sorts by entity count, and map the top 5 to quick filters + * Get platforms and their count from an aggregations array, sorts by entity count, and map the + * top 5 to quick filters */ - private List getPlatformQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List getPlatformQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List platforms = new ArrayList<>(); - final Optional platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); + final Optional platformAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); if (platformAggregations.isPresent()) { final List sortedPlatforms = - platformAggregations.get().getFilterValues().stream().sorted(Comparator.comparingLong(val -> -val.getFacetCount())).collect(Collectors.toList()); - sortedPlatforms.forEach(platformFilter -> { - if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { - platforms.add(mapQuickFilter(PLATFORM, platformFilter)); - } - }); + platformAggregations.get().getFilterValues().stream() + .sorted(Comparator.comparingLong(val -> -val.getFacetCount())) + .collect(Collectors.toList()); + sortedPlatforms.forEach( + platformFilter -> { + if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { + platforms.add(mapQuickFilter(PLATFORM, platformFilter)); + } + }); } // return platforms sorted alphabetically by their name - return platforms.stream().sorted(Comparator.comparing(QuickFilter::getValue)).collect(Collectors.toList()); + return platforms.stream() + .sorted(Comparator.comparing(QuickFilter::getValue)) + .collect(Collectors.toList()); } /** - * Gets entity type quick filters from search aggregations. First, get source entity type quick filters - * from a prioritized list. Do the same for datathub entity types. + * Gets entity type quick filters from search aggregations. First, get source entity type quick + * filters from a prioritized list. Do the same for datathub entity types. 
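+   *
+   * <p>For illustration only (hypothetical counts, assuming each pass's count cap admits two
+   * entries): given entity aggregations such as
+   *
+   * <pre>{@code
+   * dataset=42, chart=7, dashboard=0, domain=3, corpuser=5
+   * }</pre>
+   *
+   * the source pass walks SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES and picks "dataset" then
+   * "chart" (zero-count "dashboard" is skipped), while the DataHub pass walks
+   * SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES and picks "domain" then "corpuser".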
*/ - private List getEntityTypeQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List getEntityTypeQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List entityTypes = new ArrayList<>(); - final Optional entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); + final Optional entityAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); if (entityAggregations.isPresent()) { final List sourceEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, + SOURCE_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(sourceEntityTypeFilters); final List dataHubEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, + DATAHUB_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(dataHubEntityTypeFilters); } return entityTypes; } /** - * Create a quick filters list by looping over prioritized list and adding filters that exist until we reach the maxListSize defined + * Create a quick filters list by looping over prioritized list and adding filters that exist + * until we reach the maxListSize defined */ private List getQuickFiltersFromList( @Nonnull final List prioritizedList, final int maxListSize, - @Nonnull final AggregationMetadata entityAggregations - ) { + @Nonnull final AggregationMetadata entityAggregations) { final List entityTypes = new ArrayList<>(); - prioritizedList.forEach(entityType -> { - if (entityTypes.size() < maxListSize) { - final Optional entityFilter = entityAggregations.getFilterValues().stream().filter(val -> val.getValue().equals(entityType)).findFirst(); - if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); - } - } - }); + prioritizedList.forEach( + entityType -> { + if (entityTypes.size() < maxListSize) { + final Optional entityFilter = + entityAggregations.getFilterValues().stream() + .filter(val -> val.getValue().equals(entityType)) + .findFirst(); + if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { + entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); + } + } + }); return entityTypes; } - private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final FilterValue filterValue) { + private QuickFilter mapQuickFilter( + @Nonnull final String field, @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); @@ -167,9 +197,7 @@ private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final F return quickFilter; } - /** - * If we're working with an entity type filter, we need to convert the value to an EntityType - */ + /** If we're working with an entity type filter, we need to convert the value to an EntityType */ public static String convertFilterValue(String filterValue, boolean isEntityType) { if (isEntityType) { return EntityTypeMapper.getType(filterValue).toString(); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index d576ffc8ca280..742d1d170de64 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -24,13 +27,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossEntitiesResolver implements DataFetcher> { @@ -48,57 +45,80 @@ public CompletableFuture get(DataFetchingEnvironment environment) bindArgument(environment.getArgument("input"), ScrollAcrossEntitiesInput.class); final List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - final List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + final List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - // escape forward slash since it is a reserved character in Elasticsearch, default to * if blank/empty - final String sanitizedQuery = StringUtils.isNotBlank(input.getQuery()) - ? ResolverUtils.escapeForwardSlash(input.getQuery()) : "*"; + // escape forward slash since it is a reserved character in Elasticsearch, default to * if + // blank/empty + final String sanitizedQuery = + StringUtils.isNotBlank(input.getQuery()) + ? ResolverUtils.escapeForwardSlash(input.getQuery()) + : "*"; - @Nullable - final String scrollId = input.getScrollId(); + @Nullable final String scrollId = input.getScrollId(); final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? 
resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; - return UrnScrollResultsMapper.map(_entityClient.scrollAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - scrollId, - keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), e); - } - }); + return UrnScrollResultsMapper.map( + _entityClient.scrollAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + scrollId, + keepAlive, + count, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index 78be1ac309690..adab62c22bb72 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AndFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossLineageResolver @@ -53,55 +50,98 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final String scrollId = input.getScrollId() != null ? input.getScrollId() : null; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); - final List facetFilters = filters.stream() - .map(AndFilterInput::getAnd) - .flatMap(List::stream) - .collect(Collectors.toList()); + final List filters = + input.getOrFilters() != null ? 
input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); final Integer maxHops = getMaxHops(facetFilters); String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ? null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); - SearchFlags searchFlags = null; - final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = new SearchFlags() - .setSkipCache(inputFlags.getSkipCache()) - .setFulltext(inputFlags.getFulltext()) - .setMaxAggValues(inputFlags.getMaxAggValues()); - } - return UrnScrollAcrossLineageResultsMapper.map( - _entityClient.scrollAcrossLineage(urn, resolvedDirection, entityNames, sanitizedQuery, - maxHops, ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), null, scrollId, - keepAlive, count, startTimeMillis, endTimeMillis, searchFlags, ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); - throw new RuntimeException("Failed to execute scroll across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count), e); - } - }); + SearchFlags searchFlags = null; + final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = + input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = + new SearchFlags() + .setSkipCache(inputFlags.getSkipCache()) + .setFulltext(inputFlags.getFulltext()) + .setMaxAggValues(inputFlags.getMaxAggValues()); + } + return UrnScrollAcrossLineageResultsMapper.map( + _entityClient.scrollAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), + null, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + 
input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); + throw new RuntimeException( + "Failed to execute scroll across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 1022b25b3cd99..f8178e3b396cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; @@ -19,13 +22,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossEntitiesResolver implements DataFetcher> { @@ -50,43 +47,65 @@ public CompletableFuture get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - SortCriterion sortCriterion = input.getSortInput() != null ? mapSortCriterion(input.getSortInput().getSortCriterion()) : null; - - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - start, - count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + + SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + SortCriterion sortCriterion = + input.getSortInput() != null + ? mapSortCriterion(input.getSortInput().getSortCriterion()) + : null; + + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + start, + count, + searchFlags, + sortCriterion, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 9f489183f4af7..0f5d2d90ba0c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for 
resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossLineageResolver @@ -54,76 +51,95 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = input.getFilters() != null ? input.getFilters() : new ArrayList<>(); + final List filters = + input.getFilters() != null ? input.getFilters() : new ArrayList<>(); final Integer maxHops = getMaxHops(filters); @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ? 
null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, - resolvedDirection, - input.getTypes(), - input.getQuery(), - filters, - start, - count); - - final Filter filter = - ResolverUtils.buildFilter( + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), filters, - input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - if (inputFlags.getSkipHighlighting() == null) { - searchFlags.setSkipHighlighting(true); - } - } else { - searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); - } - - return UrnSearchAcrossLineageResultsMapper.map( - _entityClient.searchAcrossLineage( + start, + count); + + final Filter filter = ResolverUtils.buildFilter(filters, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + if (inputFlags.getSkipHighlighting() == null) { + searchFlags.setSkipHighlighting(true); + } + } else { + searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); + } + + return UrnSearchAcrossLineageResultsMapper.map( + _entityClient.searchAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + filter, + null, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", urn, resolvedDirection, - entityNames, - sanitizedQuery, - maxHops, - filter, - null, + input.getTypes(), + input.getQuery(), + filters, start, - count, - startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count); - throw new RuntimeException("Failed to execute search across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count), e); - } finally { - log.debug("Returning from search across lineage resolver"); - } - }); + count); + throw new RuntimeException( + "Failed to execute search across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + start, + count), 
+ e); + } finally { + log.debug("Returning from search across lineage resolver"); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 0e66d6e601399..6821423887923 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; @@ -15,17 +18,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - -/** - * Resolver responsible for resolving the 'search' field of the Query type - */ +/** Resolver responsible for resolving the 'search' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchResolver implements DataFetcher> { - private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = new SearchFlags() + private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = + new SearchFlags() .setFulltext(true) .setMaxAggValues(20) .setSkipCache(false) @@ -54,22 +52,52 @@ public CompletableFuture get(DataFetchingEnvironment environment) searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search. 
entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); - return UrnSearchResultsMapper.map( - _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), - input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment), + return UrnSearchResultsMapper.map( + _entityClient.search( + entityName, + sanitizedQuery, + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), + null, + start, + count, + ResolverUtils.getAuthentication(environment), searchFlags)); - } catch (Exception e) { - log.error("Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags), e); - } - }); + } catch (Exception e) { + log.error( + "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index fb146ef72877d..d04cb57e1a860 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -1,5 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import 
com.linkedin.common.urn.Urn; @@ -28,31 +44,11 @@ import lombok.extern.slf4j.Slf4j; import org.codehaus.plexus.util.CollectionUtils; -import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; - - @Slf4j public class SearchUtils { - private SearchUtils() { - } + private SearchUtils() {} - /** - * Entities that are searched by default in Search Across Entities - */ + /** Entities that are searched by default in Search Across Entities */ public static final List SEARCHABLE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -76,10 +72,7 @@ private SearchUtils() { EntityType.DATA_PRODUCT, EntityType.NOTEBOOK); - - /** - * Entities that are part of autocomplete by default in Auto Complete Across Entities - */ + /** Entities that are part of autocomplete by default in Auto Complete Across Entities */ public static final List AUTO_COMPLETE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -99,63 +92,64 @@ private SearchUtils() { EntityType.NOTEBOOK, EntityType.DATA_PRODUCT); - /** - * A prioritized list of source filter types used to generate quick filters - */ - public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of( - DATASET_ENTITY_NAME, - DASHBOARD_ENTITY_NAME, - DATA_FLOW_ENTITY_NAME, - DATA_JOB_ENTITY_NAME, - CHART_ENTITY_NAME, - CONTAINER_ENTITY_NAME, - ML_MODEL_ENTITY_NAME, - ML_MODEL_GROUP_ENTITY_NAME, - ML_FEATURE_ENTITY_NAME, - ML_FEATURE_TABLE_ENTITY_NAME, - ML_PRIMARY_KEY_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** A prioritized list of source filter types used to generate quick filters */ + public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = + Stream.of( + DATASET_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME, + CHART_ENTITY_NAME, + CONTAINER_ENTITY_NAME, + ML_MODEL_ENTITY_NAME, + ML_MODEL_GROUP_ENTITY_NAME, + ML_FEATURE_ENTITY_NAME, + ML_FEATURE_TABLE_ENTITY_NAME, + ML_PRIMARY_KEY_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); - /** - * A prioritized list of DataHub filter types used to generate quick filters - */ - public static final List PRIORITIZED_DATAHUB_ENTITY_TYPES = Stream.of( - DOMAIN_ENTITY_NAME, - GLOSSARY_TERM_ENTITY_NAME, - CORP_GROUP_ENTITY_NAME, - CORP_USER_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** A prioritized list of DataHub filter types used to generate quick filters */ + public static final List PRIORITIZED_DATAHUB_ENTITY_TYPES = + Stream.of( + DOMAIN_ENTITY_NAME, + GLOSSARY_TERM_ENTITY_NAME, + 
CORP_GROUP_ENTITY_NAME, + CORP_USER_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); /** - * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link Filter} - * in disjunctive normal form. + * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link + * Filter} in disjunctive normal form. * * @param baseFilter the filter to apply the view to * @param viewFilter the view filter, null if it doesn't exist - * * @return a new instance of {@link Filter} representing the applied view. */ @Nonnull - public static Filter combineFilters(@Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { - final Filter finalBaseFilter = baseFilter == null - ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) - : baseFilter; + public static Filter combineFilters( + @Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { + final Filter finalBaseFilter = + baseFilter == null + ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) + : baseFilter; // Join the filter conditions in Disjunctive Normal Form. return combineFiltersInConjunction(finalBaseFilter, viewFilter); } /** - * Returns the intersection of two sets of entity types. (Really just string lists). - * If either is empty, consider the entity types list to mean "all" (take the other set). + * Returns the intersection of two sets of entity types. (Really just string lists). If either is + * empty, consider the entity types list to mean "all" (take the other set). * * @param baseEntityTypes the entity types to apply the view to * @param viewEntityTypes the view info, null if it doesn't exist - * * @return the intersection of the two input sets */ @Nonnull - public static List intersectEntityTypes(@Nonnull final List baseEntityTypes, @Nonnull final List viewEntityTypes) { + public static List intersectEntityTypes( + @Nonnull final List baseEntityTypes, @Nonnull final List viewEntityTypes) { if (baseEntityTypes.isEmpty()) { return viewEntityTypes; } @@ -171,126 +165,29 @@ public static List intersectEntityTypes(@Nonnull final List base * * @param filter1 the first filter in the pair * @param filter2 the second filter in the pair - * - * This method supports either Filter format, where the "or" field is used, instead - * of criteria. If the criteria filter is used, then it will be converted into an "OR" before - * returning the new filter. - * + *
<p>
This method supports either Filter format, where the "or" field is used, instead of + * criteria. If the criteria filter is used, then it will be converted into an "OR" before + * returning the new filter. * @return the result of joining the 2 filters in a conjunction (AND) - * - * How does it work? It basically cross-products the conjunctions inside of each Filter clause. - * - * Example Inputs: - * filter1 -> - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * } - * ] - * } - * ] - * } - * filter2 -> - * { - * or: [ - * { - * and: [ - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * }, - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term2"] - * } - * ] - * } - * ] - * } - * Example Output: - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * ] - * } + *

How does it work? It cross-products the conjunctions inside each Filter
+   *     clause.
+   *
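A self-contained sketch of that cross-product, with plain string lists standing in for Criterion and ConjunctiveCriterion; all names and values here are illustrative only.

import java.util.ArrayList;
import java.util.List;

public final class CrossProductSketch {
  // Each inner list is one AND-clause; the outer list is the OR of those
  // clauses. Combining two filters ANDs every clause of the first with every
  // clause of the second, so the result stays in disjunctive normal form.
  static List<List<String>> cross(List<List<String>> or1, List<List<String>> or2) {
    List<List<String>> result = new ArrayList<>();
    for (List<String> and1 : or1) {
      for (List<String> and2 : or2) {
        List<String> joined = new ArrayList<>(and1);
        joined.addAll(and2);
        result.add(joined);
      }
    }
    return result;
  }

  public static void main(String[] args) {
    // (tags=tag OR terms=term) AND (domain=d OR terms=term2) expands to the
    // four clauses shown in the example output below.
    System.out.println(
        cross(
            List.of(List.of("tags=tag"), List.of("terms=term")),
            List.of(List.of("domain=d"), List.of("terms=term2"))));
  }
}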

Example Inputs: filter1 -> { or: [ { and: [ { field: tags, condition: EQUAL, values:
+   *     ["urn:li:tag:tag"] } ] }, { and: [ { field: glossaryTerms, condition: EQUAL, values:
+   *     ["urn:li:glossaryTerm:term"] } ] } ] } filter2 -> { or: [ { and: [ { field: domain,
+   *     condition: EQUAL, values: ["urn:li:domain:domain"] }, ] }, { and: [ { field: glossaryTerms,
+   *     condition: EQUAL, values: ["urn:li:glossaryTerm:term2"] } ] } ] } Example Output: { or: [ {
+   *     and: [ { field: tags, condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: domain,
+   *     condition: EQUAL, values: ["urn:li:domain:domain"] } ] }, { and: [ { field: tags,
+   *     condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: glossaryTerms, condition: EQUAL,
+   *     values: ["urn:li:glossaryTerm:term2"] } ] }, { and: [ { field: glossaryTerms, condition:
+   *     EQUAL, values: ["urn:li:glossaryTerm:term"] }, { field: domain, condition: EQUAL, values:
+   *     ["urn:li:domain:domain"] } ] }, { and: [ { field: glossaryTerms, condition: EQUAL, values:
+   *     ["urn:li:glossaryTerm:term"] }, { field: glossaryTerms, condition: EQUAL, values:
+   *     ["urn:li:glossaryTerm:term2"] } ] }, ] }
   */
  @Nonnull
-  private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, @Nonnull final Filter filter2) {
+  private static Filter combineFiltersInConjunction(
+      @Nonnull final Filter filter1, @Nonnull final Filter filter2) {
     final Filter finalFilter1 = convertToV2Filter(filter1);
     final Filter finalFilter2 = convertToV2Filter(filter2);
 
@@ -310,7 +207,8 @@ private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1,
       for (ConjunctiveCriterion conjunction2 : finalFilter2.getOr()) {
         final List<Criterion> joinedCriterion = new ArrayList<>(conjunction1.getAnd());
         joinedCriterion.addAll(conjunction2.getAnd());
-        ConjunctiveCriterion newConjunction = new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion));
+        ConjunctiveCriterion newConjunction =
+            new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion));
         newDisjunction.add(newConjunction);
       }
     }
@@ -325,38 +223,45 @@ private static Filter convertToV2Filter(@Nonnull Filter filter) {
     } else if (filter.hasCriteria()) {
       // Convert criteria to an OR
       return new Filter()
-        .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-            new ConjunctiveCriterion()
-                .setAnd(filter.getCriteria())
-        )));
+          .setOr(
+              new ConjunctiveCriterionArray(
+                  ImmutableList.of(new ConjunctiveCriterion().setAnd(filter.getCriteria()))));
     }
     throw new IllegalArgumentException(
-        String.format("Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", filter));
+        String.format(
+            "Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s",
+            filter));
   }
 
   /**
-   * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the specified
-   * urn cannot be found.
+   * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the
+   * specified urn cannot be found.
    */
-  public static DataHubViewInfo resolveView(@Nonnull ViewService viewService, @Nonnull final Urn viewUrn,
+  public static DataHubViewInfo resolveView(
+      @Nonnull ViewService viewService,
+      @Nonnull final Urn viewUrn,
       @Nonnull final Authentication authentication) {
     try {
       DataHubViewInfo maybeViewInfo = viewService.getViewInfo(viewUrn, authentication);
       if (maybeViewInfo == null) {
-        log.warn(String.format("Failed to resolve View with urn %s. 
View does not exist!", viewUrn)); + log.warn( + String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn)); } return maybeViewInfo; } catch (Exception e) { - throw new RuntimeException(String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), e); + throw new RuntimeException( + String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), + e); } } // Assumption is that filter values for degree are either null, 3+, 2, or 1. public static Integer getMaxHops(List filters) { - Set degreeFilterValues = filters.stream() - .filter(filter -> filter.getField().equals("degree")) - .flatMap(filter -> filter.getValues().stream()) - .collect(Collectors.toSet()); + Set degreeFilterValues = + filters.stream() + .filter(filter -> filter.getField().equals("degree")) + .flatMap(filter -> filter.getValues().stream()) + .collect(Collectors.toSet()); Integer maxHops = null; if (!degreeFilterValues.contains("3+")) { if (degreeFilterValues.contains("2")) { @@ -368,7 +273,8 @@ public static Integer getMaxHops(List filters) { return maxHops; } - public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { + public static SearchFlags mapInputFlags( + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { SearchFlags searchFlags = null; if (inputFlags != null) { searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); @@ -376,7 +282,8 @@ public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.S return searchFlags; } - public static SortCriterion mapSortCriterion(com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { + public static SortCriterion mapSortCriterion( + com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { SortCriterion result = new SortCriterion(); result.setField(sortCriterion.getField()); result.setOrder(SortOrder.valueOf(sortCriterion.getSortOrder().name())); @@ -388,4 +295,4 @@ public static List getEntityNames(List inputTypes) { (inputTypes == null || inputTypes.isEmpty()) ? SEARCHABLE_ENTITY_TYPES : inputTypes; return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java index 8c21277b66a69..a7e0d93c7bd1e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -16,58 +18,61 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating the authenticated user's View-specific settings. - */ +/** Resolver responsible for updating the authenticated user's View-specific settings. 
*/ @Slf4j @RequiredArgsConstructor -public class UpdateCorpUserViewsSettingsResolver implements DataFetcher> { +public class UpdateCorpUserViewsSettingsResolver + implements DataFetcher> { private final SettingsService _settingsService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateCorpUserViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); + final UpdateCorpUserViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { + return CompletableFuture.supplyAsync( + () -> { + try { - final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); + final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); - final CorpUserSettings maybeSettings = _settingsService.getCorpUserSettings( - userUrn, - context.getAuthentication() - ); + final CorpUserSettings maybeSettings = + _settingsService.getCorpUserSettings(userUrn, context.getAuthentication()); - final CorpUserSettings newSettings = maybeSettings == null - ? new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) - : maybeSettings; + final CorpUserSettings newSettings = + maybeSettings == null + ? new CorpUserSettings() + .setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) + : maybeSettings; - // Patch the new corp user settings. This does a R-M-F. - updateCorpUserSettings(newSettings, input); + // Patch the new corp user settings. This does a R-M-F. + updateCorpUserSettings(newSettings, input); - _settingsService.updateCorpUserSettings( - userUrn, - newSettings, - context.getAuthentication() - ); - return true; - } catch (Exception e) { - log.error("Failed to perform user view settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update to user view settings against input %s", input.toString()), e); - } - }); + _settingsService.updateCorpUserSettings( + userUrn, newSettings, context.getAuthentication()); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user view settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform update to user view settings against input %s", + input.toString()), + e); + } + }); } private static void updateCorpUserSettings( @Nonnull final CorpUserSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - final CorpUserViewsSettings newViewSettings = settings.hasViews() - ? settings.getViews() - : new CorpUserViewsSettings(); + final CorpUserViewsSettings newViewSettings = + settings.hasViews() ? settings.getViews() : new CorpUserViewsSettings(); updateCorpUserViewsSettings(newViewSettings, input); settings.setViews(newViewSettings); } @@ -75,9 +80,8 @@ private static void updateCorpUserSettings( private static void updateCorpUserViewsSettings( @Nonnull final CorpUserViewsSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java index f1aba3d9247c5..208e871743269 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java @@ -14,11 +14,11 @@ /** * Retrieves the Global Settings related to the Views feature. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + *

This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ @Slf4j -public class GlobalViewsSettingsResolver implements - DataFetcher> { +public class GlobalViewsSettingsResolver + implements DataFetcher> { private final SettingsService _settingsService; @@ -27,25 +27,29 @@ public GlobalViewsSettingsResolver(final SettingsService settingsService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - final GlobalSettingsInfo globalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); - return globalSettings != null && globalSettings.hasViews() - ? mapGlobalViewsSettings(globalSettings.getViews()) - : new GlobalViewsSettings(); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve Global Views Settings", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final GlobalSettingsInfo globalSettings = + _settingsService.getGlobalSettings(context.getAuthentication()); + return globalSettings != null && globalSettings.hasViews() + ? mapGlobalViewsSettings(globalSettings.getViews()) + : new GlobalViewsSettings(); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve Global Views Settings", e); + } + }); } - private static GlobalViewsSettings mapGlobalViewsSettings(@Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { + private static GlobalViewsSettings mapGlobalViewsSettings( + @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { final GlobalViewsSettings result = new GlobalViewsSettings(); if (settings.hasDefaultView()) { result.setDefaultView(settings.getDefaultView().toString()); } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java index c90ec04b3a2df..7d37683785fc2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,13 +17,10 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Resolver responsible for updating the Global Views settings. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + *
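The settings resolvers touched by this patch all follow one shape: bind the input, check the privilege, then read-modify-write the settings record off the request thread. A condensed sketch of that flow under those assumptions; SettingsStore is a hypothetical stand-in for SettingsService, and the string "settings" model is purely illustrative.

import java.util.concurrent.CompletableFuture;

public final class UpdateSettingsPattern {
  // Hypothetical store: read the current record, write the patched one.
  interface SettingsStore {
    String read();
    void write(String next);
  }

  // Authorize first, then read-modify-write asynchronously, mirroring the
  // CompletableFuture.supplyAsync pattern used by these resolvers.
  static CompletableFuture<Boolean> update(SettingsStore store, boolean authorized, String patch) {
    return CompletableFuture.supplyAsync(
        () -> {
          if (!authorized) {
            throw new RuntimeException(
                "Unauthorized to perform this action. Please contact your DataHub administrator.");
          }
          String current = store.read(); // read
          String next = (current == null ? "" : current) + patch; // modify
          store.write(next); // write
          return true;
        });
  }
}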

This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ public class UpdateGlobalViewsSettingsResolver implements DataFetcher> { @@ -32,45 +31,50 @@ public UpdateGlobalViewsSettingsResolver(@Nonnull final SettingsService settings } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateGlobalViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); + final UpdateGlobalViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (AuthorizationUtils.canManageGlobalViews(context)) { - try { - // First, fetch the existing global settings. This does a R-M-F. - final GlobalSettingsInfo maybeGlobalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageGlobalViews(context)) { + try { + // First, fetch the existing global settings. This does a R-M-F. + final GlobalSettingsInfo maybeGlobalSettings = + _settingsService.getGlobalSettings(context.getAuthentication()); - final GlobalSettingsInfo newGlobalSettings = maybeGlobalSettings != null - ? maybeGlobalSettings - : new GlobalSettingsInfo(); + final GlobalSettingsInfo newGlobalSettings = + maybeGlobalSettings != null ? maybeGlobalSettings : new GlobalSettingsInfo(); - final GlobalViewsSettings newGlobalViewsSettings = newGlobalSettings.hasViews() - ? newGlobalSettings.getViews() - : new GlobalViewsSettings(); + final GlobalViewsSettings newGlobalViewsSettings = + newGlobalSettings.hasViews() + ? newGlobalSettings.getViews() + : new GlobalViewsSettings(); - // Next, patch the global views settings. - updateViewsSettings(newGlobalViewsSettings, input); - newGlobalSettings.setViews(newGlobalViewsSettings); + // Next, patch the global views settings. + updateViewsSettings(newGlobalViewsSettings, input); + newGlobalSettings.setViews(newGlobalViewsSettings); - // Finally, write back to GMS. - _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update global view settings! %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Finally, write back to GMS. + _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update global view settings! %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private static void updateViewsSettings( @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings, @Nonnull final UpdateGlobalViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java index 0e93cdfb231fa..9ea6cba0f211a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.EntityKeyUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -27,59 +31,64 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.utils.EntityKeyUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchGetStepStatesResolver implements DataFetcher> { +public class BatchGetStepStatesResolver + implements DataFetcher> { private final EntityClient _entityClient; @Override - public CompletableFuture get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); final BatchGetStepStatesInput input = bindArgument(environment.getArgument("input"), BatchGetStepStatesInput.class); - return CompletableFuture.supplyAsync(() -> { - Map urnsToIdsMap; - Set urns; - Map entityResponseMap; + return CompletableFuture.supplyAsync( + () -> { + Map urnsToIdsMap; + Set urns; + Map entityResponseMap; - try { - urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); - urns = urnsToIdsMap.keySet(); - entityResponseMap = _entityClient.batchGetV2(DATAHUB_STEP_STATE_ENTITY_NAME, urns, - ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), authentication); - } catch (Exception e) { - throw new RuntimeException(e); - } + try { + urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); + urns = urnsToIdsMap.keySet(); + entityResponseMap = + _entityClient.batchGetV2( + DATAHUB_STEP_STATE_ENTITY_NAME, + urns, + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), + authentication); + } catch (Exception e) { + throw new RuntimeException(e); + } - final Map stepStatePropertiesMap = new HashMap<>(); - for (Map.Entry entry : entityResponseMap.entrySet()) { - final Urn urn = entry.getKey(); - final DataHubStepStateProperties stepStateProperties = getStepStateProperties(urn, entry.getValue()); - if (stepStateProperties != null) { - stepStatePropertiesMap.put(urn, stepStateProperties); - } - } + final Map stepStatePropertiesMap = new HashMap<>(); + for (Map.Entry entry : entityResponseMap.entrySet()) { + final Urn urn = entry.getKey(); + final DataHubStepStateProperties stepStateProperties = + getStepStateProperties(urn, entry.getValue()); + if (stepStateProperties != null) { + stepStatePropertiesMap.put(urn, stepStateProperties); + } + } - final 
List results = stepStatePropertiesMap.entrySet() - .stream() - .map(entry -> buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) - .collect(Collectors.toList()); - final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); - result.setResults(results); - return result; - }); + final List results = + stepStatePropertiesMap.entrySet().stream() + .map( + entry -> + buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); + result.setResults(results); + return result; + }); } @Nonnull - private Map buildUrnToIdMap(@Nonnull final List ids, @Nonnull final Authentication authentication) + private Map buildUrnToIdMap( + @Nonnull final List ids, @Nonnull final Authentication authentication) throws RemoteInvocationException { final Map urnToIdMap = new HashMap<>(); for (final String id : ids) { @@ -99,37 +108,37 @@ private Urn getStepStateUrn(@Nonnull final String id) { } @Nullable - private DataHubStepStateProperties getStepStateProperties(@Nonnull final Urn urn, - @Nonnull final EntityResponse entityResponse) { + private DataHubStepStateProperties getStepStateProperties( + @Nonnull final Urn urn, @Nonnull final EntityResponse entityResponse) { final EnvelopedAspectMap aspectMap = entityResponse.getAspects(); // If aspect is not present, log the error and return null. if (!aspectMap.containsKey(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME)) { log.error("Failed to find step state properties for urn: " + urn); return null; } - return new DataHubStepStateProperties(aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); + return new DataHubStepStateProperties( + aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); } @Nonnull - private StepStateResult buildStepStateResult(@Nonnull final String id, - @Nonnull final DataHubStepStateProperties stepStateProperties) { + private StepStateResult buildStepStateResult( + @Nonnull final String id, @Nonnull final DataHubStepStateProperties stepStateProperties) { final StepStateResult result = new StepStateResult(); result.setId(id); - final List mappedProperties = stepStateProperties - .getProperties() - .entrySet() - .stream() - .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); + final List mappedProperties = + stepStateProperties.getProperties().entrySet().stream() + .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()); result.setProperties(mappedProperties); return result; } @Nonnull - private StringMapEntry buildStringMapEntry(@Nonnull final String key, @Nonnull final String value) { + private StringMapEntry buildStringMapEntry( + @Nonnull final String key, @Nonnull final String value) { final StringMapEntry entry = new StringMapEntry(); entry.setKey(key); entry.setValue(value); return entry; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java index e4c21207ddd34..23d77ebba7457 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java @@ -1,5 +1,9 @@ 
package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -25,19 +29,15 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchUpdateStepStatesResolver implements DataFetcher> { +public class BatchUpdateStepStatesResolver + implements DataFetcher> { private final EntityClient _entityClient; @Override - public CompletableFuture get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -46,20 +46,23 @@ public CompletableFuture get(@Nonnull final DataFet final List states = input.getStates(); final String actorUrnStr = authentication.getActor().toUrnStr(); - return CompletableFuture.supplyAsync(() -> { - final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); - final AuditStamp auditStamp = new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); - final List results = states - .stream() - .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) - .collect(Collectors.toList()); - final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); - result.setResults(results); - return result; - }); + return CompletableFuture.supplyAsync( + () -> { + final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); + final AuditStamp auditStamp = + new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); + final List results = + states.stream() + .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) + .collect(Collectors.toList()); + final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); + result.setResults(results); + return result; + }); } - private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStateInput state, + private UpdateStepStateResult buildUpdateStepStateResult( + @Nonnull final StepStateInput state, @Nonnull final AuditStamp auditStamp, @Nonnull final Authentication authentication) { final String id = state.getId(); @@ -70,19 +73,27 @@ private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStat return updateStepStateResult; } - private boolean updateStepState(@Nonnull final String id, - @Nonnull final List inputProperties, @Nonnull final AuditStamp auditStamp, + private boolean updateStepState( + @Nonnull final String id, + @Nonnull final List inputProperties, + @Nonnull final AuditStamp auditStamp, @Nonnull final Authentication authentication) { final Map properties = - inputProperties.stream().collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + inputProperties.stream() + .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); try { final DataHubStepStateKey stepStateKey = new DataHubStepStateKey().setId(id); final DataHubStepStateProperties stepStateProperties = - new 
DataHubStepStateProperties().setProperties(new StringMap(properties)).setLastModified(auditStamp); + new DataHubStepStateProperties() + .setProperties(new StringMap(properties)) + .setLastModified(auditStamp); final MetadataChangeProposal proposal = - buildMetadataChangeProposal(DATAHUB_STEP_STATE_ENTITY_NAME, stepStateKey, - DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, stepStateProperties); + buildMetadataChangeProposal( + DATAHUB_STEP_STATE_ENTITY_NAME, + stepStateKey, + DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, + stepStateProperties); _entityClient.ingestProposal(proposal, authentication, false); return true; } catch (Exception e) { @@ -90,4 +101,4 @@ private boolean updateStepState(@Nonnull final String id, return false; } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java index 9ee24e6941017..153c95c697a77 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -22,14 +27,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS privilege. + * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS + * privilege. */ @Slf4j @RequiredArgsConstructor @@ -42,43 +42,58 @@ public class CreateTagResolver implements DataFetcher> public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateTagInput input = bindArgument(environment.getArgument("input"), CreateTagInput.class); - - return CompletableFuture.supplyAsync(() -> { + final CreateTagInput input = + bindArgument(environment.getArgument("input"), CreateTagInput.class); - if (!AuthorizationUtils.canCreateTags(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateTags(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - try { - // Create the Tag Key - final TagKey key = new TagKey(); + try { + // Create the Tag Key + final TagKey key = new TagKey(); - // Take user provided id OR generate a random UUID for the Tag. - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); + // Take user provided id OR generate a random UUID for the Tag. + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Tag already exists!"); - } + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Tag already exists!"); + } - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); - String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); + String tagUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } - OwnerUtils.addCreatorAsOwner(context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return tagUrn; - } catch (Exception e) { - log.error("Failed to create Tag with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + OwnerUtils.addCreatorAsOwner( + context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); + return tagUrn; + } catch (Exception e) { + log.error( + "Failed to create Tag with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), + e); + } + }); } private TagProperties mapTagProperties(final CreateTagInput input) { @@ -87,4 +102,4 @@ private TagProperties mapTagProperties(final CreateTagInput input) { result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java index e6c3cf49df8db..c5b86b013103c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java @@ -11,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group 
*/
 @Slf4j
 public class DeleteTagResolver implements DataFetcher<CompletableFuture<Boolean>> {
 
@@ -25,33 +22,41 @@ public DeleteTagResolver(final EntityClient entityClient) {
   }
 
   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String tagUrn = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(tagUrn);
 
-    return CompletableFuture.supplyAsync(() -> {
-
-      if (AuthorizationUtils.canManageTags(context) || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) {
-        try {
-          _entityClient.deleteEntity(urn, context.getAuthentication());
-
-          // Asynchronously Delete all references to the entity (to return quickly)
-          CompletableFuture.runAsync(() -> {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (AuthorizationUtils.canManageTags(context)
+              || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) {
             try {
-              _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+              _entityClient.deleteEntity(urn, context.getAuthentication());
+
+              // Asynchronously Delete all references to the entity (to return quickly)
+              CompletableFuture.runAsync(
+                  () -> {
+                    try {
+                      _entityClient.deleteEntityReferences(urn, context.getAuthentication());
+                    } catch (Exception e) {
+                      log.error(
+                          String.format(
+                              "Caught exception while attempting to clear all entity references for Tag with urn %s",
+                              urn),
+                          e);
+                    }
+                  });
+
+              return true;
             } catch (Exception e) {
-              log.error(String.format(
-                  "Caught exception while attempting to clear all entity references for Tag with urn %s", urn), e);
+              throw new RuntimeException(
+                  String.format("Failed to perform delete against Tag with urn %s", tagUrn), e);
             }
-          });
-
-          return true;
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", tagUrn), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java index e2aa5905be8bd..7b9290b4532b5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -21,19 +24,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class SetTagColorResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -42,48 +43,55 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn tagUrn = Urn.createFromString(environment.getArgument("urn")); final String colorHex = environment.getArgument("colorHex"); - return CompletableFuture.supplyAsync(() -> { - - // If user is not authorized, then throw exception. - if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - // If tag does not exist, then throw exception. - if (!_entityService.exists(tagUrn)) { - throw new IllegalArgumentException( - String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); - } - - try { - TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( - tagUrn.toString(), - TAG_PROPERTIES_ASPECT_NAME, - _entityService, - null); - - if (tagProperties == null) { - throw new IllegalArgumentException("Failed to set tag color. Tag properties does not yet exist!"); - } - - tagProperties.setColorHex(colorHex); - - // Update the TagProperties aspect. 
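The flow being reformatted here is a plain read-modify-write over a single aspect. A sketch under that reading; AspectDao is a hypothetical stand-in for the EntityService/EntityClient pair, and the hard-coded "tagProperties" aspect name mirrors the constant used in the real resolver.

import com.linkedin.tag.TagProperties;

public final class TagColorSketch {
  // Hypothetical DAO: read the current aspect, ingest the mutated one.
  interface AspectDao {
    TagProperties read(String urn, String aspectName);
    void ingest(String urn, String aspectName, TagProperties next);
  }

  // Read the aspect, mutate one field, and write the whole aspect back. The
  // real resolver wraps the write in a MetadataChangeProposal before ingesting.
  static boolean setColor(AspectDao dao, String tagUrn, String colorHex) {
    TagProperties props = dao.read(tagUrn, "tagProperties");
    if (props == null) {
      throw new IllegalArgumentException("Failed to set tag color. Tag properties do not yet exist!");
    }
    props.setColorHex(colorHex);
    dao.ingest(tagUrn, "tagProperties", props);
    return true;
  }
}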
- final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set color for Tag with urn %s", tagUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // If user is not authorized, then throw exception. + if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + // If tag does not exist, then throw exception. + if (!_entityService.exists(tagUrn)) { + throw new IllegalArgumentException( + String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); + } + + try { + TagProperties tagProperties = + (TagProperties) + EntityUtils.getAspectFromEntity( + tagUrn.toString(), TAG_PROPERTIES_ASPECT_NAME, _entityService, null); + + if (tagProperties == null) { + throw new IllegalArgumentException( + "Failed to set tag color. Tag properties does not yet exist!"); + } + + tagProperties.setColorHex(colorHex); + + // Update the TagProperties aspect. + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); + throw new RuntimeException( + String.format("Failed to set color for Tag with urn %s", tagUrn), e); + } + }); } public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -92,4 +100,4 @@ public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, U entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java index 14ae9f96eb683..b12b345a7b211 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.data.template.SetMode; import 
com.linkedin.datahub.graphql.QueryContext; @@ -15,15 +20,7 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates a Test. Requires the MANAGE_TESTS privilege. - */ +/** Creates or updates a Test. Requires the MANAGE_TESTS privilege. */ public class CreateTestResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -36,39 +33,44 @@ public CreateTestResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final CreateTestInput input = bindArgument(environment.getArgument("input"), CreateTestInput.class); - - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - - try { - - // Create new test - // Since we are creating a new Test, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = input.getId() == null ? uuid.toString() : input.getId(); - - // Create the Ingestion source key - final TestKey key = new TestKey(); - key.setId(uuidStr); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), - authentication)) { - throw new IllegalArgumentException("This Test already exists!"); + final CreateTestInput input = + bindArgument(environment.getArgument("input"), CreateTestInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + + try { + + // Create new test + // Since we are creating a new Test, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = input.getId() == null ? uuid.toString() : input.getId(); + + // Create the Ingestion source key + final TestKey key = new TestKey(); + key.setId(uuidStr); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), authentication)) { + throw new IllegalArgumentException("This Test already exists!"); + } + + // Create the Test info. + final TestInfo info = mapCreateTestInput(input); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } } - - // Create the Test info. - final TestInfo info = mapCreateTestInput(input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private static TestInfo mapCreateTestInput(final CreateTestInput input) { @@ -79,5 +81,4 @@ private static TestInfo mapCreateTestInput(final CreateTestInput input) { result.setDefinition(mapDefinition(input.getDefinition())); return result; } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java index e0c878dc652bd..6bc7e479b305c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -7,8 +9,6 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - /** * Resolver responsible for hard deleting a particular DataHub Test. Requires MANAGE_TESTS @@ -23,20 +23,24 @@ public DeleteTestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String testUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(testUrn); - return CompletableFuture.supplyAsync(() -> { - if (canManageTests(context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against Test with urn %s", testUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against Test with urn %s", testUrn), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index a1e1e48aae847..f345d9ceb21e5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.datahub.graphql.generated.ListTestsResult; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -20,12 +23,9 @@ import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - - /** - * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform + * privilege. */ public class ListTestsResolver implements DataFetcher> { @@ -39,45 +39,50 @@ public ListTestsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - final ListTestsInput input = bindArgument(environment.getArgument("input"), ListTestsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? "" : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + final ListTestsInput input = + bindArgument(environment.getArgument("input"), ListTestsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? "" : input.getQuery(); - try { - // First, get all group Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.TEST_ENTITY_NAME, - query, - Collections.emptyMap(), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all group Urns. 
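The list resolvers share one skeleton: apply paging defaults, run the search, and return lightweight stubs that a separate batch resolver hydrates later. A minimal sketch of that skeleton; SearchFn is an invented stand-in for EntityClient.search and the paging defaults are illustrative only.

import java.util.ArrayList;
import java.util.List;

public final class ListPatternSketch {
  // Invented search hook: returns matching urns for one page of results.
  interface SearchFn {
    List<String> search(String query, int start, int count);
  }

  // Null-safe paging defaults, then the search; callers map the returned urns
  // to partial objects that a downstream batch resolver fills in.
  static List<String> listUrns(SearchFn client, String query, Integer start, Integer count) {
    int from = start == null ? 0 : start;
    int size = count == null ? 20 : count;
    String q = query == null ? "" : query;
    return new ArrayList<>(client.search(q, from, size));
  }
}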
+ final SearchResult gmsResult = + _entityClient.search( + Constants.TEST_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. - final ListTestsResult result = new ListTestsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setTests(mapUnresolvedTests(gmsResult.getEntities())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list tests", e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Now that we have entities we can bind this to a result. + final ListTestsResult result = new ListTestsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setTests(mapUnresolvedTests(gmsResult.getEntities())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list tests", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - // This method maps urns returned from the list endpoint into Partial Test objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Test objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedTests(final SearchEntityArray entityArray) { final List results = new ArrayList<>(); for (final SearchEntity entity : entityArray) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java index 9c4b5a4d4e0fa..6cb55100ec08e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java @@ -20,10 +20,7 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver used for fetching the list of tests for an entity - */ +/** GraphQL Resolver used for fetching the list of tests for an entity */ @Slf4j public class TestResultsResolver implements DataFetcher> { @@ -38,42 +35,44 @@ public CompletableFuture get(DataFetchingEnvironment environment) t final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); + return CompletableFuture.supplyAsync( + () -> { + final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); - if (gmsTestResults == null) { - return null; - } + if (gmsTestResults == null) { + return null; + } - TestResults testResults = new TestResults(); - testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); - testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); - return testResults; - }); + TestResults testResults = new TestResults(); + testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); + 
testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); + return testResults; + }); } @Nullable - private com.linkedin.test.TestResults getTestResults(final Urn entityUrn, final QueryContext context) { + private com.linkedin.test.TestResults getTestResults( + final Urn entityUrn, final QueryContext context) { try { - final EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), - context.getAuthentication()); - if (entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { + final EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { return new com.linkedin.test.TestResults( - entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME) - .getValue() - .data()); + entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME).getValue().data()); } return null; } catch (Exception e) { - throw new RuntimeException("Failed to get test results", e); + throw new RuntimeException("Failed to get test results", e); } } - private List mapTestResults(final @Nonnull List gmsResults) { + private List mapTestResults( + final @Nonnull List gmsResults) { final List results = new ArrayList<>(); for (com.linkedin.test.TestResult gmsResult : gmsResults) { results.add(mapTestResult(gmsResult)); @@ -89,4 +88,4 @@ private TestResult mapTestResult(final @Nonnull com.linkedin.test.TestResult gms testResult.setType(TestResultType.valueOf(gmsResult.getType().toString())); return testResult; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java index 248da3e58d8ae..922c28097f83c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,14 +17,9 @@ import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; - - public class TestUtils { - /** - * Returns true if the authenticated user is able to manage tests. - */ + /** Returns true if the authenticated user is able to manage tests. 
*/
   public static boolean canManageTests(@Nonnull QueryContext context) {
     return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE);
   }
@@ -38,11 +35,12 @@ public static EntityResponse buildEntityResponse(Map<String, RecordTemplate> asp
     final EntityResponse entityResponse = new EntityResponse();
     final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
     for (Map.Entry<String, RecordTemplate> entry : aspects.entrySet()) {
-      aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
+      aspectMap.put(
+          entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
     }
     entityResponse.setAspects(aspectMap);
     return entityResponse;
   }

-  private TestUtils() { }
+  private TestUtils() {}
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
index 1dd8518076796..b5d6e50fe0774 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.test;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.data.template.SetMode;
@@ -13,15 +18,7 @@
 import graphql.schema.DataFetchingEnvironment;
 import java.util.concurrent.CompletableFuture;

-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Updates or updates a Test. Requires the MANAGE_TESTS privilege.
- */
+/** Creates or updates a Test. Requires the MANAGE_TESTS privilege. */
 public class UpdateTestResolver implements DataFetcher<CompletableFuture<String>> {

   private final EntityClient _entityClient;
@@ -35,26 +32,30 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
     final QueryContext context = environment.getContext();
     final Authentication authentication = context.getAuthentication();

-    return CompletableFuture.supplyAsync(() -> {
-
-      if (canManageTests(context)) {
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (canManageTests(context)) {

-        final String urn = environment.getArgument("urn");
-        final UpdateTestInput input = bindArgument(environment.getArgument("input"), UpdateTestInput.class);
+            final String urn = environment.getArgument("urn");
+            final UpdateTestInput input =
+                bindArgument(environment.getArgument("input"), UpdateTestInput.class);

-        // Update the Test info - currently this simply creates a new test with same urn.
-        final TestInfo info = mapUpdateTestInput(input);
+            // Update the Test info - currently this simply creates a new test with the same urn.
+            final TestInfo info = mapUpdateTestInput(input);

-        final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info);
-        try {
-          return _entityClient.ingestProposal(proposal, authentication, false);
-        } catch (Exception e) {
-          throw new RuntimeException(
-              String.format("Failed to perform update against Test with urn %s", input), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+            final MetadataChangeProposal proposal =
+                buildMetadataChangeProposalWithUrn(
+                    UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info);
+            try {
+              return _entityClient.ingestProposal(proposal, authentication, false);
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to perform update against Test with urn %s", urn), e);
+            }
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }

   private static TestInfo mapUpdateTestInput(final UpdateTestInput input) {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java
index 499e7c9ac177d..ea234280ed6c2 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.timeline;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.GetSchemaBlameInput;
 import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult;
@@ -16,15 +18,13 @@
 import java.util.concurrent.CompletableFuture;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 /*
 Returns the most recent changes made to each column in a dataset at each dataset version.
 TODO: Add tests for this resolver.
  */
 @Slf4j
-public class GetSchemaBlameResolver implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> {
+public class GetSchemaBlameResolver
+    implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> {
   private final TimelineService _timelineService;

   public GetSchemaBlameResolver(TimelineService timelineService) {
@@ -32,37 +32,37 @@ public GetSchemaBlameResolver(TimelineService timelineService) {
   }

   @Override
-  public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment) throws Exception {
-    final GetSchemaBlameInput input = bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class);
+  public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment)
+      throws Exception {
+    final GetSchemaBlameInput input =
+        bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class);

     final String datasetUrnString = input.getDatasetUrn();
     final long startTime = 0;
     final long endTime = 0;
    final String version = input.getVersion() == null ?
null : input.getVersion(); - return CompletableFuture.supplyAsync(() -> { - try { - final Set changeCategorySet = Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); - final Urn datasetUrn = Urn.createFromString(datasetUrnString); - final List changeTransactionList = - _timelineService.getTimeline( - datasetUrn, - changeCategorySet, - startTime, - endTime, - null, - null, - false); - return SchemaBlameMapper.map(changeTransactionList, version); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set changeCategorySet = + Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); + final Urn datasetUrn = Urn.createFromString(datasetUrnString); + final List changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaBlameMapper.map(changeTransactionList, version); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java index cfad1395a61a8..5063dbbf7ccf3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListInput; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; @@ -16,14 +18,12 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. 
*/ @Slf4j -public class GetSchemaVersionListResolver implements DataFetcher> { +public class GetSchemaVersionListResolver + implements DataFetcher> { private final TimelineService _timelineService; public GetSchemaVersionListResolver(TimelineService timelineService) { @@ -31,7 +31,8 @@ public GetSchemaVersionListResolver(TimelineService timelineService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final GetSchemaVersionListInput input = bindArgument(environment.getArgument("input"), GetSchemaVersionListInput.class); @@ -39,23 +40,27 @@ public CompletableFuture get(final DataFetchingEnvir final long startTime = 0; final long endTime = 0; - return CompletableFuture.supplyAsync(() -> { - try { - final Set changeCategorySet = new HashSet<>(); - changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); - Urn datasetUrn = Urn.createFromString(datasetUrnString); - List changeTransactionList = - _timelineService.getTimeline(datasetUrn, changeCategorySet, startTime, endTime, null, null, false); - return SchemaVersionListMapper.map(changeTransactionList); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set changeCategorySet = new HashSet<>(); + changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); + Urn datasetUrn = Urn.createFromString(datasetUrnString); + List changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaVersionListMapper.map(changeTransactionList); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java index 45998bdae45b0..14429696fefd4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java @@ -5,16 +5,18 @@ import graphql.schema.TypeResolver; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface + * type. */ public class AspectInterfaceTypeResolver implements TypeResolver { - public AspectInterfaceTypeResolver() { } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this - // out in the case we ever want to return fields of type Aspect in graphql. Right now - // we just use Aspect to define the shared `version` field. 
- return null; - } + public AspectInterfaceTypeResolver() {} + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this + // out in the case we ever want to return fields of type Aspect in graphql. Right now + // we just use Aspect to define the shared `version` field. + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java index 1a5f06da04014..52c20254332b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java @@ -6,27 +6,29 @@ import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - import java.util.List; import java.util.stream.Collectors; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface + * type. */ public class EntityInterfaceTypeResolver implements TypeResolver { - private final List> _entities; + private final List> _entities; - public EntityInterfaceTypeResolver(final List> entities) { - _entities = entities; - } + public EntityInterfaceTypeResolver(final List> entities) { + _entities = entities; + } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - Object javaObject = env.getObject(); - final LoadableType filteredEntity = Iterables.getOnlyElement(_entities.stream() + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + Object javaObject = env.getObject(); + final LoadableType filteredEntity = + Iterables.getOnlyElement( + _entities.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); - } + return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java index a69500f24ee24..aeeb9bafa1f4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java @@ -4,30 +4,30 @@ import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class HyperParameterValueTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; - public static final String INT_BOX = "IntBox"; - public static final String FLOAT_BOX = "FloatBox"; - public static final String BOOLEAN_BOX = "BooleanBox"; + public static final String STRING_BOX = "StringBox"; + public static final 
String INT_BOX = "IntBox"; + public static final String FLOAT_BOX = "FloatBox"; + public static final String BOOLEAN_BOX = "BooleanBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else if (env.getObject() instanceof IntBox) { - return env.getSchema().getObjectType(INT_BOX); - } else if (env.getObject() instanceof BooleanBox) { - return env.getSchema().getObjectType(BOOLEAN_BOX); - } else if (env.getObject() instanceof FloatBox) { - return env.getSchema().getObjectType(FLOAT_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else if (env.getObject() instanceof IntBox) { + return env.getSchema().getObjectType(INT_BOX); + } else if (env.getObject() instanceof BooleanBox) { + return env.getSchema().getObjectType(BOOLEAN_BOX); + } else if (env.getObject() instanceof FloatBox) { + return env.getSchema().getObjectType(FLOAT_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java index 25a9a540f51b1..ff190cff1339e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java @@ -8,17 +8,17 @@ public class PlatformSchemaUnionTypeResolver implements TypeResolver { - private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; - private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; + private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; + private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TableSchema) { - return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); - } else if (env.getObject() instanceof KeyValueSchema) { - return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TableSchema) { + return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); + } else if (env.getObject() instanceof KeyValueSchema) { + return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); + } else { + throw new RuntimeException("Unrecognized object type provided to type resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java index 0dc7b0485c51c..c5be5725f1d45 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.type; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class ResultsTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; + public static final String STRING_BOX = "StringBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java index 5263e6b9b7df6..c66588008b103 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java @@ -6,7 +6,8 @@ public class TimeSeriesAspectInterfaceTypeResolver implements TypeResolver { - public TimeSeriesAspectInterfaceTypeResolver() { } + public TimeSeriesAspectInterfaceTypeResolver() {} + @Override public GraphQLObjectType getType(TypeResolutionEnvironment env) { // TODO(John): Fill this out. diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java index d02f1a5f786a7..db26da05a2ba4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -10,14 +13,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Resolver responsible for creating a password reset token that Admins can share with native users to reset their - * credentials. + * Resolver responsible for creating a password reset token that Admins can share with native users + * to reset their credentials. 
*/ -public class CreateNativeUserResetTokenResolver implements DataFetcher> { +public class CreateNativeUserResetTokenResolver + implements DataFetcher> { private final NativeUserService _nativeUserService; public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserService) { @@ -25,7 +26,8 @@ public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserServ } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateNativeUserResetTokenInput input = bindArgument(environment.getArgument("input"), CreateNativeUserResetTokenInput.class); @@ -38,15 +40,18 @@ public CompletableFuture get(final DataFetchingEnvironment environme "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - String resetToken = - _nativeUserService.generateNativeUserPasswordResetToken(userUrnString, context.getAuthentication()); - return new ResetToken(resetToken); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to generate password reset token for user: %s", userUrnString)); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String resetToken = + _nativeUserService.generateNativeUserPasswordResetToken( + userUrnString, context.getAuthentication()); + return new ResetToken(resetToken); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to generate password reset token for user: %s", userUrnString)); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 69da642ad6bb1..215d53299c8ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,10 +26,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListUsersResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -40,48 +39,60 @@ public ListUsersResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListUsersInput input = bindArgument(environment.getArgument("input"), ListUsersInput.class); + final ListUsersInput input = + bindArgument(environment.getArgument("input"), ListUsersInput.class); final Integer start = input.getStart() == null ? 
DEFAULT_START : input.getStart();
       final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount();
       final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();

-      return CompletableFuture.supplyAsync(() -> {
-        try {
-          // First, get all policy Urns.
-          final SearchResult gmsResult =
-              _entityClient.search(CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, count,
-                  context.getAuthentication(), new SearchFlags().setFulltext(true));
+      return CompletableFuture.supplyAsync(
+          () -> {
+            try {
+              // First, get all policy Urns.
+              final SearchResult gmsResult =
+                  _entityClient.search(
+                      CORP_USER_ENTITY_NAME,
+                      query,
+                      Collections.emptyMap(),
+                      start,
+                      count,
+                      context.getAuthentication(),
+                      new SearchFlags().setFulltext(true));

-          // Then, get hydrate all users.
-          final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME,
-              new HashSet<>(gmsResult.getEntities().stream()
-                  .map(SearchEntity::getEntity)
-                  .collect(Collectors.toList())
-              ), null, context.getAuthentication());
+              // Then, hydrate all users.
+              final Map<Urn, EntityResponse> entities =
+                  _entityClient.batchGetV2(
+                      CORP_USER_ENTITY_NAME,
+                      new HashSet<>(
+                          gmsResult.getEntities().stream()
+                              .map(SearchEntity::getEntity)
+                              .collect(Collectors.toList())),
+                      null,
+                      context.getAuthentication());

-          // Now that we have entities we can bind this to a result.
-          final ListUsersResult result = new ListUsersResult();
-          result.setStart(gmsResult.getFrom());
-          result.setCount(gmsResult.getPageSize());
-          result.setTotal(gmsResult.getNumEntities());
-          result.setUsers(mapEntities(entities.values()));
-          return result;
-        } catch (Exception e) {
-          throw new RuntimeException("Failed to list users", e);
-        }
-      });
+              // Now that we have entities we can bind this to a result.
+              final ListUsersResult result = new ListUsersResult();
+              result.setStart(gmsResult.getFrom());
+              result.setCount(gmsResult.getPageSize());
+              result.setTotal(gmsResult.getNumEntities());
+              result.setUsers(mapEntities(entities.values()));
+              return result;
+            } catch (Exception e) {
+              throw new RuntimeException("Failed to list users", e);
+            }
+          });
     }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action.
Please contact your DataHub administrator."); } private List mapEntities(final Collection entities) { - return entities.stream() - .map(CorpUserMapper::map) - .collect(Collectors.toList()); + return entities.stream().map(CorpUserMapper::map).collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java index 718810e4710e7..7131a9d2a9a26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java @@ -10,10 +10,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp User - */ +/** Resolver responsible for hard deleting a particular DataHub Corp User */ @Slf4j public class RemoveUserResolver implements DataFetcher> { @@ -24,30 +21,39 @@ public RemoveUserResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String userUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(userUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for user with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against user with urn %s", userUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against user with urn %s", userUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator.");
  }
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java
index ab04d26fb5801..6a0e81a10f40b 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.user;

+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -13,12 +16,9 @@
 import graphql.schema.DataFetchingEnvironment;
 import java.util.concurrent.CompletableFuture;

-import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform privilege.
+ * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform
+ * privilege.
  */
 public class UpdateUserStatusResolver implements DataFetcher<CompletableFuture<String>> {

@@ -37,20 +37,28 @@ public CompletableFuture<String> get(final DataFetchingEnvironment environment)
       final CorpUserStatus newStatus = CorpUserStatus.valueOf(environment.getArgument("status"));

       // Create ths status aspect
-      final com.linkedin.identity.CorpUserStatus statusAspect = new com.linkedin.identity.CorpUserStatus();
+      final com.linkedin.identity.CorpUserStatus statusAspect =
+          new com.linkedin.identity.CorpUserStatus();
       statusAspect.setStatus(newStatus.toString());
-      statusAspect.setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(Urn.createFromString(context.getActorUrn())));
-
-      return CompletableFuture.supplyAsync(() -> {
-        try {
-          final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(userUrn),
-              CORP_USER_STATUS_ASPECT_NAME, statusAspect);
-          return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to update user status for urn", userUrn), e);
-        }
-      });
+      statusAspect.setLastModified(
+          new AuditStamp()
+              .setTime(System.currentTimeMillis())
+              .setActor(Urn.createFromString(context.getActorUrn())));
+
+      return CompletableFuture.supplyAsync(
+          () -> {
+            try {
+              final MetadataChangeProposal proposal =
+                  buildMetadataChangeProposalWithUrn(
+                      UrnUtils.getUrn(userUrn), CORP_USER_STATUS_ASPECT_NAME, statusAspect);
+              return _entityClient.ingestProposal(proposal, context.getAuthentication(), false);
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to update user status for urn %s", userUrn), e);
+            }
+          });
     }
-    throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
+    throw new AuthorizationException(
+        "Unauthorized to perform this action.
Please contact your DataHub administrator.");
  }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java
index 6e39879dd56bc..830c9013835d0 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.view;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
@@ -18,12 +20,7 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver responsible for updating a particular DataHub View
- */
+/** Resolver responsible for creating a particular DataHub View */
 @Slf4j
 public class CreateViewResolver implements DataFetcher<CompletableFuture<DataHubView>> {

@@ -34,29 +31,34 @@ public CreateViewResolver(@Nonnull final ViewService viewService) {
   }

   @Override
-  public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
-    final CreateViewInput input = bindArgument(environment.getArgument("input"), CreateViewInput.class);
+    final CreateViewInput input =
+        bindArgument(environment.getArgument("input"), CreateViewInput.class);

-    return CompletableFuture.supplyAsync(() -> {
-      if (ViewUtils.canCreateView(
-          DataHubViewType.valueOf(input.getViewType().toString()),
-          context)) {
-        try {
-          final Urn urn = _viewService.createView(
-              DataHubViewType.valueOf(input.getViewType().toString()),
-              input.getName(),
-              input.getDescription(),
-              ViewUtils.mapDefinition(input.getDefinition()),
-              context.getAuthentication(),
-              System.currentTimeMillis());
-          return createView(urn, input);
-        } catch (Exception e) {
-          throw new RuntimeException(String.format("Failed to create View with input: %s", input), e);
-        }
-      }
-      throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          if (ViewUtils.canCreateView(
+              DataHubViewType.valueOf(input.getViewType().toString()), context)) {
+            try {
+              final Urn urn =
+                  _viewService.createView(
+                      DataHubViewType.valueOf(input.getViewType().toString()),
+                      input.getName(),
+                      input.getDescription(),
+                      ViewUtils.mapDefinition(input.getDefinition()),
+                      context.getAuthentication(),
+                      System.currentTimeMillis());
+              return createView(urn, input);
+            } catch (Exception e) {
+              throw new RuntimeException(
+                  String.format("Failed to create View with input: %s", input), e);
+            }
+          }
+          throw new AuthorizationException(
+              "Unauthorized to perform this action. Please contact your DataHub administrator.");
+        });
   }

   private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateViewInput input) {
@@ -66,15 +68,20 @@ private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateView
         .setViewType(input.getViewType())
         .setName(input.getName())
         .setDescription(input.getDescription())
-        .setDefinition(new DataHubViewDefinition(
-            input.getDefinition().getEntityTypes(),
-            new DataHubViewFilter(
-                input.getDefinition().getFilter().getOperator(),
-                input.getDefinition().getFilter().getFilters().stream().map(filterInput ->
-                    new FacetFilter(filterInput.getField(), filterInput.getCondition(),
-                        filterInput.getValues(),
-                        filterInput.getNegated()))
-                    .collect(Collectors.toList()))))
+        .setDefinition(
+            new DataHubViewDefinition(
+                input.getDefinition().getEntityTypes(),
+                new DataHubViewFilter(
+                    input.getDefinition().getFilter().getOperator(),
+                    input.getDefinition().getFilter().getFilters().stream()
+                        .map(
+                            filterInput ->
+                                new FacetFilter(
+                                    filterInput.getField(),
+                                    filterInput.getCondition(),
+                                    filterInput.getValues(),
+                                    filterInput.getNegated()))
+                        .collect(Collectors.toList()))))
         .build();
   }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java
index 2b8c3b8640aa8..a3b21ad0c9681 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java
@@ -11,10 +11,7 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;

-
-/**
- * Resolver responsible for hard deleting a particular DataHub View
- */
+/** Resolver responsible for hard deleting a particular DataHub View */
 @Slf4j
 public class DeleteViewResolver implements DataFetcher<CompletableFuture<Boolean>> {

@@ -25,24 +22,27 @@ public DeleteViewResolver(@Nonnull final ViewService viewService) {
   }

   @Override
-  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<Boolean> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String urnStr = environment.getArgument("urn");
     final Urn urn = Urn.createFromString(urnStr);
-    return CompletableFuture.supplyAsync(() -> {
-      try {
-        if (ViewUtils.canUpdateView(_viewService, urn, context)) {
-          _viewService.deleteView(urn, context.getAuthentication());
-          log.info(String.format("Successfully deleted View %s with urn", urn));
-          return true;
-        }
-        throw new AuthorizationException(
-            "Unauthorized to perform this action. Please contact your DataHub administrator.");
-      } catch (AuthorizationException e) {
-        throw e;
-      } catch (Exception e) {
-        throw new RuntimeException(String.format("Failed to perform delete against View with urn %s", urn), e);
-      }
-    });
+    return CompletableFuture.supplyAsync(
+        () -> {
+          try {
+            if (ViewUtils.canUpdateView(_viewService, urn, context)) {
+              _viewService.deleteView(urn, context.getAuthentication());
+              log.info(String.format("Successfully deleted View with urn %s", urn));
+              return true;
+            }
+            throw new AuthorizationException(
+                "Unauthorized to perform this action.
Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against View with urn %s", urn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index 51bbcfcfa25ae..caa37f8264854 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -30,20 +32,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing global DataHub Views. - */ +/** Resolver used for listing global DataHub Views. */ @Slf4j public class ListGlobalViewsResolver implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -55,43 +51,50 @@ public ListGlobalViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListGlobalViewsInput input = bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); + final ListGlobalViewsInput input = + bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery();

-      try {
-        final SearchResult gmsResult = _entityClient.search(
-            Constants.DATAHUB_VIEW_ENTITY_NAME,
-            query,
-            buildFilters(),
-            DEFAULT_SORT_CRITERION,
-            start,
-            count,
-            context.getAuthentication(),
-            new SearchFlags().setFulltext(true));
+          try {

+            final SearchResult gmsResult =
+                _entityClient.search(
+                    Constants.DATAHUB_VIEW_ENTITY_NAME,
+                    query,
+                    buildFilters(),
+                    DEFAULT_SORT_CRITERION,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true));

-        final ListViewsResult result = new ListViewsResult();
-        result.setStart(gmsResult.getFrom());
-        result.setCount(gmsResult.getPageSize());
-        result.setTotal(gmsResult.getNumEntities());
-        result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream()
-            .map(SearchEntity::getEntity)
-            .collect(Collectors.toList())));
-        return result;
-      } catch (Exception e) {
-        throw new RuntimeException("Failed to list global Views", e);
-      }
-    });
+            final ListViewsResult result = new ListViewsResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setViews(
+                mapUnresolvedViews(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list global Views", e);
+          }
+        });
   }

-  // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial View objects which will be
+  // resolved by a separate Batch resolver.
   private List<DataHubView> mapUnresolvedViews(final List<Urn> entityUrns) {
     final List<DataHubView> results = new ArrayList<>();
     for (final Urn urn : entityUrns) {
@@ -107,7 +110,12 @@ private Filter buildFilters() {
     final AndFilterInput globalCriteria = new AndFilterInput();
     List<FacetFilterInput> andConditions = new ArrayList<>();
     andConditions.add(
-        new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(DataHubViewType.GLOBAL.name()), false, FilterOperator.EQUAL));
+        new FacetFilterInput(
+            VIEW_TYPE_FIELD,
+            null,
+            ImmutableList.of(DataHubViewType.GLOBAL.name()),
+            false,
+            FilterOperator.EQUAL));
     globalCriteria.setAnd(andConditions);
     return buildFilter(Collections.emptyList(), ImmutableList.of(globalCriteria));
   }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
index d8705e216503c..945d2d50bcc3e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.view;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -30,21 +32,15 @@
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
-/**
- * Resolver used for listing the current user's DataHub Views.
- */
+/** Resolver used for listing the current user's DataHub Views.
*/ @Slf4j public class ListMyViewsResolver implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; private static final String CREATOR_URN_FIELD = "createdBy"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -56,44 +52,52 @@ public ListMyViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListMyViewsInput input = bindArgument(environment.getArgument("input"), ListMyViewsInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final String viewType = input.getViewType() == null ? null : input.getViewType().toString(); - - try { - - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(viewType, context.getActorUrn()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Views", e); - } - }); + final ListMyViewsInput input = + bindArgument(environment.getArgument("input"), ListMyViewsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final String viewType = + input.getViewType() == null ? 
null : input.getViewType().toString();
+
+          try {
+
+            final SearchResult gmsResult =
+                _entityClient.search(
+                    Constants.DATAHUB_VIEW_ENTITY_NAME,
+                    query,
+                    buildFilters(viewType, context.getActorUrn()),
+                    DEFAULT_SORT_CRITERION,
+                    start,
+                    count,
+                    context.getAuthentication(),
+                    new SearchFlags().setFulltext(true));
+
+            final ListViewsResult result = new ListViewsResult();
+            result.setStart(gmsResult.getFrom());
+            result.setCount(gmsResult.getPageSize());
+            result.setTotal(gmsResult.getNumEntities());
+            result.setViews(
+                mapUnresolvedViews(
+                    gmsResult.getEntities().stream()
+                        .map(SearchEntity::getEntity)
+                        .collect(Collectors.toList())));
+            return result;
+          } catch (Exception e) {
+            throw new RuntimeException("Failed to list Views", e);
+          }
+        });
   }

-  // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver.
+  // This method maps urns returned from the list endpoint into Partial View objects which will be
+  // resolved by a separate Batch resolver.
   private List<DataHubView> mapUnresolvedViews(final List<Urn> entityUrns) {
     final List<DataHubView> results = new ArrayList<>();
     for (final Urn urn : entityUrns) {
@@ -110,14 +114,12 @@ private Filter buildFilters(@Nullable final String viewType, final String creato
     final AndFilterInput filterCriteria = new AndFilterInput();
     final List<FacetFilterInput> andConditions = new ArrayList<>();
     andConditions.add(
-        new FacetFilterInput(CREATOR_URN_FIELD,
-            null,
-            ImmutableList.of(creatorUrn),
-            false,
-            FilterOperator.EQUAL));
+        new FacetFilterInput(
+            CREATOR_URN_FIELD, null, ImmutableList.of(creatorUrn), false, FilterOperator.EQUAL));
     if (viewType != null) {
       andConditions.add(
-          new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL));
+          new FacetFilterInput(
+              VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL));
     }
     filterCriteria.setAnd(andConditions);
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java
index 61e22da3c9444..5a52a57d9c374 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.view;

+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -16,11 +18,7 @@
 import javax.annotation.Nonnull;
 import lombok.extern.slf4j.Slf4j;

-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-/**
- * Resolver responsible for updating a particular DataHub View
- */
+/** Resolver responsible for updating a particular DataHub View */
 @Slf4j
 public class UpdateViewResolver implements DataFetcher<CompletableFuture<DataHubView>> {

@@ -31,40 +29,47 @@ public UpdateViewResolver(@Nonnull final ViewService viewService) {
   }

   @Override
-  public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment) throws Exception {
+  public CompletableFuture<DataHubView> get(final DataFetchingEnvironment environment)
+      throws Exception {
     final QueryContext context = environment.getContext();
     final String urnStr = environment.getArgument("urn");
-    final UpdateViewInput input = bindArgument(environment.getArgument("input"), UpdateViewInput.class);
+ final UpdateViewInput input =
+ bindArgument(environment.getArgument("input"), UpdateViewInput.class);
final Urn urn = Urn.createFromString(urnStr);
- return CompletableFuture.supplyAsync(() -> {
- try {
- if (ViewUtils.canUpdateView(_viewService, urn, context)) {
- _viewService.updateView(
- urn,
- input.getName(),
- input.getDescription(),
- ViewUtils.mapDefinition(input.getDefinition()),
- context.getAuthentication(),
- System.currentTimeMillis());
- log.info(String.format("Successfully updated View %s with urn", urn));
- return getView(urn, context.getAuthentication());
- }
- throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
- } catch (AuthorizationException e) {
- throw e;
- } catch (Exception e) {
- throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e);
- }
- });
+ return CompletableFuture.supplyAsync(
+ () -> {
+ try {
+ if (ViewUtils.canUpdateView(_viewService, urn, context)) {
+ _viewService.updateView(
+ urn,
+ input.getName(),
+ input.getDescription(),
+ ViewUtils.mapDefinition(input.getDefinition()),
+ context.getAuthentication(),
+ System.currentTimeMillis());
+ log.info(String.format("Successfully updated View with urn %s", urn));
+ return getView(urn, context.getAuthentication());
+ }
+ throw new AuthorizationException(
+ "Unauthorized to perform this action. Please contact your DataHub administrator.");
+ } catch (AuthorizationException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new RuntimeException(
+ String.format("Failed to perform update against View with urn %s", urn), e);
+ }
+ });
}
- private DataHubView getView(@Nonnull final Urn urn, @Nonnull final Authentication authentication) {
+ private DataHubView getView(
+ @Nonnull final Urn urn, @Nonnull final Authentication authentication) {
final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication);
// If there is no response, there is a problem.
if (maybeResponse == null) {
throw new RuntimeException(
- String.format("Failed to perform update to View with urn %s. Failed to find view in GMS.", urn));
+ String.format(
+ "Failed to perform update to View with urn %s. Failed to find view in GMS.", urn));
}
return DataHubViewMapper.map(maybeResponse);
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java
index dda0c3bebc2eb..9da5f915ff31d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java
@@ -26,39 +26,40 @@
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
-
public class ViewUtils {
/**
* Returns true if the authenticated actor is allowed to create a view with the given parameters.
*
- * The user can create a View if it's a personal View specific to them, or
- * if it's a Global view and they have the correct Platform privileges.
+ * <p>The user can create a View if it's a personal View specific to them, or if it's a Global
+ * view and they have the correct Platform privileges.
*
* @param type the type of the new View
* @param context the current GraphQL {@link QueryContext}
- * @return true if the authenticator actor is allowed to change or delete the view, false otherwise.
+ * @return true if the authenticated actor is allowed to create the view, false
+ * otherwise.
*/
public static boolean canCreateView(
- @Nonnull DataHubViewType type,
- @Nonnull QueryContext context) {
+ @Nonnull DataHubViewType type, @Nonnull QueryContext context) {
Objects.requireNonNull(type, "type must not be null");
Objects.requireNonNull(context, "context must not be null");
return DataHubViewType.PERSONAL.equals(type)
- || (DataHubViewType.GLOBAL.equals(type) && AuthorizationUtils.canManageGlobalViews(context));
+ || (DataHubViewType.GLOBAL.equals(type)
+ && AuthorizationUtils.canManageGlobalViews(context));
}
- /**
- * Returns true if the authenticated actor is allowed to update or delete
- * the View with the specified urn.
+ * Returns true if the authenticated actor is allowed to update or delete the View with the
+ * specified urn.
*
* @param viewService an instance of {@link ViewService}
* @param viewUrn the urn of the View
* @param context the current GraphQL {@link QueryContext}
- * @return true if the authenticator actor is allowed to change or delete the view, false otherwise.
+ * @return true if the authenticated actor is allowed to change or delete the view, false
+ * otherwise.
*/
- public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) {
+ public static boolean canUpdateView(
+ @Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) {
Objects.requireNonNull(viewService, "viewService must not be null");
Objects.requireNonNull(viewUrn, "viewUrn must not be null");
Objects.requireNonNull(context, "context must not be null");
@@ -67,16 +68,21 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U
final DataHubViewInfo viewInfo = viewService.getViewInfo(viewUrn, context.getAuthentication());
if (viewInfo == null) {
- throw new IllegalArgumentException(String.format("Failed to modify View. View with urn %s does not exist.", viewUrn));
+ throw new IllegalArgumentException(
+ String.format("Failed to modify View. View with urn %s does not exist.", viewUrn));
}
- // If the View is Global, then the user must have ability to manage global views OR must be its owner
- if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) && AuthorizationUtils.canManageGlobalViews(context)) {
+ // If the View is Global, then the user must have ability to manage global views OR must be its
+ // owner
+ if (DataHubViewType.GLOBAL.equals(viewInfo.getType())
+ && AuthorizationUtils.canManageGlobalViews(context)) {
return true;
}
// If the View is Personal, then the current actor must be the owner.
- return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner( + viewInfo.getCreated().getActor(), + UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); } /** @@ -86,28 +92,32 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U * @return the GMS model */ @Nonnull - public static DataHubViewDefinition mapDefinition(@Nonnull final DataHubViewDefinitionInput input) { + public static DataHubViewDefinition mapDefinition( + @Nonnull final DataHubViewDefinitionInput input) { Objects.requireNonNull(input, "input must not be null"); final DataHubViewDefinition result = new DataHubViewDefinition(); if (input.getFilter() != null) { result.setFilter(mapFilter(input.getFilter()), SetMode.IGNORE_NULL); } - result.setEntityTypes(new StringArray(input.getEntityTypes().stream().map(EntityTypeMapper::getName).collect( - Collectors.toList()))); + result.setEntityTypes( + new StringArray( + input.getEntityTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()))); return result; } /** - * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} object, - * which is then persisted to the backend in an aspect. + * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} + * object, which is then persisted to the backend in an aspect. * - * We intentionally convert from a more rigid model to something more flexible to hedge for the case - * in which the views feature evolves to require more advanced filter capabilities. + *
<p>
We intentionally convert from a more rigid model to something more flexible to hedge for the + * case in which the views feature evolves to require more advanced filter capabilities. * - * The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), which cannot be - * rendered in full by the UI. We account for this on the read path by logging a warning and returning an empty - * View in such cases. + *
<p>
The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), + * which cannot be rendered in full by the UI. We account for this on the read path by logging a + * warning and returning an empty View in such cases. */ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { if (LogicalOperator.AND.equals(input.getOperator())) { @@ -121,19 +131,30 @@ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { private static Filter buildAndFilter(@Nonnull List input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(input.stream().map(ResolverUtils::criterionFromFilter).collect(Collectors.toList())))) - )); + result.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + input.stream() + .map(ResolverUtils::criterionFromFilter) + .collect(Collectors.toList())))))); return result; } private static Filter buildOrFilter(@Nonnull List input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(input.stream().map(filter -> - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(ResolverUtils.criterionFromFilter(filter)))) - ) - .collect(Collectors.toList()))); + result.setOr( + new ConjunctiveCriterionArray( + input.stream() + .map( + filter -> + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(ResolverUtils.criterionFromFilter(filter))))) + .collect(Collectors.toList()))); return result; } @@ -141,6 +162,5 @@ private static boolean isViewOwner(Urn creatorUrn, Urn actorUrn) { return creatorUrn.equals(actorUrn); } - private ViewUtils() { } - + private ViewUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java index 51fd503fff578..49c8c24c2b6be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java @@ -1,4 +1,3 @@ package com.linkedin.datahub.graphql.scalar; -public class LongScalarType { -} +public class LongScalarType {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java index 3bd8719a37abc..df7c729cb14c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java @@ -1,16 +1,18 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface BatchMutableType extends MutableType { - default Class batchInputClass() throws UnsupportedOperationException { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchInputClass method"); - } + default Class batchInputClass() throws UnsupportedOperationException { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchInputClass method"); + } - default List batchUpdate(@Nonnull final B[] updateInput, QueryContext context) throws Exception { - throw 
new UnsupportedOperationException(this.getClass().getName() + " does not implement batchUpdate method");
- }
+ default List<T> batchUpdate(@Nonnull final B[] updateInput, QueryContext context)
+ throws Exception {
+ throw new UnsupportedOperationException(
+ this.getClass().getName() + " does not implement batchUpdate method");
+ }
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java
index b50a229be0633..368c126131af2 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java
@@ -5,42 +5,46 @@
import com.linkedin.datahub.graphql.generated.BrowseResults;
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.FacetFilterInput;
-
+import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
-import java.util.List;
/**
* Extension of {@link EntityType} containing methods required for 'browse' functionality.
*
- * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity` interface.
+ * @param <T>: The GraphQL object type corresponding to the entity, must extend the `Entity`
+ * interface.
* @param <K> the key type for the DataLoader
*/
public interface BrowsableEntityType<T extends Entity, K> extends EntityType<T, K> {
- /**
- * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count.
- *
- * @param path the path to find browse results under
- * @param filters list of filters that should be applied to search results, null if non were provided
- * @param start the initial offset of returned results
- * @param count the number of results to retrieve
- * @param context the {@link QueryContext} corresponding to the request.
- */
- @Nonnull
- BrowseResults browse(@Nonnull List<String> path,
- @Nullable List<FacetFilterInput> filters,
- int start,
- int count,
- @Nonnull final QueryContext context) throws Exception;
-
- /**
- * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & count.
- *
- * @param urn the entity urn to fetch browse paths for
- * @param context the {@link QueryContext} corresponding to the request.
- */
- @Nonnull
- List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception;
+ /**
+ * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count.
+ *
+ * @param path the path to find browse results under
+ * @param filters list of filters that should be applied to search results, null if none were
+ * provided
+ * @param start the initial offset of returned results
+ * @param count the number of results to retrieve
+ * @param context the {@link QueryContext} corresponding to the request.
+ */
+ @Nonnull
+ BrowseResults browse(
+ @Nonnull List<String> path,
+ @Nullable List<FacetFilterInput> filters,
+ int start,
+ int count,
+ @Nonnull final QueryContext context)
+ throws Exception;
+ /**
+ * Retrieves a list of {@link BrowsePath} corresponding to a given entity urn.
+ *
+ * @param urn the entity urn to fetch browse paths for
+ * @param context the {@link QueryContext} corresponding to the request.
+ */ + @Nonnull + List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java index 4185288776c06..43e4c1be55b71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java @@ -3,20 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import java.util.function.Function; - /** - * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, etc.). + * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, + * etc.). * * @param : The GraphQL object type corresponding to the entity, must be of type {@link Entity} * @param the key type for the DataLoader */ public interface EntityType extends LoadableType { - /** - * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the Graph type, eg. 'DATASET' - */ - com.linkedin.datahub.graphql.generated.EntityType type(); - - Function getKeyProvider(); + /** + * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the + * Graph type, eg. 'DATASET' + */ + com.linkedin.datahub.graphql.generated.EntityType type(); + Function getKeyProvider(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java index a21fab09b79c3..9f9fe1f28994c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java @@ -2,10 +2,9 @@ import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; - import graphql.execution.DataFetcherResult; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * GQL graph type that can be loaded from a downstream service by primary key. @@ -15,35 +14,38 @@ */ public interface LoadableType { - /** - * Returns generated GraphQL class associated with the type - */ - Class objectClass(); - - /** - * Returns the name of the type, to be used in creating a corresponding GraphQL {@link org.dataloader.DataLoader} - */ - default String name() { - return objectClass().getSimpleName(); - } - - /** - * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity cannot be found. - * - * @param key to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - default DataFetcherResult load(@Nonnull final K key, @Nonnull final QueryContext context) throws Exception { - return batchLoad(ImmutableList.of(key), context).get(0); - }; - - /** - * Retrieves an list of entities given a list of urn strings. The list returned is expected to - * be of same length of the list of urns, where nulls are provided in place of an entity object if an entity cannot be found. - * - * @param keys to retrieve - * @param context the {@link QueryContext} corresponding to the request. 
- */
- List<DataFetcherResult<T>> batchLoad(@Nonnull final List<K> keys, @Nonnull final QueryContext context) throws Exception;
-
+ /** Returns generated GraphQL class associated with the type */
+ Class<T> objectClass();
+
+ /**
+ * Returns the name of the type, to be used in creating a corresponding GraphQL {@link
+ * org.dataloader.DataLoader}
+ */
+ default String name() {
+ return objectClass().getSimpleName();
+ }
+
+ /**
+ * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity
+ * cannot be found.
+ *
+ * @param key to retrieve
+ * @param context the {@link QueryContext} corresponding to the request.
+ */
+ default DataFetcherResult<T> load(@Nonnull final K key, @Nonnull final QueryContext context)
+ throws Exception {
+ return batchLoad(ImmutableList.of(key), context).get(0);
+ }
+ ;
+
+ /**
+ * Retrieves a list of entities given a list of urn strings. The list returned is expected to be
+ * of the same length as the list of urns, where nulls are provided in place of an entity object
+ * if an entity cannot be found.
+ *
+ * @param keys to retrieve
+ * @param context the {@link QueryContext} corresponding to the request.
+ */
+ List<DataFetcherResult<T>> batchLoad(
+ @Nonnull final List<K> keys, @Nonnull final QueryContext context) throws Exception;
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
index 94f1200d3a783..fa24192913324 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java
@@ -1,7 +1,6 @@
package com.linkedin.datahub.graphql.types;
import com.linkedin.datahub.graphql.QueryContext;
-
import javax.annotation.Nonnull;
/**
@@ -10,18 +9,16 @@ public interface MutableType<I, T> {
- /**
- * Returns generated GraphQL class associated with the input type
- */
-
- Class<I> inputClass();
+ /** Returns generated GraphQL class associated with the input type */
+ Class<I> inputClass();
- /**
- * Update an entity by urn
- *
- * @param urn
- * @param input input type
- * @param context the {@link QueryContext} corresponding to the request.
- */
- T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) throws Exception;
+ /**
+ * Update an entity by urn
+ *
+ * @param urn
+ * @param input input type
+ * @param context the {@link QueryContext} corresponding to the request.
+ */ + T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java index 96875956d22c1..a5ade054e71eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java @@ -6,52 +6,61 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.query.filter.Filter; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. * - * Extension of {@link EntityType} containing methods required for 'search' functionality. + *
<p>
Extension of {@link EntityType} containing methods required for 'search' functionality. * - * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. */ @Deprecated public interface SearchableEntityType extends EntityType { - /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. - * - * Retrieves {@link SearchResults} corresponding to a given query string, list of filters, start index, & count. - * - * @param query query text - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Deprecated - SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of filters, & limit. - * - * @param query query text - * @param field the name of the field to autocomplete against, null if one was not provided - * @param filters list of filters that should be applied to search results, null if non were provided - * @param limit the maximum number of autocomplete suggestions to be returned - * @param context the {@link QueryContext} corresponding to the request. - */ - AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception; + /** + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. + * + *
<p>
Retrieves {@link SearchResults} corresponding to a given query string, list of filters,
+ * start index, & count.
+ *
+ * @param query query text
+ * @param filters list of filters that should be applied to search results, null if none were
+ * provided
+ * @param start the initial offset of returned results
+ * @param count the number of results to retrieve
+ * @param context the {@link QueryContext} corresponding to the request.
+ */
+ @Deprecated
+ SearchResults search(
+ @Nonnull String query,
+ @Nullable List<FacetFilterInput> filters,
+ int start,
+ int count,
+ @Nonnull final QueryContext context)
+ throws Exception;
+ /**
+ * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of
+ * filters, & limit.
+ *
+ * @param query query text
+ * @param field the name of the field to autocomplete against, null if one was not provided
+ * @param filters list of filters that should be applied to search results, null if none were
+ * provided
+ * @param limit the maximum number of autocomplete suggestions to be returned
+ * @param context the {@link QueryContext} corresponding to the request.
+ */
+ AutoCompleteResults autoComplete(
+ @Nonnull String query,
+ @Nullable String field,
+ @Nullable Filter filters,
+ int limit,
+ @Nonnull final QueryContext context)
+ throws Exception;
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java
index c9e2c322ace8d..00e9badf5e345 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java
@@ -7,7 +7,6 @@
import com.linkedin.metadata.Constants;
import javax.annotation.Nonnull;
-
public class AspectMapper {
public static final AspectMapper INSTANCE = new AspectMapper();
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java
index f3fdfdaa86f9e..45e80822b12c8 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java
@@ -2,8 +2,8 @@
import com.google.common.collect.ImmutableSet;
import com.linkedin.common.urn.Urn;
-import com.linkedin.datahub.graphql.VersionedAspectKey;
import com.linkedin.datahub.graphql.QueryContext;
+import com.linkedin.datahub.graphql.VersionedAspectKey;
import com.linkedin.datahub.graphql.generated.Aspect;
import com.linkedin.datahub.graphql.types.LoadableType;
import com.linkedin.entity.EntityResponse;
@@ -35,45 +35,55 @@ public String name() {
}
/**
- * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list returned is expected to
- * be of same length of the list of keys, where nulls are provided in place of an aspect object if an entity cannot be found.
+ * Retrieves a list of aspects given a list of {@link VersionedAspectKey} structs. The list
+ * returned is expected to be of the same length as the list of keys, where nulls are provided in
+ * place of an aspect object if an entity cannot be found.
*
* @param keys to retrieve
* @param context the {@link QueryContext} corresponding to the request.
*/ - public List> batchLoad(@Nonnull List keys, @Nonnull QueryContext context) throws Exception { + public List> batchLoad( + @Nonnull List keys, @Nonnull QueryContext context) throws Exception { try { - return keys.stream().map(key -> { - try { - Urn entityUrn = Urn.createFromString(key.getUrn()); + return keys.stream() + .map( + key -> { + try { + Urn entityUrn = Urn.createFromString(key.getUrn()); - Map response = _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication() - ); + Map response = + _entityClient.batchGetV2( + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName()), + context.getAuthentication()); - EntityResponse entityResponse = response.get(entityUrn); + EntityResponse entityResponse = response.get(entityUrn); - if (entityResponse == null || entityResponse.getAspects().get(key.getAspectName()) == null) { - // The aspect was not found. Return null. - return DataFetcherResult.newResult().data(null).build(); - } - final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); - return DataFetcherResult.newResult().data(AspectMapper.map(aspect, entityUrn)).build(); - } catch (Exception e) { - if (e instanceof RestLiResponseException) { - // if no aspect is found, restli will return a 404 rather than null - // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls - if (((RestLiResponseException) e).getStatus() == 404) { - return DataFetcherResult.newResult().data(null).build(); - } - } - throw new RuntimeException(String.format("Failed to load Aspect for entity %s", key.getUrn()), e); - } - }).collect(Collectors.toList()); + if (entityResponse == null + || entityResponse.getAspects().get(key.getAspectName()) == null) { + // The aspect was not found. Return null. 
+ return DataFetcherResult.newResult().data(null).build(); + } + final EnvelopedAspect aspect = + entityResponse.getAspects().get(key.getAspectName()); + return DataFetcherResult.newResult() + .data(AspectMapper.map(aspect, entityUrn)) + .build(); + } catch (Exception e) { + if (e instanceof RestLiResponseException) { + // if no aspect is found, restli will return a 404 rather than null + // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls + if (((RestLiResponseException) e).getStatus() == 404) { + return DataFetcherResult.newResult().data(null).build(); + } + } + throw new RuntimeException( + String.format("Failed to load Aspect for entity %s", key.getUrn()), e); + } + }) + .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Aspects", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index e1d81bb31f471..2536f4d2521ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -25,7 +25,6 @@ import java.util.Collections; import java.util.stream.Collectors; - public class AssertionMapper { public static Assertion map(final EntityResponse entityResponse) { @@ -36,15 +35,18 @@ public static Assertion map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.ASSERTION); - final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); + final EnvelopedAspect envelopedAssertionInfo = + aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); } - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); @@ -60,7 +62,8 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion new com.linkedin.datahub.graphql.generated.AssertionInfo(); assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); if (gmsAssertionInfo.hasDatasetAssertion()) { - DatasetAssertionInfo datasetAssertion = mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + DatasetAssertionInfo datasetAssertion = + mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } return assertionInfo; @@ -69,25 +72,25 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion private static DatasetAssertionInfo mapDatasetAssertionInfo( final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { DatasetAssertionInfo 
datasetAssertion = new DatasetAssertionInfo(); - datasetAssertion.setDatasetUrn( - gmsDatasetAssertion.getDataset().toString()); - datasetAssertion.setScope( - DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); + datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); + datasetAssertion.setScope(DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); if (gmsDatasetAssertion.hasFields()) { - datasetAssertion.setFields(gmsDatasetAssertion.getFields() - .stream() - .map(AssertionMapper::mapDatasetSchemaField) - .collect(Collectors.toList())); + datasetAssertion.setFields( + gmsDatasetAssertion.getFields().stream() + .map(AssertionMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } else { datasetAssertion.setFields(Collections.emptyList()); } // Agg if (gmsDatasetAssertion.hasAggregation()) { - datasetAssertion.setAggregation(AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); + datasetAssertion.setAggregation( + AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); } // Op - datasetAssertion.setOperator(AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); + datasetAssertion.setOperator( + AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); // Params if (gmsDatasetAssertion.hasParameters()) { @@ -98,7 +101,8 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( datasetAssertion.setNativeType(gmsDatasetAssertion.getNativeType()); } if (gmsDatasetAssertion.hasNativeParameters()) { - datasetAssertion.setNativeParameters(StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + datasetAssertion.setNativeParameters( + StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } @@ -119,7 +123,8 @@ private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { return new SchemaFieldRef(schemaFieldUrn.toString(), schemaFieldUrn.getEntityKey().get(1)); } - private static AssertionStdParameters mapParameters(final com.linkedin.assertion.AssertionStdParameters params) { + private static AssertionStdParameters mapParameters( + final com.linkedin.assertion.AssertionStdParameters params) { final AssertionStdParameters result = new AssertionStdParameters(); if (params.hasValue()) { result.setValue(mapParameter(params.getValue())); @@ -133,13 +138,13 @@ private static AssertionStdParameters mapParameters(final com.linkedin.assertion return result; } - private static AssertionStdParameter mapParameter(final com.linkedin.assertion.AssertionStdParameter param) { + private static AssertionStdParameter mapParameter( + final com.linkedin.assertion.AssertionStdParameter param) { final AssertionStdParameter result = new AssertionStdParameter(); result.setType(AssertionStdParameterType.valueOf(param.getType().name())); result.setValue(param.getValue()); return result; } - private AssertionMapper() { - } + private AssertionMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index 3493afdd8bd84..ac5cce1191e5d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java @@ -20,69 +20,71 @@ import 
java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AssertionType + implements com.linkedin.datahub.graphql.types.EntityType { -public class AssertionType implements com.linkedin.datahub.graphql.types.EntityType { + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ASSERTION_KEY_ASPECT_NAME, + Constants.ASSERTION_INFO_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + private final EntityClient _entityClient; - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ASSERTION_KEY_ASPECT_NAME, - Constants.ASSERTION_INFO_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME - ); - private final EntityClient _entityClient; + public AssertionType(final EntityClient entityClient) { + _entityClient = entityClient; + } - public AssertionType(final EntityClient entityClient) { - _entityClient = entityClient; - } + @Override + public EntityType type() { + return EntityType.ASSERTION; + } - @Override - public EntityType type() { - return EntityType.ASSERTION; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return Assertion.class; - } + @Override + public Class objectClass() { + return Assertion.class; + } - @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List assertionUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List assertionUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); - try { - final Map entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(AssertionMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Assertions", e); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(AssertionMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Assertions", e); } + } - private Urn getUrn(final String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); - } + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } -} \ No newline at end of file + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index d9f25a7cec8e1..bfe2ccbe34166 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -4,9 +4,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.types.auth.mappers.AccessTokenMetadataMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -21,7 +21,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class AccessTokenMetadataType implements com.linkedin.datahub.graphql.types.EntityType { @@ -48,13 +47,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List keys, - @Nonnull QueryContext context) throws Exception { - final List tokenInfoUrns = keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List keys, @Nonnull QueryContext context) throws Exception { + final List tokenInfoUrns = + keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(Constants.ACCESS_TOKEN_ENTITY_NAME, new HashSet<>(tokenInfoUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + new HashSet<>(tokenInfoUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -62,9 +65,13 @@ public List> batchLoad(@Nonnull List gmsResult == null ? null : DataFetcherResult.newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(AccessTokenMetadataMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Access Token Info", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index 9b38757879896..a519a65e5cb6b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,8 +2,8 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -11,7 +11,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - public class AccessTokenMetadataMapper implements ModelMapper { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); @@ -29,13 +28,15 @@ public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { metadata.setType(EntityType.ACCESS_TOKEN); final EnvelopedAspectMap aspectMap = input.getAspects(); - final MappingHelper mappingHelper = new MappingHelper<>(aspectMap, metadata); + final MappingHelper mappingHelper = + new MappingHelper<>(aspectMap, metadata); mappingHelper.mapToResult(Constants.ACCESS_TOKEN_INFO_NAME, this::mapTokenInfo); return mappingHelper.getResult(); } - private void mapTokenInfo(@Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { + private void mapTokenInfo( + @Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(dataMap); accessTokenMetadata.setName(tokenInfo.getName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index fa0e3cd856803..ba8e96159b0bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.chart; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.ChartUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import 
com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -36,8 +39,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,203 +57,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - +public class ChartType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { -public class ChartType implements SearchableEntityType, BrowsableEntityType, MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + CHART_KEY_ASPECT_NAME, + CHART_INFO_ASPECT_NAME, + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, + CHART_QUERY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set FACET_FIELDS = + ImmutableSet.of("access", "queryType", "tool", "type"); - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - CHART_KEY_ASPECT_NAME, - CHART_INFO_ASPECT_NAME, - EDITABLE_CHART_PROPERTIES_ASPECT_NAME, - CHART_QUERY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); - - private final EntityClient _entityClient; - - public ChartType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class inputClass() { - return ChartUpdateInput.class; - } + public ChartType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.CHART; - } + @Override + public Class inputClass() { + return ChartUpdateInput.class; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.CHART; + } - @Override - public Class objectClass() { - return Chart.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map chartMap = - _entityClient.batchGetV2( - CHART_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - 
context.getAuthentication()); + @Override + public Class objectClass() { + return Chart.class; + } - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(chartMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsChart -> gmsChart == null ? null : DataFetcherResult.newResult() - .data(ChartMapper.map(gmsChart)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Charts", e); - } - } + @Override + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map chartMap = + _entityClient.batchGetV2( + CHART_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "chart", - query, - facetFilters, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(chartMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsChart -> + gmsChart == null + ? null + : DataFetcherResult.newResult() + .data(ChartMapper.map(gmsChart)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Charts", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "chart", query, - filters, - limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "chart", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("chart", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "chart", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - private ChartUrn getChartUrn(String urnStr) { - try { - return ChartUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); - } - } + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - @Override - public Chart update(@Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = ChartUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + private ChartUrn getChartUrn(String urnStr) { + try { + return ChartUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); + } + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public Chart update( + @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = ChartUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } - private boolean isAuthorized(@Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CHART_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CHART_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) { - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index e0ffc57ddf519..0ef52c9f45716 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -28,13 +30,13 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -51,184 +53,211 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartMapper implements ModelMapper { + public static final ChartMapper INSTANCE = new ChartMapper(); -public class ChartMapper implements ModelMapper { + public static Chart map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - public static final ChartMapper INSTANCE = new ChartMapper(); + @Override + public Chart apply(@Nonnull final EntityResponse entityResponse) { + final Chart result = new Chart(); + Urn entityUrn = entityResponse.getUrn(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CHART); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { - final Chart result = new Chart(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CHART); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); - 
mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); - mappingHelper.mapToResult(EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); + mappingHelper.mapToResult( + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (chart, dataMap) -> chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (chart, dataMap) -> - chart.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> - chart.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (chart, dataMap) -> chart.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (chart, dataMap) -> + chart.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (chart, dataMap) -> chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (chart, dataMap) -> - chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (chart, dataMap) -> chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (chart, dataMap) -> chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (chart, dataMap) -> - chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> + 
mappingHelper.mapToResult( + EMBED_ASPECT_NAME, (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } - private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final ChartKey gmsKey = new ChartKey(dataMap); - chart.setChartId(gmsKey.getChartId()); - chart.setTool(gmsKey.getDashboardTool()); - chart.setPlatform(DataPlatform.builder() + private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final ChartKey gmsKey = new ChartKey(dataMap); + chart.setChartId(gmsKey.getChartId()); + chart.setTool(gmsKey.getDashboardTool()); + chart.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } - private void mapChartInfo(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); - } + private void mapChartInfo( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); + chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); + } - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} - */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartInfo result = new ChartInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasInputs()) { - result.setInputs(info.getInputs().stream().map(input -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(input.getDatasetUrn().toString()); - return dataset; - }).collect(Collectors.toList())); - } - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; - } + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ + private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartInfo result = new ChartInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} - */ - private ChartProperties mapChartInfoToProperties(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartProperties result = new ChartProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasInputs()) { + result.setInputs( + info.getInputs().stream() + .map( + input -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(input.getDatasetUrn().toString()); + return dataset; + }) + .collect(Collectors.toList())); } - private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); - chart.setQuery(mapQuery(gmsChartQuery)); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { - final ChartQuery result = new ChartQuery(); - result.setRawQuery(query.getRawQuery()); - result.setType(ChartQueryType.valueOf(query.getType().toString())); - return result; + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); } - - private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); - final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); - chartEditableProperties.setDescription(editableChartProperties.getDescription()); - chart.setEditableProperties(chartEditableProperties); + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + 
result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ + private ChartProperties mapChartInfoToProperties( + final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartProperties result = new ChartProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - private void mapGlobalTags(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - chart.setGlobalTags(globalTags); - chart.setTags(globalTags); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); + chart.setQuery(mapQuery(gmsChartQuery)); + } + + private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { + final ChartQuery result = new ChartQuery(); + result.setRawQuery(query.getRawQuery()); + result.setType(ChartQueryType.valueOf(query.getType().toString())); + return result; + } + + private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); + final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); + chartEditableProperties.setDescription(editableChartProperties.getDescription()); + chart.setEditableProperties(chartEditableProperties); + } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - chart.setContainer(Container - .builder() + private void mapGlobalTags( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + chart.setGlobalTags(globalTags); + chart.setTags(globalTags); + } + + private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new 
com.linkedin.container.Container(dataMap); + chart.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); - } + private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index b52ddad0b0071..f2a434b58686c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -17,68 +19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartUpdateInputMapper + implements InputModelMapper, Urn> { + public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); -public class ChartUpdateInputMapper implements InputModelMapper, Urn> { + public static Collection map( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(chartUpdateInput, actor); + } - public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); + @Override + public Collection apply( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - public static Collection map(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + if (chartUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } - @Override - public Collection apply(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - final Collection proposals = new ArrayList<>(3); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - - if (chartUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper - .aspectToProposal(OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); - } - - if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if 
(chartUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - // Tags overrides global tags if provided - if (chartUpdateInput.getTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } - - if (chartUpdateInput.getEditableProperties() != null) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(); - editableChartProperties.setDescription(chartUpdateInput.getEditableProperties().getDescription()); - if (!editableChartProperties.hasCreated()) { - editableChartProperties.setCreated(auditStamp); - } - editableChartProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); - } + if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (chartUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + // Tags overrides global tags if provided + if (chartUpdateInput.getTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (chartUpdateInput.getEditableProperties() != null) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(); + editableChartProperties.setDescription( + chartUpdateInput.getEditableProperties().getDescription()); + if (!editableChartProperties.hasCreated()) { + editableChartProperties.setCreated(auditStamp); + } + editableChartProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java index d6ef713f3ade6..4da18403f95cc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java @@ -7,29 +7,36 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class InputFieldsMapper { - public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); - - public static com.linkedin.datahub.graphql.generated.InputFields map(@Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.InputFields apply(@Nonnull final InputFields input, @Nonnull final Urn entityUrn) { - final 
com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields(); - result.setFields(input.getFields().stream().map(field -> { - InputField fieldResult = new InputField(); - - if (field.hasSchemaField()) { - fieldResult.setSchemaField(SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); - } - if (field.hasSchemaFieldUrn()) { - fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); - } - return fieldResult; - }).collect(Collectors.toList())); - - return result; - } + public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); + + public static com.linkedin.datahub.graphql.generated.InputFields map( + @Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.InputFields apply( + @Nonnull final InputFields input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.InputFields result = + new com.linkedin.datahub.graphql.generated.InputFields(); + result.setFields( + input.getFields().stream() + .map( + field -> { + InputField fieldResult = new InputField(); + + if (field.hasSchemaField()) { + fieldResult.setSchemaField( + SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); + } + if (field.hasSchemaFieldUrn()) { + fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); + } + return fieldResult; + }) + .collect(Collectors.toList())); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java index beb2b64e1dd7d..1f952bb6a2bd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class AuditStampMapper implements ModelMapper { - public static final AuditStampMapper INSTANCE = new AuditStampMapper(); + public static final AuditStampMapper INSTANCE = new AuditStampMapper(); - public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - return INSTANCE.apply(auditStamp); - } + public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { + return INSTANCE.apply(auditStamp); + } - @Override - public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - final AuditStamp result = new AuditStamp(); - result.setActor(auditStamp.getActor().toString()); - result.setTime(auditStamp.getTime()); - return result; - } + @Override + public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { + final AuditStamp result = new AuditStamp(); + result.setActor(auditStamp.getActor().toString()); + result.setTime(auditStamp.getTime()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java index 41ee99fa412ad..79b7cf8e050d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java @@ -4,10 +4,9 @@ import com.linkedin.datahub.graphql.generated.BrowsePathEntry; import com.linkedin.datahub.graphql.generated.BrowsePathV2; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class BrowsePathsV2Mapper implements ModelMapper { @@ -20,7 +19,8 @@ public static BrowsePathV2 map(@Nonnull final BrowsePathsV2 metadata) { @Override public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) { final BrowsePathV2 result = new BrowsePathV2(); - final List path = input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); + final List path = + input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); result.setPath(path); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index 7144730ba9337..e3a09bc8926a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -3,8 +3,8 @@ import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -public class ChangeAuditStampsMapper implements ModelMapper { +public class ChangeAuditStampsMapper + implements ModelMapper { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 6c8bdada17b24..806e8e6aadc5b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,26 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nonnull; import lombok.NonNull; public class CostMapper implements ModelMapper { - public static final CostMapper INSTANCE = new CostMapper(); + public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); - } + public static Cost map(@NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(cost); + } - @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { - final Cost result = new Cost(); - result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); - return result; - } + @Override + public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { + final Cost result = new Cost(); + result.setCostType(CostType.valueOf(cost.getCostType().name())); + result.setCostValue(CostValueMapper.map(cost.getCost())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 3f41c92cd1715..56c107f7ec059 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -2,25 +2,24 @@ import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class CostValueMapper implements ModelMapper { - public static final CostValueMapper INSTANCE = new CostValueMapper(); + public static final CostValueMapper INSTANCE = new CostValueMapper(); - public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); - } + public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(costValue); + } - @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { - final CostValue result = new CostValue(); - if (costValue.isCostCode()) { - result.setCostCode(costValue.getCostCode()); - } - if (costValue.isCostId()) { - result.setCostId(costValue.getCostId().floatValue()); - } - return result; + @Override + public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { + final CostValue result = new CostValue(); + if (costValue.isCostCode()) { + result.setCostCode(costValue.getCostCode()); + } + if (costValue.isCostId()) { + result.setCostId(costValue.getCostId().floatValue()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java index 50e4846611a9b..b09678ddeb42e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java @@ -1,36 +1,36 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.CustomPropertiesEntry; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class CustomPropertiesMapper { - public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); + public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); - public static List map(@Nonnull final Map input, @Nonnull Urn urn) { - return INSTANCE.apply(input, urn); - } + public static List map( + @Nonnull final Map input, @Nonnull Urn urn) { + return INSTANCE.apply(input, urn); + } - public List apply(@Nonnull final Map input, @Nonnull Urn urn) { - List results = new ArrayList<>(); - for (String key : input.keySet()) { - final CustomPropertiesEntry entry = new CustomPropertiesEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - entry.setAssociatedUrn(urn.toString()); - results.add(entry); - } - return results; + public List apply( + @Nonnull final Map input, @Nonnull Urn urn) { + List results = new ArrayList<>(); + for (String key : input.keySet()) { + final CustomPropertiesEntry entry = new CustomPropertiesEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + entry.setAssociatedUrn(urn.toString()); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index 1f10cd6ee3658..a2236f7e8586d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -3,14 +3,16 @@ import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class DataPlatformInstanceAspectMapper implements ModelMapper { +public class DataPlatformInstanceAspectMapper + implements ModelMapper { - public static final DataPlatformInstanceAspectMapper INSTANCE = new DataPlatformInstanceAspectMapper(); + public static final DataPlatformInstanceAspectMapper INSTANCE = + new DataPlatformInstanceAspectMapper(); - public static DataPlatformInstance map(@Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { + public static DataPlatformInstance map( + @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { return INSTANCE.apply(dataPlatformInstance); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java index 4bbf50bb72362..7a88474166915 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,24 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class DeprecationMapper implements ModelMapper { - public static final DeprecationMapper INSTANCE = new 
DeprecationMapper(); +public class DeprecationMapper + implements ModelMapper { + public static final DeprecationMapper INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 478d256df66a4..339c6a848d9f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class EmbedMapper implements ModelMapper { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java index 9f4517c89a6dc..830cbb0e79d79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java @@ -1,44 +1,49 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.FineGrainedLineage; import com.linkedin.datahub.graphql.generated.SchemaFieldRef; import com.linkedin.dataset.FineGrainedLineageArray; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import javax.annotation.Nonnull; public class FineGrainedLineagesMapper { public static final FineGrainedLineagesMapper INSTANCE = new FineGrainedLineagesMapper(); - public static List map(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { + public static List map( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { return INSTANCE.apply(fineGrainedLineages); } - public List apply(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { - final List result = new ArrayList<>(); + public List apply( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { + final List result = + new 
ArrayList<>(); if (fineGrainedLineages.size() == 0) { return result; } for (com.linkedin.dataset.FineGrainedLineage fineGrainedLineage : fineGrainedLineages) { - com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); + com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = + new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); if (fineGrainedLineage.hasUpstreams()) { - resultEntry.setUpstreams(fineGrainedLineage.getUpstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setUpstreams( + fineGrainedLineage.getUpstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } if (fineGrainedLineage.hasDownstreams()) { - resultEntry.setDownstreams(fineGrainedLineage.getDownstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setDownstreams( + fineGrainedLineage.getDownstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } result.add(resultEntry); } @@ -46,8 +51,7 @@ public List apply(@No } private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { - return new SchemaFieldRef(schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); + return new SchemaFieldRef( + schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 8bcfe7eb3b6d0..4546e0e4d8dc0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -2,22 +2,25 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.InstitutionalMemory; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class InstitutionalMemoryMapper { - public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); + public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); - public static InstitutionalMemory map(@Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(memory, entityUrn); - } + public static InstitutionalMemory map( + @Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(memory, entityUrn); + } - public InstitutionalMemory apply(@Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { - final InstitutionalMemory result = new InstitutionalMemory(); - result.setElements(input.getElements().stream().map(metadata -> - InstitutionalMemoryMetadataMapper.map(metadata, 
entityUrn)).collect(Collectors.toList())); - return result; - } + public InstitutionalMemory apply( + @Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { + final InstitutionalMemory result = new InstitutionalMemory(); + result.setElements( + input.getElements().stream() + .map(metadata -> InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java index ba4d37173abb8..49a4618507086 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java @@ -1,33 +1,37 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import com.linkedin.datahub.graphql.generated.CorpUser; - +import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import javax.annotation.Nonnull; public class InstitutionalMemoryMetadataMapper { - public static final InstitutionalMemoryMetadataMapper INSTANCE = new InstitutionalMemoryMetadataMapper(); + public static final InstitutionalMemoryMetadataMapper INSTANCE = + new InstitutionalMemoryMetadataMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static InstitutionalMemoryMetadata map( + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public InstitutionalMemoryMetadata apply(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) { - final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); - result.setUrl(input.getUrl().toString()); - result.setDescription(input.getDescription()); // deprecated field - result.setLabel(input.getDescription()); - result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); - result.setCreated(AuditStampMapper.map(input.getCreateStamp())); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + public InstitutionalMemoryMetadata apply( + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, + @Nonnull final Urn entityUrn) { + final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); + result.setUrl(input.getUrl().toString()); + result.setDescription(input.getDescription()); // deprecated field + result.setLabel(input.getDescription()); + result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); + result.setCreated(AuditStampMapper.map(input.getCreateStamp())); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } - private CorpUser getAuthor(String actor) { - CorpUser partialUser = new CorpUser(); - partialUser.setUrn(actor); - return partialUser; - } + private CorpUser getAuthor(String actor) { + CorpUser partialUser = new CorpUser(); + partialUser.setUrn(actor); + return partialUser; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java index 28986dcae5725..87d865471708e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java @@ -1,31 +1,34 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.common.AuditStamp; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class InstitutionalMemoryMetadataUpdateMapper implements ModelMapper { +public class InstitutionalMemoryMetadataUpdateMapper + implements ModelMapper { - private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = new InstitutionalMemoryMetadataUpdateMapper(); + private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = + new InstitutionalMemoryMetadataUpdateMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemoryMetadata map( + @Nonnull final InstitutionalMemoryMetadataUpdate input) { + return INSTANCE.apply(input); + } - @Override - public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); - metadata.setDescription(input.getDescription()); - metadata.setUrl(new Url(input.getUrl())); - metadata.setCreateStamp(new AuditStamp() + @Override + public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { + final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); + metadata.setDescription(input.getDescription()); + metadata.setUrl(new Url(input.getUrl())); + metadata.setCreateStamp( + new AuditStamp() .setActor(CorpUserUtils.getCorpUserUrn(input.getAuthor())) - .setTime(input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt()) - ); - return metadata; - } + .setTime( + input.getCreatedAt() == null ? 
System.currentTimeMillis() : input.getCreatedAt())); + return metadata; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java index bf063896290eb..d8b451458e72c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java @@ -1,30 +1,30 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - -import javax.annotation.Nonnull; - import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadataArray; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class InstitutionalMemoryUpdateMapper implements ModelMapper { +public class InstitutionalMemoryUpdateMapper + implements ModelMapper { - private static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper(); + private static final InstitutionalMemoryUpdateMapper INSTANCE = + new InstitutionalMemoryUpdateMapper(); - public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { + return INSTANCE.apply(input); + } - @Override - public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { - final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); - institutionalMemory.setElements(new InstitutionalMemoryMetadataArray( - input.getElements() - .stream() + @Override + public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { + final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); + institutionalMemory.setElements( + new InstitutionalMemoryMetadataArray( + input.getElements().stream() .map(InstitutionalMemoryMetadataUpdateMapper::map) .collect(Collectors.toList()))); - return institutionalMemory; - } + return institutionalMemory; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java index 986954fab87db..37b625715edd5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java @@ -1,59 +1,66 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.Operation; +import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.OperationSourceType; import com.linkedin.datahub.graphql.generated.OperationType; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; -public class 
OperationMapper implements TimeSeriesAspectMapper { +public class OperationMapper + implements TimeSeriesAspectMapper { - public static final OperationMapper INSTANCE = new OperationMapper(); + public static final OperationMapper INSTANCE = new OperationMapper(); - public static com.linkedin.datahub.graphql.generated.Operation map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); - } + public static com.linkedin.datahub.graphql.generated.Operation map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.Operation apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + Operation gmsProfile = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + Operation.class); + + final com.linkedin.datahub.graphql.generated.Operation result = + new com.linkedin.datahub.graphql.generated.Operation(); - @Override - public com.linkedin.datahub.graphql.generated.Operation apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - Operation gmsProfile = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - Operation.class); - - final com.linkedin.datahub.graphql.generated.Operation result = - new com.linkedin.datahub.graphql.generated.Operation(); - - result.setTimestampMillis(gmsProfile.getTimestampMillis()); - result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); - if (gmsProfile.hasActor()) { - result.setActor(gmsProfile.getActor().toString()); - } - result.setOperationType(OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); - result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); - if (gmsProfile.hasSourceType()) { - result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); - } - if (gmsProfile.hasPartitionSpec()) { - result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); - } - if (gmsProfile.hasCustomProperties()) { - result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); - } - if (gmsProfile.hasNumAffectedRows()) { - result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); - } - if (gmsProfile.hasAffectedDatasets()) { - result.setAffectedDatasets(gmsProfile.getAffectedDatasets().stream().map(Urn::toString).collect(Collectors.toList())); - } - - return result; + result.setTimestampMillis(gmsProfile.getTimestampMillis()); + result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); + if (gmsProfile.hasActor()) { + result.setActor(gmsProfile.getActor().toString()); } + result.setOperationType( + OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); + result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); + if (gmsProfile.hasSourceType()) { + result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); + } + if (gmsProfile.hasPartitionSpec()) { + result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); + } + if (gmsProfile.hasCustomProperties()) { + result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); + } + if (gmsProfile.hasNumAffectedRows()) { + result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); + } + if (gmsProfile.hasAffectedDatasets()) { + result.setAffectedDatasets( + 
gmsProfile.getAffectedDatasets().stream() + .map(Urn::toString) + .collect(Collectors.toList())); + } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java index 181bdc176fb94..ea15aefdad3b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -10,51 +12,49 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class OwnerMapper { - public static final OwnerMapper INSTANCE = new OwnerMapper(); + public static final OwnerMapper INSTANCE = new OwnerMapper(); - public static Owner map(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(owner, entityUrn); + public static Owner map( + @Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(owner, entityUrn); + } + + public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + final Owner result = new Owner(); + // Deprecated + result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); + + if (owner.getTypeUrn() == null) { + OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); + owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); } - public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - final Owner result = new Owner(); - // Deprecated - result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); - - if (owner.getTypeUrn() == null) { - OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); - owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); - } - - if (owner.getTypeUrn() != null) { - OwnershipTypeEntity entity = new OwnershipTypeEntity(); - entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); - entity.setUrn(owner.getTypeUrn().toString()); - result.setOwnershipType(entity); - } - if (owner.getOwner().getEntityType().equals("corpuser")) { - CorpUser partialOwner = new CorpUser(); - partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } else { - CorpGroup partialOwner = new CorpGroup(); - partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } - if (owner.hasSource()) { - result.setSource(OwnershipSourceMapper.map(owner.getSource())); - } - result.setAssociatedUrn(entityUrn.toString()); - return result; + if (owner.getTypeUrn() != null) { + OwnershipTypeEntity entity = new OwnershipTypeEntity(); + entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); + entity.setUrn(owner.getTypeUrn().toString()); + result.setOwnershipType(entity); + } + if (owner.getOwner().getEntityType().equals("corpuser")) { + CorpUser partialOwner = new CorpUser(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } else { + CorpGroup partialOwner = new CorpGroup(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } + if (owner.hasSource()) { + result.setSource(OwnershipSourceMapper.map(owner.getSource())); } + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java index d978abee5bdfc..a38c16d02f121 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java @@ -1,56 +1,56 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import com.linkedin.common.urn.UrnUtils; -import javax.annotation.Nonnull; - import 
com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; import com.linkedin.common.OwnershipType; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.OwnerUpdate; -import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils; +import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import com.linkedin.common.urn.Urn; - import java.net.URISyntaxException; +import javax.annotation.Nonnull; public class OwnerUpdateMapper implements ModelMapper { - private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); - - public static Owner map(@Nonnull final OwnerUpdate input) { - return INSTANCE.apply(input); + private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); + + public static Owner map(@Nonnull final OwnerUpdate input) { + return INSTANCE.apply(input); + } + + @Override + public Owner apply(@Nonnull final OwnerUpdate input) { + final Owner owner = new Owner(); + try { + if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { + owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); + } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { + owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); } - - @Override - public Owner apply(@Nonnull final OwnerUpdate input) { - final Owner owner = new Owner(); - try { - if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { - owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); - } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { - owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); - } - } catch (URISyntaxException e) { - e.printStackTrace(); - } - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - } - // For backwards compatibility we have to always set the deprecated type. - // If the type exists we assume it's an old ownership type that we can map to. - // Else if it's a net new custom ownership type set old type to CUSTOM. - OwnershipType type = input.getType() != null ? OwnershipType.valueOf(input.getType().toString()) + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + } + // For backwards compatibility we have to always set the deprecated type. + // If the type exists we assume it's an old ownership type that we can map to. + // Else if it's a net new custom ownership type set old type to CUSTOM. + OwnershipType type = + input.getType() != null + ? 
OwnershipType.valueOf(input.getType().toString()) : OwnershipType.CUSTOM; - owner.setType(type); - - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - owner.setType(OwnershipType.CUSTOM); - } + owner.setType(type); - owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); - return owner; + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + owner.setType(OwnershipType.CUSTOM); } + + owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); + return owner; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java index 6614cfb28a478..31f637a047798 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java @@ -2,30 +2,31 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Ownership; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class OwnershipMapper { - public static final OwnershipMapper INSTANCE = new OwnershipMapper(); + public static final OwnershipMapper INSTANCE = new OwnershipMapper(); - public static Ownership map(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(ownership, entityUrn); - } + public static Ownership map( + @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(ownership, entityUrn); + } - public Ownership apply(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - final Ownership result = new Ownership(); - result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); - result.setOwners(ownership.getOwners() - .stream() - .map(owner -> OwnerMapper.map(owner, entityUrn)) - .collect(Collectors.toList())); - return result; - } + public Ownership apply( + @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + final Ownership result = new Ownership(); + result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); + result.setOwners( + ownership.getOwners().stream() + .map(owner -> OwnerMapper.map(owner, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java index abcc67c35f92a..75eaffb850a8b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java @@ -3,28 +3,28 @@ import com.linkedin.datahub.graphql.generated.OwnershipSource; import com.linkedin.datahub.graphql.generated.OwnershipSourceType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ -public class OwnershipSourceMapper implements ModelMapper { +public class OwnershipSourceMapper + implements ModelMapper { - public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); + public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); - public static OwnershipSource map(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - return INSTANCE.apply(ownershipSource); - } + public static OwnershipSource map( + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + return INSTANCE.apply(ownershipSource); + } - @Override - public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - final OwnershipSource result = new OwnershipSource(); - result.setUrl(ownershipSource.getUrl()); - result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); - return result; - } + @Override + public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + final OwnershipSource result = new OwnershipSource(); + result.setUrl(ownershipSource.getUrl()); + result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java index 1162c69d74938..97afbc7ddf855 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java @@ -1,7 +1,5 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - import com.linkedin.common.AuditStamp; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -9,31 +7,30 @@ import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.generated.OwnershipUpdate; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; public class OwnershipUpdateMapper implements InputModelMapper { - private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); + private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); - public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - return INSTANCE.apply(input, actor); - } + public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + return INSTANCE.apply(input, actor); + } - @Override - public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - final Ownership ownership = new Ownership(); + @Override + public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + final Ownership ownership = new Ownership(); - ownership.setOwners(new OwnerArray(input.getOwners() - .stream() - .map(OwnerUpdateMapper::map) - .collect(Collectors.toList()))); + ownership.setOwners( + new OwnerArray( + input.getOwners().stream().map(OwnerUpdateMapper::map).collect(Collectors.toList()))); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - 
ownership.setLastModified(auditStamp); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + ownership.setLastModified(auditStamp); - return ownership; - } + return ownership; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index f3ac008734339..e2d29d0297449 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps GraphQL SearchFlags to Pegasus * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ -public class SearchFlagsInputMapper implements ModelMapper { +public class SearchFlagsInputMapper + implements ModelMapper { public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper(); - public static com.linkedin.metadata.query.SearchFlags map(@Nonnull final SearchFlags searchFlags) { + public static com.linkedin.metadata.query.SearchFlags map( + @Nonnull final SearchFlags searchFlags) { return INSTANCE.apply(searchFlags); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java index 942171017cea4..0758daf5df2e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java @@ -5,13 +5,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ -public class SiblingsMapper implements ModelMapper { +public class SiblingsMapper + implements ModelMapper { public static final SiblingsMapper INSTANCE = new SiblingsMapper(); @@ -23,10 +23,8 @@ public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) { final SiblingProperties result = new SiblingProperties(); result.setIsPrimary(siblings.isPrimary()); - result.setSiblings(siblings.getSiblings() - .stream() - .map(UrnToEntityMapper::map) - .collect(Collectors.toList())); + result.setSiblings( + siblings.getSiblings().stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java index 25d01d8de0e4c..2d1efdffc496c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java @@ -2,21 +2,20 @@ import com.linkedin.datahub.graphql.generated.Status; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class StatusMapper implements ModelMapper { - public static final StatusMapper INSTANCE = new StatusMapper(); + public static final StatusMapper INSTANCE = new StatusMapper(); - public static Status map(@Nonnull final com.linkedin.common.Status metadata) { - return INSTANCE.apply(metadata); - } + public static Status map(@Nonnull final com.linkedin.common.Status metadata) { + return INSTANCE.apply(metadata); + } - @Override - public Status apply(@Nonnull final com.linkedin.common.Status input) { - final Status result = new Status(); - result.setRemoved(input.isRemoved()); - return result; - } + @Override + public Status apply(@Nonnull final com.linkedin.common.Status input) { + final Status result = new Status(); + result.setRemoved(input.isRemoved()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java index 32c49a2010414..0e8d6822b7d09 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java @@ -7,29 +7,28 @@ import java.util.Map; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class StringMapMapper implements ModelMapper, List> { - public static final StringMapMapper INSTANCE = new StringMapMapper(); + public static final StringMapMapper INSTANCE = new StringMapMapper(); - public static List map(@Nonnull final Map input) { - return INSTANCE.apply(input); - } + public static List map(@Nonnull final Map input) { + return INSTANCE.apply(input); + } - @Override - public List apply(@Nonnull final Map input) { - List results = new ArrayList<>(); - for (String key : input.keySet()) { - final StringMapEntry entry = new StringMapEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - results.add(entry); - } - return results; + @Override + public List apply(@Nonnull final Map input) { + List results = new ArrayList<>(); + for (String key : input.keySet()) { + final StringMapEntry entry = new StringMapEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java index 9aa94eae62999..55294e4b46822 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -5,17 +5,20 @@ import java.util.ArrayList; import javax.annotation.Nonnull; -public class SubTypesMapper implements ModelMapper { +public class SubTypesMapper + implements ModelMapper { public static final SubTypesMapper INSTANCE = new SubTypesMapper(); - public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) { + public static com.linkedin.datahub.graphql.generated.SubTypes map( + @Nonnull final SubTypes metadata) { return INSTANCE.apply(metadata); } @Override public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { - final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); + final com.linkedin.datahub.graphql.generated.SubTypes result = + new com.linkedin.datahub.graphql.generated.SubTypes(); result.setTypeNames(new ArrayList<>(input.getTypeNames())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java index 8359f1ec86f34..4fdf7edea07d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java @@ -4,22 +4,24 @@ import java.util.List; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class UpstreamLineagesMapper { public static final UpstreamLineagesMapper INSTANCE = new UpstreamLineagesMapper(); - public static List map(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + public static List map( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { return INSTANCE.apply(upstreamLineage); } - public List apply(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { - if (!upstreamLineage.hasFineGrainedLineages() || upstreamLineage.getFineGrainedLineages() == null) { + public List apply( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + if (!upstreamLineage.hasFineGrainedLineages() + || upstreamLineage.getFineGrainedLineages() == null) { return new ArrayList<>(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 34bf56a396b62..4c452af126201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Chart; @@ -35,10 +37,7 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class UrnToEntityMapper implements ModelMapper { +public class UrnToEntityMapper implements ModelMapper { public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper(); public static Entity map(@Nonnull final com.linkedin.common.urn.Urn urn) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java index 1e284efdb610f..0b156f11e8834 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java @@ -7,14 +7,10 @@ import lombok.AllArgsConstructor; import lombok.Getter; - @AllArgsConstructor public class MappingHelper { - @Nonnull - private final EnvelopedAspectMap _aspectMap; - @Getter - @Nonnull - private final O result; + @Nonnull private final EnvelopedAspectMap _aspectMap; + @Getter @Nonnull private final O result; public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer consumer) { if (_aspectMap.containsKey(aspectName)) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java index 7d1b374e1f9b6..00e339a0320ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.Setter; - @Data 
@Setter @Getter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java index d08300d648c32..46df032cbffbf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java @@ -1,19 +1,17 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.mxe.SystemMetadata; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} @Nullable public static Long getLastIngestedTime(@Nonnull EnvelopedAspectMap aspectMap) { @@ -28,7 +26,8 @@ public static String getLastIngestedRunId(@Nonnull EnvelopedAspectMap aspectMap) } /** - * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects present for the entity. + * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects + * present for the entity. */ @Nonnull public static List getLastIngestionRuns(@Nonnull EnvelopedAspectMap aspectMap) { @@ -36,12 +35,16 @@ public static List getLastIngestionRuns(@Nonnull EnvelopedAspectMap asp for (String aspect : aspectMap.keySet()) { if (aspectMap.get(aspect).hasSystemMetadata()) { SystemMetadata systemMetadata = aspectMap.get(aspect).getSystemMetadata(); - if (systemMetadata.hasLastRunId() && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + if (systemMetadata.hasLastRunId() + && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getLastRunId(); RunInfo run = new RunInfo(runId, lastObserved); runs.add(run); - } else if (systemMetadata.hasRunId() && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + } else if (systemMetadata.hasRunId() + && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { // Handle the legacy case: Check original run ids. 
Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getRunId(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java index 108aa7ed5b0c9..606cebba0880f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java @@ -6,7 +6,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import lombok.AllArgsConstructor; - @AllArgsConstructor public class UpdateMappingHelper { private final String entityName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 20cfe6ac46127..1200493666a59 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -18,8 +18,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.net.URISyntaxException; @@ -33,31 +33,31 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -public class ContainerType implements SearchableEntityType, +public class ContainerType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME, - Constants.SUB_TYPES_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - Constants.CONTAINER_ASPECT_NAME, - Constants.DOMAINS_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.DATA_PRODUCTS_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + Constants.CONTAINER_PROPERTIES_ASPECT_NAME, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME, + Constants.SUB_TYPES_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + Constants.CONTAINER_ASPECT_NAME, + Constants.DOMAINS_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.DATA_PRODUCTS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "container"; private final EntityClient _entityClient; - public ContainerType(final EntityClient entityClient) { + public ContainerType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -77,28 +77,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull 
List urns, @Nonnull QueryContext context) throws Exception { - final List containerUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List containerUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.CONTAINER_ENTITY_NAME, - new HashSet<>(containerUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.CONTAINER_ENTITY_NAME, + new HashSet<>(containerUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(ContainerMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(ContainerMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Container", e); @@ -114,24 +116,36 @@ private Urn getUrn(final String urnStr) { } @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index b81259e78be3e..07594c53c6831 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.container.mappers; +import static com.linkedin.metadata.Constants.*; + import 
com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -15,11 +17,11 @@ import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -33,9 +35,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class ContainerMapper { @Nullable @@ -49,46 +48,61 @@ public static Container map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.CONTAINER); - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } - final EnvelopedAspect envelopedContainerProperties = aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedContainerProperties = + aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); if (envelopedContainerProperties != null) { - result.setProperties(mapContainerProperties(new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); + result.setProperties( + mapContainerProperties( + new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedEditableContainerProperties = aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedEditableContainerProperties = + aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); if (envelopedEditableContainerProperties != null) { - result.setEditableProperties(mapContainerEditableProperties(new EditableContainerProperties(envelopedEditableContainerProperties.getValue().data()))); + result.setEditableProperties( + mapContainerEditableProperties( + new EditableContainerProperties( + envelopedEditableContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + 
result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); @@ -103,12 +117,13 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); if (envelopedContainer != null) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(envelopedContainer.getValue().data()); - result.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(envelopedContainer.getValue().data()); + result.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } final EnvelopedAspect envelopedDomains = aspects.get(Constants.DOMAINS_ASPECT_NAME); @@ -120,21 +135,25 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + result.setDeprecation( + DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); } return result; } - private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties(final ContainerProperties gmsProperties, Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = new com.linkedin.datahub.graphql.generated.ContainerProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties( + final ContainerProperties gmsProperties, Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.ContainerProperties(); propertiesResult.setName(gmsProperties.getName()); 
propertiesResult.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { propertiesResult.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - propertiesResult.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + propertiesResult.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } if (gmsProperties.hasQualifiedName()) { propertiesResult.setQualifiedName(gmsProperties.getQualifiedName().toString()); @@ -143,10 +162,11 @@ private static com.linkedin.datahub.graphql.generated.ContainerProperties mapCon return propertiesResult; } - private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties mapContainerEditableProperties( - final EditableContainerProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.ContainerEditableProperties editableContainerProperties = - new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties + mapContainerEditableProperties(final EditableContainerProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.ContainerEditableProperties + editableContainerProperties = + new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); editableContainerProperties.setDescription(gmsProperties.getDescription()); return editableContainerProperties; } @@ -158,5 +178,5 @@ private static DataPlatform mapPlatform(final DataPlatformInstance platformInsta return dummyPlatform; } - private ContainerMapper() { } + private ContainerMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 285a119be0d43..371cf6b280c20 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpgroup; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -7,8 +12,6 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -27,8 +30,8 @@ import com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import 
com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -42,155 +45,193 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - -public class CorpGroupType implements SearchableEntityType, MutableType { - - private final EntityClient _entityClient; - - public CorpGroupType(final EntityClient entityClient) { - _entityClient = entityClient; +public class CorpGroupType + implements SearchableEntityType, + MutableType { + + private final EntityClient _entityClient; + + public CorpGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return CorpGroup.class; + } + + public Class inputClass() { + return CorpGroupUpdateInput.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_GROUP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + try { + final List corpGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map corpGroupMap = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>(corpGroupUrns), + null, + context.getAuthentication()); + + final List results = new ArrayList<>(); + for (Urn urn : corpGroupUrns) { + results.add(corpGroupMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpGroup -> + gmsCorpGroup == null + ? null + : DataFetcherResult.newResult() + .data(CorpGroupMapper.map(gmsCorpGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load CorpGroup", e); } - - @Override - public Class objectClass() { - return CorpGroup.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpGroup", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpGroup", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public CorpGroup update( + @Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Urn groupUrn = Urn.createFromString(urn); + Map gmsResponse = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + ImmutableSet.of(groupUrn), + ImmutableSet.of(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), + context.getAuthentication()); + + CorpGroupEditableInfo existingCorpGroupEditableInfo = null; + if (gmsResponse.containsKey(groupUrn) + && gmsResponse + .get(groupUrn) + .getAspects() + .containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { + existingCorpGroupEditableInfo = + new CorpGroupEditableInfo( + gmsResponse + .get(groupUrn) + .getAspects() + 
.get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME) + .getValue() + .data()); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); } - - public Class inputClass() { - return CorpGroupUpdateInput.class; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpGroupUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final CorpGroupUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getDescription() != null) { + // Requires the Update Docs privilege. + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { + // Requires the Update Contact info privilege. + specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); } - @Override - public EntityType type() { - return EntityType.CORP_GROUP; - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public List> batchLoad(final List urns, final QueryContext context) { - try { - final List corpGroupUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map corpGroupMap = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(corpGroupUrns), null, context.getAuthentication()); - - final List results = new ArrayList<>(); - for (Urn urn : corpGroupUrns) { - results.add(corpGroupMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpGroup -> gmsCorpGroup == null ? null - : DataFetcherResult.newResult().data(CorpGroupMapper.map(gmsCorpGroup)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load CorpGroup", e); - } - } + private RecordTemplate mapCorpGroupEditableInfo( + CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { + CorpGroupEditableInfo result = existing != null ? 
existing : new CorpGroupEditableInfo(); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult - searchResult = _entityClient.search("corpGroup", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (input.getDescription() != null) { + result.setDescription(input.getDescription()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpGroup", query, filters, limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - @Override - public CorpGroup update(@Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Urn groupUrn = Urn.createFromString(urn); - Map gmsResponse = - _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, ImmutableSet.of(groupUrn), ImmutableSet.of( - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), - context.getAuthentication()); - - CorpGroupEditableInfo existingCorpGroupEditableInfo = null; - if (gmsResponse.containsKey(groupUrn) && gmsResponse.get(groupUrn).getAspects().containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { - existingCorpGroupEditableInfo = new CorpGroupEditableInfo(gmsResponse.get(groupUrn).getAspects() - .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME).getValue().data()); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorizedToUpdate(String urn, CorpGroupUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpGroupUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getDescription() != null) { - // Requires the Update Docs privilege. - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { - // Requires the Update Contact info privilege. 
- specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } - - private RecordTemplate mapCorpGroupEditableInfo(CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { - CorpGroupEditableInfo result = existing != null ? existing : new CorpGroupEditableInfo(); - - if (input.getDescription() != null) { - result.setDescription(input.getDescription()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - return result; + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java index c1cd33b0077f6..318506d9d61fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpgroup; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpGroupUrn; +import java.net.URISyntaxException; public class CorpGroupUtils { - private CorpGroupUtils() { } + private CorpGroupUtils() {} - public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpGroupUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); - } + public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpGroupUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java index f476794bc545e..a6e14535cf0b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java @@ -3,28 +3,32 @@ import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ -public class CorpGroupEditablePropertiesMapper implements ModelMapper { +public class CorpGroupEditablePropertiesMapper + implements ModelMapper< + com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> { - public static final CorpGroupEditablePropertiesMapper INSTANCE = new CorpGroupEditablePropertiesMapper(); + public static final CorpGroupEditablePropertiesMapper INSTANCE = + new CorpGroupEditablePropertiesMapper(); - public static CorpGroupEditableProperties map(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public static CorpGroupEditableProperties map( + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { return INSTANCE.apply(corpGroupEditableInfo); } @Override - public CorpGroupEditableProperties apply(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public CorpGroupEditableProperties apply( + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { final CorpGroupEditableProperties result = new CorpGroupEditableProperties(); result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT)); result.setSlack(corpGroupEditableInfo.getSlack(GetMode.DEFAULT)); result.setEmail(corpGroupEditableInfo.getEmail(GetMode.DEFAULT)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java index 3d2d4aea2b001..04d0cc8ce94e6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java @@ -1,48 +1,58 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; -import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ -public class CorpGroupInfoMapper implements ModelMapper { +public class CorpGroupInfoMapper + implements ModelMapper { - public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); + public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); - public static CorpGroupInfo map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); - } + public static CorpGroupInfo map( + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + return INSTANCE.apply(corpGroupInfo); + } - @Override - public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { - final CorpGroupInfo result = new CorpGroupInfo(); - result.setEmail(info.getEmail()); - result.setDescription(info.getDescription()); - result.setDisplayName(info.getDisplayName()); - if (info.hasAdmins()) { - result.setAdmins(info.getAdmins().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasMembers()) { - result.setMembers(info.getMembers().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasGroups()) { - result.setGroups(info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); - } - return result; + @Override + public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + final CorpGroupInfo result = new CorpGroupInfo(); + result.setEmail(info.getEmail()); + result.setDescription(info.getDescription()); + result.setDisplayName(info.getDisplayName()); + if (info.hasAdmins()) { + result.setAdmins( + info.getAdmins().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasMembers()) { + result.setMembers( + info.getMembers().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasGroups()) { + result.setGroups( + info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 0fb1b66c644d7..52e200d19923a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Origin; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; @@ -16,78 +18,79 @@ import com.linkedin.metadata.key.CorpGroupKey; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ public class CorpGroupMapper implements ModelMapper { - public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); + public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); - public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { - final CorpGroup result = new CorpGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { + final CorpGroup result = new CorpGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); - mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); - mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); - if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { - mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); - } else { - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - result.setOrigin(mappedGroupOrigin); - } - return mappingHelper.getResult(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); + mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); + mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); + if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { + mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); + } else { + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + result.setOrigin(mappedGroupOrigin); } + return mappingHelper.getResult(); + } - private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); - corpGroup.setName(corpGroupKey.getName()); - } + private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); + corpGroup.setName(corpGroupKey.getName()); + } - private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); - corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); - 
corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); - } + private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); + corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); + corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); + } - private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - corpGroup.setEditableProperties(CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); - } + private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + corpGroup.setEditableProperties( + CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); + } - private void mapOwnership(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); - } + private void mapOwnership( + @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); + } - private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - Origin groupOrigin = new Origin(dataMap); - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - if (groupOrigin.hasType()) { - mappedGroupOrigin.setType( - com.linkedin.datahub.graphql.generated.OriginType.valueOf(groupOrigin.getType().toString())); - } else { - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - } - if (groupOrigin.hasExternalType()) { - mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); - } - corpGroup.setOrigin(mappedGroupOrigin); + private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + Origin groupOrigin = new Origin(dataMap); + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + if (groupOrigin.hasType()) { + mappedGroupOrigin.setType( + com.linkedin.datahub.graphql.generated.OriginType.valueOf( + groupOrigin.getType().toString())); + } else { + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + } + if (groupOrigin.hasExternalType()) { + mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); } + corpGroup.setOrigin(mappedGroupOrigin); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java index 266d8be67cb06..29d0482863971 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java @@ -3,19 +3,20 @@ import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.CorpGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ -public class CorpGroupPropertiesMapper implements ModelMapper { +public class CorpGroupPropertiesMapper + implements ModelMapper { public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper(); - public static CorpGroupProperties map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + public static CorpGroupProperties map( + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { return INSTANCE.apply(corpGroupInfo); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index db2b49c790f57..5749eef970fce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.url.Url; import com.linkedin.common.urn.Urn; @@ -8,8 +13,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; @@ -29,8 +32,8 @@ import com.linkedin.identity.CorpUserEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -45,176 +48,206 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class CorpUserType implements SearchableEntityType, MutableType { +public class CorpUserType + implements SearchableEntityType, MutableType { + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; + + public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { + _entityClient = entityClient; + _featureFlags = featureFlags; + } + + @Override + public Class objectClass() { + return CorpUser.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_USER; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + try { + final List corpUserUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map corpUserMap = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + new 
HashSet<>(corpUserUrns), + null, + context.getAuthentication()); + + final List results = new ArrayList<>(); + for (Urn urn : corpUserUrns) { + results.add(corpUserMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpUser -> + gmsCorpUser == null + ? null + : DataFetcherResult.newResult() + .data(CorpUserMapper.map(gmsCorpUser, _featureFlags)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); + } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpuser", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + public Class inputClass() { + return CorpUserUpdateInput.class; + } + + @Override + public CorpUser update( + @Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Optional existingCorpUserEditableInfo = + _entityClient.getVersionedAspect( + urn, + CORP_USER_EDITABLE_INFO_NAME, + 0L, + CorpUserEditableInfo.class, + context.getAuthentication()); + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_USER_EDITABLE_INFO_NAME, + mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpUserUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + + // Either the updating actor is the user, or the actor has privileges to update the user + // information. 
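+    // A minimal illustration (urns hypothetical): an actor urn:li:corpuser:jdoe
+    // updating urn:li:corpuser:jdoe is allowed by the equality check alone, with
+    // no policy lookup; updating any other urn falls through to the
+    // privilege-group evaluation below.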
+ return context.getActorUrn().equals(urn) + || AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getSlack() != null + || updateInput.getEmail() != null + || updateInput.getPhone() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); + } else if (updateInput.getAboutMe() != null + || updateInput.getDisplayName() != null + || updateInput.getPictureLink() != null + || updateInput.getTeams() != null + || updateInput.getTitle() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); + } - private final EntityClient _entityClient; - private final FeatureFlags _featureFlags; + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { - _entityClient = entityClient; - _featureFlags = featureFlags; - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public Class objectClass() { - return CorpUser.class; + private RecordTemplate mapCorpUserEditableInfo( + CorpUserUpdateInput input, Optional existing) { + CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public EntityType type() { - return EntityType.CORP_USER; + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; + if (input.getPictureLink() != null) { + result.setPictureLink(new Url(input.getPictureLink())); } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - try { - final List corpUserUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map corpUserMap = _entityClient - .batchGetV2(CORP_USER_ENTITY_NAME, new HashSet<>(corpUserUrns), null, - context.getAuthentication()); - - final List results = new ArrayList<>(); - for (Urn urn : corpUserUrns) { - results.add(corpUserMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpUser -> gmsCorpUser == null ? 
null - : DataFetcherResult.newResult().data(CorpUserMapper.map(gmsCorpUser, _featureFlags)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search("corpuser", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (input.getSkills() != null) { + result.setSkills(new StringArray(input.getSkills())); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (input.getTeams() != null) { + result.setTeams(new StringArray(input.getTeams())); } - - public Class inputClass() { - return CorpUserUpdateInput.class; + if (input.getTitle() != null) { + result.setTitle(input.getTitle()); } - - @Override - public CorpUser update(@Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Optional existingCorpUserEditableInfo = - _entityClient.getVersionedAspect(urn, CORP_USER_EDITABLE_INFO_NAME, 0L, CorpUserEditableInfo.class, - context.getAuthentication()); - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_USER_EDITABLE_INFO_NAME, mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (input.getPhone() != null) { + result.setPhone(input.getPhone()); } - - private boolean isAuthorizedToUpdate(String urn, CorpUserUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - - // Either the updating actor is the user, or the actor has privileges to update the user information. 
- return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getSlack() != null - || updateInput.getEmail() != null - || updateInput.getPhone() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } else if (updateInput.getAboutMe() != null - || updateInput.getDisplayName() != null - || updateInput.getPictureLink() != null - || updateInput.getTeams() != null - || updateInput.getTitle() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } - private RecordTemplate mapCorpUserEditableInfo(CorpUserUpdateInput input, Optional existing) { - CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getPictureLink() != null) { - result.setPictureLink(new Url(input.getPictureLink())); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getSkills() != null) { - result.setSkills(new StringArray(input.getSkills())); - } - if (input.getTeams() != null) { - result.setTeams(new StringArray(input.getTeams())); - } - if (input.getTitle() != null) { - result.setTitle(input.getTitle()); - } - if (input.getPhone() != null) { - result.setPhone(input.getPhone()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java index 0b5b40c3117e0..9cf8da69281a9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpuser; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpuserUrn; +import java.net.URISyntaxException; public class CorpUserUtils { - private CorpUserUtils() { } + private CorpUserUtils() {} - public static CorpuserUrn getCorpUserUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpuserUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new 
RuntimeException(String.format("Failed to create CorpUserUrn from string %s", urnStr), e); - } + public static CorpuserUrn getCorpUserUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpuserUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpUserUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java index 2a9f0efd69bcc..3ee353293393e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java @@ -2,36 +2,38 @@ import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ -public class CorpUserEditableInfoMapper implements ModelMapper { +public class CorpUserEditableInfoMapper + implements ModelMapper { - public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); + public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); - public static CorpUserEditableProperties map(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - return INSTANCE.apply(info); - } + public static CorpUserEditableProperties map( + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + return INSTANCE.apply(info); + } - @Override - public CorpUserEditableProperties apply(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - final CorpUserEditableProperties result = new CorpUserEditableProperties(); - result.setDisplayName(info.getDisplayName()); - result.setTitle(info.getTitle()); - result.setAboutMe(info.getAboutMe()); - result.setSkills(info.getSkills()); - result.setTeams(info.getTeams()); - result.setEmail(info.getEmail()); - result.setPhone(info.getPhone()); - result.setSlack(info.getSlack()); - if (info.hasPictureLink()) { - result.setPictureLink(info.getPictureLink().toString()); - } - return result; + @Override + public CorpUserEditableProperties apply( + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + final CorpUserEditableProperties result = new CorpUserEditableProperties(); + result.setDisplayName(info.getDisplayName()); + result.setTitle(info.getTitle()); + result.setAboutMe(info.getAboutMe()); + result.setSkills(info.getSkills()); + result.setTeams(info.getTeams()); + result.setEmail(info.getEmail()); + result.setPhone(info.getPhone()); + result.setSlack(info.getSlack()); + if (info.hasPictureLink()) { + result.setPictureLink(info.getPictureLink().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java index 96f60c08cd7c2..9044f4d510bcf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java @@ -3,38 +3,38 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ -public class CorpUserInfoMapper implements ModelMapper { +public class CorpUserInfoMapper + implements ModelMapper { - public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); + public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); - public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); - } + public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(corpUserInfo); + } - @Override - public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { - final CorpUserInfo result = new CorpUserInfo(); - result.setActive(info.isActive()); - result.setCountryCode(info.getCountryCode()); - result.setDepartmentId(info.getDepartmentId()); - result.setDepartmentName(info.getDepartmentName()); - result.setEmail(info.getEmail()); - result.setDisplayName(info.getDisplayName()); - result.setFirstName(info.getFirstName()); - result.setLastName(info.getLastName()); - result.setFullName(info.getFullName()); - result.setTitle(info.getTitle()); - if (info.hasManagerUrn()) { - result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); - } - return result; + @Override + public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + final CorpUserInfo result = new CorpUserInfo(); + result.setActive(info.isActive()); + result.setCountryCode(info.getCountryCode()); + result.setDepartmentId(info.getDepartmentId()); + result.setDepartmentName(info.getDepartmentName()); + result.setEmail(info.getEmail()); + result.setDisplayName(info.getDisplayName()); + result.setFirstName(info.getFirstName()); + result.setLastName(info.getLastName()); + result.setFullName(info.getFullName()); + result.setTitle(info.getTitle()); + if (info.hasManagerUrn()) { + result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index adcfb91c9cdf2..98783131a2d52 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.GlobalTags; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -26,120 +28,134 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
To be replaced by auto-generated mappers implementations */ public class CorpUserMapper { - public static final CorpUserMapper INSTANCE = new CorpUserMapper(); - - public static CorpUser map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse, null); - } - - public static CorpUser map(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - return INSTANCE.apply(entityResponse, featureFlags); - } - - public CorpUser apply(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - final CorpUser result = new CorpUser(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_USER); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); - mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); - mappingHelper.mapToResult(CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setEditableProperties(CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> + public static final CorpUserMapper INSTANCE = new CorpUserMapper(); + + public static CorpUser map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse, null); + } + + public static CorpUser map( + @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + return INSTANCE.apply(entityResponse, featureFlags); + } + + public CorpUser apply( + @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + final CorpUser result = new CorpUser(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_USER); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); + mappingHelper.mapToResult( + CORP_USER_INFO_ASPECT_NAME, + (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); + mappingHelper.mapToResult( + CORP_USER_EDITABLE_INFO_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setEditableProperties( + CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (corpUser, dataMap) -> corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME, - (corpUser, dataMap) -> corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); - mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); - - mapCorpUserSettings(result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); - - return mappingHelper.getResult(); + mappingHelper.mapToResult( + CORP_USER_STATUS_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); + + mapCorpUserSettings( + result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); + + return 
mappingHelper.getResult(); + } + + private void mapCorpUserSettings( + @Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { + CorpUserSettings corpUserSettings = new CorpUserSettings(); + if (envelopedAspect != null) { + corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); } + com.linkedin.datahub.graphql.generated.CorpUserSettings result = + new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - private void mapCorpUserSettings(@Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { - CorpUserSettings corpUserSettings = new CorpUserSettings(); - if (envelopedAspect != null) { - corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); - } - com.linkedin.datahub.graphql.generated.CorpUserSettings result = - new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - - // Map Appearance Settings -- Appearance settings always exist. - result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); + // Map Appearance Settings -- Appearance settings always exist. + result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); - // Map Views Settings. - if (corpUserSettings.hasViews()) { - result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); - } - - corpUser.setSettings(result); + // Map Views Settings. + if (corpUserSettings.hasViews()) { + result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); } - @Nonnull - private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( - @Nonnull final CorpUserSettings corpUserSettings, - @Nullable final FeatureFlags featureFlags - ) { - CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); - if (featureFlags != null) { - appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); - } else { - appearanceResult.setShowSimplifiedHomepage(false); - } - - if (corpUserSettings.hasAppearance()) { - appearanceResult.setShowSimplifiedHomepage(corpUserSettings.getAppearance().isShowSimplifiedHomepage()); - } - return appearanceResult; + corpUser.setSettings(result); + } + + @Nonnull + private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( + @Nonnull final CorpUserSettings corpUserSettings, @Nullable final FeatureFlags featureFlags) { + CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); + if (featureFlags != null) { + appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); + } else { + appearanceResult.setShowSimplifiedHomepage(false); } - @Nonnull - private CorpUserViewsSettings mapCorpUserViewsSettings(@Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { - CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); - - if (viewsSettings.hasDefaultView()) { - final DataHubView unresolvedView = new DataHubView(); - unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); - unresolvedView.setType(EntityType.DATAHUB_VIEW); - viewsResult.setDefaultView(unresolvedView); - } - - return viewsResult; - } - - private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserKey corpUserKey = new CorpUserKey(dataMap); - corpUser.setUsername(corpUserKey.getUsername()); + if (corpUserSettings.hasAppearance()) { + appearanceResult.setShowSimplifiedHomepage( + corpUserSettings.getAppearance().isShowSimplifiedHomepage()); } - - private void mapCorpUserInfo(@Nonnull CorpUser 
corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); - corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); - corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); - CorpUserProperties corpUserProperties = corpUser.getProperties(); - if (corpUserInfo.hasCustomProperties()) { - corpUserProperties.setCustomProperties(CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); - } - corpUser.setProperties(corpUserProperties); + return appearanceResult; + } + + @Nonnull + private CorpUserViewsSettings mapCorpUserViewsSettings( + @Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { + CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); + + if (viewsSettings.hasDefaultView()) { + final DataHubView unresolvedView = new DataHubView(); + unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); + unresolvedView.setType(EntityType.DATAHUB_VIEW); + viewsResult.setDefaultView(unresolvedView); } - private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); - boolean isNativeUser = - corpUserCredentials != null && corpUserCredentials.hasSalt() && corpUserCredentials.hasHashedPassword(); - corpUser.setIsNativeUser(isNativeUser); + return viewsResult; + } + + private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserKey corpUserKey = new CorpUserKey(dataMap); + corpUser.setUsername(corpUserKey.getUsername()); + } + + private void mapCorpUserInfo( + @Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); + corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); + corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); + CorpUserProperties corpUserProperties = corpUser.getProperties(); + if (corpUserInfo.hasCustomProperties()) { + corpUserProperties.setCustomProperties( + CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); } + corpUser.setProperties(corpUserProperties); + } + + private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); + boolean isNativeUser = + corpUserCredentials != null + && corpUserCredentials.hasSalt() + && corpUserCredentials.hasHashedPassword(); + corpUser.setIsNativeUser(isNativeUser); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java index c64406a74733b..106e3de661201 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java @@ -3,18 +3,16 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
- * - */ -public class CorpUserPropertiesMapper implements ModelMapper { +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ +public class CorpUserPropertiesMapper + implements ModelMapper { public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper(); - public static CorpUserProperties map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + public static CorpUserProperties map( + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { return INSTANCE.apply(corpUserInfo); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java index d0644fbfdacec..dd9e465a2d4ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java @@ -2,14 +2,15 @@ import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class CorpUserStatusMapper implements ModelMapper { +public class CorpUserStatusMapper + implements ModelMapper { public static final CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper(); - public static CorpUserStatus map(@Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { + public static CorpUserStatus map( + @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { return INSTANCE.apply(corpUserStatus); } @@ -18,4 +19,4 @@ public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus // Warning- if the backend provides an unexpected value this will fail. 
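    // As a sketch only (not part of this change): Enum.valueOf throws
    // IllegalArgumentException for an unknown constant, so a defensive variant
    // could guard the conversion:
    //   try {
    //     return CorpUserStatus.valueOf(status.getStatus());
    //   } catch (IllegalArgumentException e) {
    //     return null; // treat an unrecognized status as absent
    //   }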
return CorpUserStatus.valueOf(status.getStatus()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 104c7c004cb66..d01f9b3945dc3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dashboard; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -55,191 +58,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DashboardType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DASHBOARD_KEY_ASPECT_NAME, + DASHBOARD_INFO_ASPECT_NAME, + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); -public class DashboardType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DASHBOARD_KEY_ASPECT_NAME, - DASHBOARD_INFO_ASPECT_NAME, - EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - 
BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); - - private final EntityClient _entityClient; - - public DashboardType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class inputClass() { - return DashboardUpdateInput.class; - } + public DashboardType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DASHBOARD; - } + @Override + public Class inputClass() { + return DashboardUpdateInput.class; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.DASHBOARD; + } - @Override - public Class objectClass() { - return Dashboard.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dashboardMap = - _entityClient.batchGetV2( - Constants.DASHBOARD_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dashboardMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDashboard -> gmsDashboard == null ? null : DataFetcherResult.newResult() - .data(DashboardMapper.map(gmsDashboard)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Dashboards", e); - } - } + @Override + public Class objectClass() { + return Dashboard.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dashboard", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dashboardMap = + _entityClient.batchGetV2( + Constants.DASHBOARD_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dashboardMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDashboard -> + gmsDashboard == null + ? 
null + : DataFetcherResult.newResult() + .data(DashboardMapper.map(gmsDashboard)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Dashboards", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dashboard", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { - try { - return DashboardUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); - } - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dashboard", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - @Override - public Dashboard update(@Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DashboardUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { + try { + return DashboardUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); } + } + + @Override + public Dashboard update( + @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DashboardUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
+    return AuthorizationUtils.isAuthorized(
+        context.getAuthorizer(),
+        context.getAuthentication().getActor().toUrnStr(),
+        PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(),
+        urn,
+        orPrivilegeGroups);
+  }
+
+  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(
+      final DashboardUpdateInput updateInput) {
+
+    final ConjunctivePrivilegeGroup allPrivilegesGroup =
+        new ConjunctivePrivilegeGroup(
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()));

-    private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DashboardUpdateInput updateInput) {
-
-        final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(
-            PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-        ));
-
-        List<String> specificPrivileges = new ArrayList<>();
-        if (updateInput.getOwnership() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
-        }
-        if (updateInput.getEditableProperties() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
-        }
-        if (updateInput.getGlobalTags() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
-        }
-        final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges);
-
-        // If you either have all entity privileges, or have the specific privileges required, you are authorized.
-        return new DisjunctivePrivilegeGroup(ImmutableList.of(
-            allPrivilegesGroup,
-            specificPrivilegeGroup
-        ));
+    List<String> specificPrivileges = new ArrayList<>();
+    if (updateInput.getOwnership() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
     }
+    if (updateInput.getEditableProperties() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
+    }
+    if (updateInput.getGlobalTags() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
+    }
+    final ConjunctivePrivilegeGroup specificPrivilegeGroup =
+        new ConjunctivePrivilegeGroup(specificPrivileges);
+
+    // If you either have all entity privileges, or have the specific privileges required, you are
+    // authorized.
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 432624ac4699f..704d2ae308c1a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -26,13 +28,13 @@ import com.linkedin.datahub.graphql.types.chart.mappers.InputFieldsMapper; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -49,161 +51,202 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DashboardMapper implements ModelMapper { - public static final DashboardMapper INSTANCE = new DashboardMapper(); - - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { - final Dashboard result = new Dashboard(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DASHBOARD); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> + public static final DashboardMapper INSTANCE = new DashboardMapper(); + + public static Dashboard map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public Dashboard apply(@Nonnull final 
EntityResponse entityResponse) { + final Dashboard result = new Dashboard(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DASHBOARD); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - 
mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final DashboardKey gmsKey = new DashboardKey(dataMap); - dashboard.setDashboardId(gmsKey.getDashboardId()); - dashboard.setTool(gmsKey.getDashboardTool()); - dashboard.setPlatform(DataPlatform.builder() + return mappingHelper.getResult(); + } + + private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final DashboardKey gmsKey = new DashboardKey(dataMap); + dashboard.setDashboardId(gmsKey.getDashboardId()); + dashboard.setTool(gmsKey.getDashboardTool()); + dashboard.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapDashboardInfo( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = + new com.linkedin.dashboard.DashboardInfo(dataMap); + dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); + dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link + * DashboardInfo} + */ + private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardInfo result = new DashboardInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + result.setCharts( + info.getCharts().stream() + .map( + urn -> { + final Chart chart = new Chart(); + chart.setUrn(urn.toString()); + return chart; + }) + .collect(Collectors.toList())); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapDashboardInfo(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link DashboardInfo} - */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardInfo result = new DashboardInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - result.setCharts(info.getCharts().stream().map(urn -> { - final Chart chart = new Chart(); - chart.setUrn(urn.toString()); - return chart; - }).collect(Collectors.toList())); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link DashboardProperties} - */ - private DashboardProperties mapDashboardInfoToProperties(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardProperties result = new DashboardProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); } - - private void mapEditableDashboardProperties(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(dataMap); - final DashboardEditableProperties dashboardEditableProperties = new DashboardEditableProperties(); - dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); - dashboard.setEditableProperties(dashboardEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link + * DashboardProperties} + */ + private DashboardProperties mapDashboardInfoToProperties( + final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardProperties result = new DashboardProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapGlobalTags(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dashboard.setGlobalTags(globalTags); - dashboard.setTags(globalTags); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dashboard.setContainer(Container - .builder() + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + return result; + } + + private void mapEditableDashboardProperties( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(dataMap); + final DashboardEditableProperties dashboardEditableProperties = + new DashboardEditableProperties(); + dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); + dashboard.setEditableProperties(dashboardEditableProperties); + } + + private void mapGlobalTags( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dashboard.setGlobalTags(globalTags); + dashboard.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dashboard.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); - } + private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index f084dbc0bc09f..6212663ee87e4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,67 
+19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class DashboardUpdateInputMapper + implements InputModelMapper, Urn> { + public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); + public static Collection map( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(dashboardUpdateInput, actor); + } -public class DashboardUpdateInputMapper implements - InputModelMapper, Urn> { - public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); - - public static Collection map(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); - } + @Override + public Collection apply( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { - @Override - public Collection apply(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - final Collection proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - - if (dashboardUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } - - if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dashboardUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } else { - // Tags override global tags - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dashboardUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dashboardUpdateInput.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); - editableDashboardProperties.setDescription(dashboardUpdateInput.getEditableProperties().getDescription()); - if (!editableDashboardProperties.hasCreated()) { - editableDashboardProperties.setCreated(auditStamp); - } - editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); - } + if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { + final GlobalTags 
globalTags = new GlobalTags(); + if (dashboardUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } else { + // Tags override global tags + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dashboardUpdateInput.getEditableProperties() != null) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); + editableDashboardProperties.setDescription( + dashboardUpdateInput.getEditableProperties().getDescription()); + if (!editableDashboardProperties.hasCreated()) { + editableDashboardProperties.setCreated(auditStamp); + } + editableDashboardProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index d257aef4be565..782ec3d3a6c07 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); @@ -18,8 +17,10 @@ public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect enveloped @Override public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), com.linkedin.dashboard.DashboardUsageStatistics.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + com.linkedin.dashboard.DashboardUsageStatistics.class); final com.linkedin.datahub.graphql.generated.DashboardUsageMetrics dashboardUsageMetrics = new com.linkedin.datahub.graphql.generated.DashboardUsageMetrics(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 54f7660064c05..6ec1979cd090d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataflow; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import 
com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataFlowType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_FLOW_KEY_ASPECT_NAME, + DATA_FLOW_INFO_ASPECT_NAME, + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); + private final EntityClient _entityClient; -public class DataFlowType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_FLOW_KEY_ASPECT_NAME, - DATA_FLOW_INFO_ASPECT_NAME, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); - private final EntityClient _entityClient; - - public DataFlowType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataFlowType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_FLOW; - } + @Override + public EntityType type() { + return EntityType.DATA_FLOW; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return DataFlow.class; - } + @Override + public Class objectClass() { + return DataFlow.class; + } - @Override - public Class inputClass() { - return DataFlowUpdateInput.class; - } - - @Override - 
public List> batchLoad(final List urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dataFlowMap = - _entityClient.batchGetV2( - Constants.DATA_FLOW_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataFlowMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataFlow -> gmsDataFlow == null ? null : DataFetcherResult.newResult() - .data(DataFlowMapper.map(gmsDataFlow)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Flows", e); - } - } + @Override + public Class inputClass() { + return DataFlowUpdateInput.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dataFlow", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List> batchLoad( + final List urnStrs, @Nonnull final QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dataFlowMap = + _entityClient.batchGetV2( + Constants.DATA_FLOW_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataFlowMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataFlow -> + gmsDataFlow == null + ? null + : DataFetcherResult.newResult() + .data(DataFlowMapper.map(gmsDataFlow)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Flows", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataFlow", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataFlow update(@Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) throws Exception { + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataFlow", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DataFlowUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + DataFlowUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public DataFlow update( + @Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) + throws Exception { - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DataFlowUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 719fa9f0b2bf0..165fae81527ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -17,12 +19,12 @@ import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -38,120 +40,147 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataFlowMapper implements ModelMapper { - public static final DataFlowMapper INSTANCE = new DataFlowMapper(); - - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { - final DataFlow result = new DataFlow(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_FLOW); - Urn entityUrn = entityResponse.getUrn(); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new 
MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> + public static final DataFlowMapper INSTANCE = new DataFlowMapper(); + + public static DataFlow map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataFlow apply(@Nonnull final EntityResponse entityResponse) { + final DataFlow result = new DataFlow(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_FLOW); + Urn entityUrn = entityResponse.getUrn(); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); + mappingHelper.mapToResult( + DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + 
DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } + return mappingHelper.getResult(); + } - private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final DataFlowKey gmsKey = new DataFlowKey(dataMap); - dataFlow.setOrchestrator(gmsKey.getOrchestrator()); - dataFlow.setFlowId(gmsKey.getFlowId()); - dataFlow.setCluster(gmsKey.getCluster()); - dataFlow.setPlatform(DataPlatform.builder() + private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final DataFlowKey gmsKey = new DataFlowKey(dataMap); + dataFlow.setOrchestrator(gmsKey.getOrchestrator()); + dataFlow.setFlowId(gmsKey.getFlowId()); + dataFlow.setCluster(gmsKey.getCluster()); + dataFlow.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getOrchestrator()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } - - private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = new com.linkedin.datajob.DataFlowInfo(dataMap); - dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); - dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); - } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} - */ - private DataFlowInfo mapDataFlowInfo(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowInfo result = new DataFlowInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getOrchestrator()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = + new com.linkedin.datajob.DataFlowInfo(dataMap); + dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); + dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} + */ + private DataFlowInfo mapDataFlowInfo( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowInfo result = new DataFlowInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} - */ - private DataFlowProperties mapDataFlowInfoToProperties(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowProperties result = new 
DataFlowProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(dataMap); - final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); - dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); - dataFlow.setEditableProperties(dataFlowEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} + */ + private DataFlowProperties mapDataFlowInfoToProperties( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowProperties result = new DataFlowProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - private void mapGlobalTags(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataFlow.setGlobalTags(globalTags); - dataFlow.setTags(globalTags); - } - - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } + return result; + } + + private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(dataMap); + final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); + dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); + dataFlow.setEditableProperties(dataFlowEditableProperties); + } + + private void mapGlobalTags( + @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataFlow.setGlobalTags(globalTags); + dataFlow.setTags(globalTags); + } + + private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+    dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn()));
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java
index c966fc8338ed4..87579a15d586e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.dataflow.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.TagAssociationArray;
@@ -17,22 +19,18 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
-public class DataFlowUpdateInputMapper implements InputModelMapper<DataFlowUpdateInput, Collection<MetadataChangeProposal>, Urn> {
+public class DataFlowUpdateInputMapper
+    implements InputModelMapper<DataFlowUpdateInput, Collection<MetadataChangeProposal>, Urn> {
 
   public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper();
 
-  public static Collection<MetadataChangeProposal> map(@Nonnull final DataFlowUpdateInput dataFlowUpdateInput,
-      @Nonnull final Urn actor) {
+  public static Collection<MetadataChangeProposal> map(
+      @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) {
     return INSTANCE.apply(dataFlowUpdateInput, actor);
   }
 
   @Override
   public Collection<MetadataChangeProposal> apply(
-      @Nonnull final DataFlowUpdateInput dataFlowUpdateInput,
-      @Nonnull final Urn actor) {
+      @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) {
     final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3);
     final AuditStamp auditStamp = new AuditStamp();
     auditStamp.setActor(actor, SetMode.IGNORE_NULL);
@@ -41,7 +39,8 @@ public Collection<MetadataChangeProposal> apply(
 
     if (dataFlowUpdateInput.getOwnership() != null) {
       proposals.add(
-          updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor),
+          updateMappingHelper.aspectToProposal(
+              OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor),
               OWNERSHIP_ASPECT_NAME));
     }
 
@@ -50,28 +49,29 @@ public Collection<MetadataChangeProposal> apply(
     if (dataFlowUpdateInput.getGlobalTags() != null) {
       globalTags.setTags(
           new TagAssociationArray(
-              dataFlowUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map
-              ).collect(Collectors.toList())
-          )
-      );
+              dataFlowUpdateInput.getGlobalTags().getTags().stream()
+                  .map(TagAssociationUpdateMapper::map)
+                  .collect(Collectors.toList())));
     } else {
       globalTags.setTags(
           new TagAssociationArray(
-              dataFlowUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map
-              ).collect(Collectors.toList())
-          )
-      );
+              dataFlowUpdateInput.getTags().getTags().stream()
+                  .map(TagAssociationUpdateMapper::map)
+                  .collect(Collectors.toList())));
     }
     proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME));
   }
 
   if (dataFlowUpdateInput.getEditableProperties() != null) {
-      final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties();
-      editableDataFlowProperties.setDescription(dataFlowUpdateInput.getEditableProperties().getDescription());
+      final EditableDataFlowProperties editableDataFlowProperties =
+          new EditableDataFlowProperties();
+      editableDataFlowProperties.setDescription(
+          dataFlowUpdateInput.getEditableProperties().getDescription());
       editableDataFlowProperties.setCreated(auditStamp);
       editableDataFlowProperties.setLastModified(auditStamp);
-      proposals.add(updateMappingHelper.aspectToProposal(editableDataFlowProperties,
-          EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME));
+      proposals.add(
+          updateMappingHelper.aspectToProposal(
+              editableDataFlowProperties, EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME));
     }
 
     return proposals;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java
index f6f37978bb36a..6e71584007504 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.types.datajob;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.CorpuserUrn;
@@ -9,8 +14,6 @@
 import com.linkedin.data.template.StringArray;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
-import com.datahub.authorization.ConjunctivePrivilegeGroup;
-import com.datahub.authorization.DisjunctivePrivilegeGroup;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
 import com.linkedin.datahub.graphql.generated.BrowsePath;
@@ -37,8 +40,8 @@
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.r2.RemoteInvocationException;
@@ -54,178 +57,201 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
+public class DataJobType
+    implements SearchableEntityType<DataJob>,
+        BrowsableEntityType<DataJob>,
+        MutableType<DataJobUpdateInput, DataJob> {
 
+  private static final Set<String> ASPECTS_TO_RESOLVE =
+      ImmutableSet.of(
+          DATA_JOB_KEY_ASPECT_NAME,
+          DATA_JOB_INFO_ASPECT_NAME,
+          DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
+          EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME,
+          OWNERSHIP_ASPECT_NAME,
+          INSTITUTIONAL_MEMORY_ASPECT_NAME,
+          GLOBAL_TAGS_ASPECT_NAME,
+          GLOSSARY_TERMS_ASPECT_NAME,
+          STATUS_ASPECT_NAME,
+          DOMAINS_ASPECT_NAME,
+          DEPRECATION_ASPECT_NAME,
+          DATA_PLATFORM_INSTANCE_ASPECT_NAME,
+          DATA_PRODUCTS_ASPECT_NAME,
+          BROWSE_PATHS_V2_ASPECT_NAME,
+          SUB_TYPES_ASPECT_NAME);
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow");
+  private final EntityClient _entityClient;
 
-public class DataJobType implements SearchableEntityType<DataJob>, BrowsableEntityType<DataJob>,
-    MutableType<DataJobUpdateInput, DataJob> {
-
-    private static final Set<String> ASPECTS_TO_RESOLVE = ImmutableSet.of(
-        DATA_JOB_KEY_ASPECT_NAME,
-        DATA_JOB_INFO_ASPECT_NAME,
-        DATA_JOB_INPUT_OUTPUT_ASPECT_NAME,
-        EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME,
-        OWNERSHIP_ASPECT_NAME,
-        INSTITUTIONAL_MEMORY_ASPECT_NAME,
-        GLOBAL_TAGS_ASPECT_NAME,
-        GLOSSARY_TERMS_ASPECT_NAME,
-        STATUS_ASPECT_NAME,
-        DOMAINS_ASPECT_NAME,
-        DEPRECATION_ASPECT_NAME,
-        DATA_PLATFORM_INSTANCE_ASPECT_NAME,
-        DATA_PRODUCTS_ASPECT_NAME,
-        BROWSE_PATHS_V2_ASPECT_NAME,
-        SUB_TYPES_ASPECT_NAME
-    );
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow");
-    private final EntityClient _entityClient;
-
-    public DataJobType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
+  public DataJobType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
 
-    @Override
-    public EntityType type() {
-        return EntityType.DATA_JOB;
-    }
+  @Override
+  public EntityType type() {
+    return EntityType.DATA_JOB;
+  }
 
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
 
-    @Override
-    public Class<DataJob> objectClass() {
-        return DataJob.class;
-    }
+  @Override
+  public Class<DataJob> objectClass() {
+    return DataJob.class;
+  }
 
-    @Override
-    public Class<DataJobUpdateInput> inputClass() {
-        return DataJobUpdateInput.class;
-    }
+  @Override
+  public Class<DataJobUpdateInput> inputClass() {
+    return DataJobUpdateInput.class;
+  }
 
-    @Override
-    public List<DataFetcherResult<DataJob>> batchLoad(final List<String> urnStrs, @Nonnull final QueryContext context)
-        throws Exception {
-        final List<Urn> urns = urnStrs.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-        try {
-            final Map<Urn, EntityResponse> dataJobMap = _entityClient.batchGetV2(
-                Constants.DATA_JOB_ENTITY_NAME,
-                new HashSet<>(urns),
-                ASPECTS_TO_RESOLVE,
-                context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = new ArrayList<>();
-            for (Urn urn : urns) {
-                gmsResults.add(dataJobMap.getOrDefault(urn, null));
-            }
-            return gmsResults.stream()
-                .map(gmsDataJob -> gmsDataJob == null ? null : DataFetcherResult.<DataJob>newResult()
-                    .data(DataJobMapper.map(gmsDataJob))
-                    .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load Data Jobs", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search(
-            "dataJob", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
+  @Override
+  public List<DataFetcherResult<DataJob>> batchLoad(
+      final List<String> urnStrs, @Nonnull final QueryContext context) throws Exception {
+    final List<Urn> urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+    try {
+      final Map<Urn, EntityResponse> dataJobMap =
+          _entityClient.batchGetV2(
+              Constants.DATA_JOB_ENTITY_NAME,
+              new HashSet<>(urns),
+              ASPECTS_TO_RESOLVE,
+              context.getAuthentication());
 
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
+      final List<EntityResponse> gmsResults = new ArrayList<>();
+      for (Urn urn : urns) {
+        gmsResults.add(dataJobMap.getOrDefault(urn, null));
+      }
+      return gmsResults.stream()
+          .map(
+              gmsDataJob ->
+                  gmsDataJob == null
+                      ? null
+                      : DataFetcherResult.<DataJob>newResult()
+                          .data(DataJobMapper.map(gmsDataJob))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load Data Jobs", e);
     }
+  }
 
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path, @Nullable List<FacetFilterInput> filters, int start,
-        int count, @Nonnull QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
             "dataJob",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
 
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
-    }
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
 
-    @Override
-    public DataJob update(@Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception {
-        if (isAuthorized(urn, input, context)) {
-            final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr());
-            final Collection<MetadataChangeProposal> proposals = DataJobUpdateInputMapper.map(input, actor);
-            proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn)));
-
-            try {
-                _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false);
-            } catch (RemoteInvocationException e) {
-                throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e);
-            }
-
-            return load(urn, context).getData();
-        }
-        throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator.");
-    }
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "dataJob", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
+
+  @Override
+  public DataJob update(
+      @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context)
+      throws Exception {
+    if (isAuthorized(urn, input, context)) {
+      final CorpuserUrn actor =
+          CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr());
+      final Collection<MetadataChangeProposal> proposals =
+          DataJobUpdateInputMapper.map(input, actor);
+      proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn)));
 
-    private boolean isAuthorized(@Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) {
-        // Decide whether the current principal should be allowed to update the Dataset.
-        final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
-        return AuthorizationUtils.isAuthorized(
-            context.getAuthorizer(),
-            context.getAuthentication().getActor().toUrnStr(),
-            PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(),
-            urn,
-            orPrivilegeGroups);
+      try {
+        _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false);
+      } catch (RemoteInvocationException e) {
+        throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e);
+      }
+
+      return load(urn, context).getData();
     }
+    throw new AuthorizationException(
+        "Unauthorized to perform this action. Please contact your DataHub administrator.");
+  }
+
+  private boolean isAuthorized(
+      @Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) {
+    // Decide whether the current principal should be allowed to update the Dataset.
+    final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update);
+    return AuthorizationUtils.isAuthorized(
+        context.getAuthorizer(),
+        context.getAuthentication().getActor().toUrnStr(),
+        PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(),
+        urn,
+        orPrivilegeGroups);
+  }
+
+  private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) {
+
+    final ConjunctivePrivilegeGroup allPrivilegesGroup =
+        new ConjunctivePrivilegeGroup(
+            ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()));
 
-    private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) {
-
-        final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(
-            PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()
-        ));
-
-        List<String> specificPrivileges = new ArrayList<>();
-        if (updateInput.getOwnership() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
-        }
-        if (updateInput.getEditableProperties() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
-        }
-        if (updateInput.getGlobalTags() != null) {
-            specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
-        }
-        final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges);
-
-        // If you either have all entity privileges, or have the specific privileges required, you are authorized.
-        return new DisjunctivePrivilegeGroup(ImmutableList.of(
-            allPrivilegesGroup,
-            specificPrivilegeGroup
-        ));
+    List<String> specificPrivileges = new ArrayList<>();
+    if (updateInput.getOwnership() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType());
     }
+    if (updateInput.getEditableProperties() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType());
+    }
+    if (updateInput.getGlobalTags() != null) {
+      specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType());
+    }
+    final ConjunctivePrivilegeGroup specificPrivilegeGroup =
+        new ConjunctivePrivilegeGroup(specificPrivileges);
+
+    // If you either have all entity privileges, or have the specific privileges required, you are
+    // authorized.
+    return new DisjunctivePrivilegeGroup(
+        ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup));
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java
index 61802ad9cfe5c..0d0e7a613c8d8 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.datajob.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.BrowsePathsV2;
 import com.linkedin.common.DataPlatformInstance;
@@ -21,13 +23,13 @@
 import com.linkedin.datahub.graphql.generated.Dataset;
 import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper;
+import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.FineGrainedLineagesMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
-import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
 import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper;
@@ -42,143 +44,164 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class DataJobMapper implements ModelMapper<EntityResponse, DataJob> {
 
-    public static final DataJobMapper INSTANCE = new DataJobMapper();
+  public static final DataJobMapper INSTANCE = new DataJobMapper();
 
-    public static DataJob map(@Nonnull final EntityResponse entityResponse) {
-        return INSTANCE.apply(entityResponse);
-    }
+  public static DataJob map(@Nonnull final EntityResponse entityResponse) {
+    return INSTANCE.apply(entityResponse);
+  }
 
-    @Override
-    public DataJob apply(@Nonnull final EntityResponse entityResponse) {
-        final DataJob result = new DataJob();
-        Urn entityUrn = entityResponse.getUrn();
+  @Override
+  public DataJob apply(@Nonnull final EntityResponse entityResponse) {
+    final DataJob result = new DataJob();
+    Urn entityUrn = entityResponse.getUrn();
 
-        result.setUrn(entityResponse.getUrn().toString());
-        result.setType(EntityType.DATA_JOB);
+    result.setUrn(entityResponse.getUrn().toString());
+    result.setType(EntityType.DATA_JOB);
 
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
-        result.setLastIngested(lastIngested);
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
+    result.setLastIngested(lastIngested);
 
-        entityResponse.getAspects().forEach((name, aspect) -> {
-            DataMap data = aspect.getValue().data();
-            if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) {
+
entityResponse + .getAspects() + .forEach( + (name, aspect) -> { + DataMap data = aspect.getValue().data(); + if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow(new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); result.setJobId(gmsKey.getJobId()); - } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = new com.linkedin.datajob.DataJobInfo(data); + } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = + new com.linkedin.datajob.DataJobInfo(data); result.setInfo(mapDataJobInfo(gmsDataJobInfo, entityUrn)); result.setProperties(mapDataJobInfoToProperties(gmsDataJobInfo, entityUrn)); - } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = new com.linkedin.datajob.DataJobInputOutput(data); + } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = + new com.linkedin.datajob.DataJobInputOutput(data); result.setInputOutput(mapDataJobInputOutput(gmsDataJobInputOutput)); - } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(data); - final DataJobEditableProperties dataJobEditableProperties = new DataJobEditableProperties(); - dataJobEditableProperties.setDescription(editableDataJobProperties.getDescription()); + } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { + final EditableDataJobProperties editableDataJobProperties = + new EditableDataJobProperties(data); + final DataJobEditableProperties dataJobEditableProperties = + new DataJobEditableProperties(); + dataJobEditableProperties.setDescription( + editableDataJobProperties.getDescription()); result.setEditableProperties(dataJobEditableProperties); - } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { + } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); - } else if (STATUS_ASPECT_NAME.equals(name)) { + } else if (STATUS_ASPECT_NAME.equals(name)) { result.setStatus(StatusMapper.map(new Status(data))); - } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); - } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); - } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); - } else if (DOMAINS_ASPECT_NAME.equals(name)) { + } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); + } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); + } else if (DOMAINS_ASPECT_NAME.equals(name)) { final 
Domains domains = new Domains(data); // Currently we only take the first domain if it exists. result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); - } else if (DEPRECATION_ASPECT_NAME.equals(name)) { + } else if (DEPRECATION_ASPECT_NAME.equals(name)) { result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); - } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); - } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { + } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); - } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); - } - }); - - return result; + } + }); + + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ + private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobInfo result = new DataJobInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} - */ - private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobInfo result = new DataJobInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} - */ - private DataJobProperties mapDataJobInfoToProperties(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobProperties result = new DataJobProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} */ + private DataJobProperties mapDataJobInfoToProperties( + final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobProperties result = new DataJobProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private DataJobInputOutput mapDataJobInputOutput( + 
final com.linkedin.datajob.DataJobInputOutput inputOutput) { + final DataJobInputOutput result = new DataJobInputOutput(); + if (inputOutput.hasInputDatasets()) { + result.setInputDatasets( + inputOutput.getInputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatasets(ImmutableList.of()); + } + if (inputOutput.hasOutputDatasets()) { + result.setOutputDatasets( + inputOutput.getOutputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setOutputDatasets(ImmutableList.of()); + } + if (inputOutput.hasInputDatajobs()) { + result.setInputDatajobs( + inputOutput.getInputDatajobs().stream() + .map( + urn -> { + final DataJob dataJob = new DataJob(); + dataJob.setUrn(urn.toString()); + return dataJob; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatajobs(ImmutableList.of()); } - private DataJobInputOutput mapDataJobInputOutput(final com.linkedin.datajob.DataJobInputOutput inputOutput) { - final DataJobInputOutput result = new DataJobInputOutput(); - if (inputOutput.hasInputDatasets()) { - result.setInputDatasets(inputOutput.getInputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setInputDatasets(ImmutableList.of()); - } - if (inputOutput.hasOutputDatasets()) { - result.setOutputDatasets(inputOutput.getOutputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setOutputDatasets(ImmutableList.of()); - } - if (inputOutput.hasInputDatajobs()) { - result.setInputDatajobs(inputOutput.getInputDatajobs().stream().map(urn -> { - final DataJob dataJob = new DataJob(); - dataJob.setUrn(urn.toString()); - return dataJob; - }).collect(Collectors.toList())); - } else { - result.setInputDatajobs(ImmutableList.of()); - } - - if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { - result.setFineGrainedLineages(FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); - } - - return result; + if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { + result.setFineGrainedLineages( + FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b075c42d411fb..b0f299e00b4ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,63 +19,61 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static 
com.linkedin.metadata.Constants.*;
-
-
-public class DataJobUpdateInputMapper implements InputModelMapper<DataJobUpdateInput, Collection<MetadataChangeProposal>, Urn> {
-    public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper();
+public class DataJobUpdateInputMapper
+    implements InputModelMapper<DataJobUpdateInput, Collection<MetadataChangeProposal>, Urn> {
+  public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper();
 
-    public static Collection<MetadataChangeProposal> map(
-        @Nonnull final DataJobUpdateInput dataJobUpdateInput,
-        @Nonnull final Urn actor) {
-        return INSTANCE.apply(dataJobUpdateInput, actor);
-    }
-
-    @Override
-    public Collection<MetadataChangeProposal> apply(
-        @Nonnull final DataJobUpdateInput dataJobUpdateInput,
-        @Nonnull final Urn actor) {
-        final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3);
-        final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME);
+  public static Collection<MetadataChangeProposal> map(
+      @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) {
+    return INSTANCE.apply(dataJobUpdateInput, actor);
+  }
 
-        final AuditStamp auditStamp = new AuditStamp();
-        auditStamp.setActor(actor, SetMode.IGNORE_NULL);
-        auditStamp.setTime(System.currentTimeMillis());
+  @Override
+  public Collection<MetadataChangeProposal> apply(
+      @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) {
+    final Collection<MetadataChangeProposal> proposals = new ArrayList<>(3);
+    final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME);
 
-        if (dataJobUpdateInput.getOwnership() != null) {
-            proposals.add(updateMappingHelper.aspectToProposal(
-                OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME));
-        }
+    final AuditStamp auditStamp = new AuditStamp();
+    auditStamp.setActor(actor, SetMode.IGNORE_NULL);
+    auditStamp.setTime(System.currentTimeMillis());
 
-        if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) {
-            final GlobalTags globalTags = new GlobalTags();
-            if (dataJobUpdateInput.getGlobalTags() != null) {
-                globalTags.setTags(
-                    new TagAssociationArray(
-                        dataJobUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map
-                        ).collect(Collectors.toList())
-                    )
-                );
-            } else {
-                globalTags.setTags(
-                    new TagAssociationArray(
-                        dataJobUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map
-                        ).collect(Collectors.toList())
-                    )
-                );
-            }
-            proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME));
-        }
+    if (dataJobUpdateInput.getOwnership() != null) {
+      proposals.add(
+          updateMappingHelper.aspectToProposal(
+              OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor),
+              OWNERSHIP_ASPECT_NAME));
+    }
 
-        if (dataJobUpdateInput.getEditableProperties() != null) {
-            final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties();
-            editableDataJobProperties.setDescription(dataJobUpdateInput.getEditableProperties().getDescription());
-            editableDataJobProperties.setCreated(auditStamp);
-            editableDataJobProperties.setLastModified(auditStamp);
-            proposals.add(updateMappingHelper.aspectToProposal(editableDataJobProperties,
-                EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME));
-        }
+    if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) {
+      final GlobalTags globalTags = new GlobalTags();
+      if (dataJobUpdateInput.getGlobalTags() != null) {
+        globalTags.setTags(
+            new TagAssociationArray(
+                dataJobUpdateInput.getGlobalTags().getTags().stream()
+                    .map(TagAssociationUpdateMapper::map)
+                    .collect(Collectors.toList())));
+      } else {
+        globalTags.setTags(
+            new TagAssociationArray(
+                dataJobUpdateInput.getTags().getTags().stream()
+                    .map(TagAssociationUpdateMapper::map)
+                    .collect(Collectors.toList())));
+      }
+      proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME));
+    }
 
-        return proposals;
+    if (dataJobUpdateInput.getEditableProperties() != null) {
+      final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties();
+      editableDataJobProperties.setDescription(
+          dataJobUpdateInput.getEditableProperties().getDescription());
+      editableDataJobProperties.setCreated(auditStamp);
+      editableDataJobProperties.setLastModified(auditStamp);
+      proposals.add(
+          updateMappingHelper.aspectToProposal(
+              editableDataJobProperties, EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME));
     }
+
+    return proposals;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java
index 57a035d136645..567d275dbee0a 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.dataplatform;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -17,56 +19,60 @@
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class DataPlatformType implements EntityType<DataPlatform, String> {
 
-    private final EntityClient _entityClient;
+  private final EntityClient _entityClient;
 
-    public DataPlatformType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
+  public DataPlatformType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
 
-    @Override
-    public Class<DataPlatform> objectClass() {
-        return DataPlatform.class;
-    }
+  @Override
+  public Class<DataPlatform> objectClass() {
+    return DataPlatform.class;
+  }
 
-    @Override
-    public List<DataFetcherResult<DataPlatform>> batchLoad(final List<String> urns, final QueryContext context) {
+  @Override
+  public List<DataFetcherResult<DataPlatform>> batchLoad(
+      final List<String> urns, final QueryContext context) {
 
-        final List<Urn> dataPlatformUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
+    final List<Urn> dataPlatformUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
 
-        try {
-            final Map<Urn, EntityResponse> dataPlatformMap = _entityClient.batchGetV2(
-                DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), null, context.getAuthentication());
+    try {
+      final Map<Urn, EntityResponse> dataPlatformMap =
+          _entityClient.batchGetV2(
+              DATA_PLATFORM_ENTITY_NAME,
+              new HashSet<>(dataPlatformUrns),
+              null,
+              context.getAuthentication());
 
-            final List<EntityResponse> gmsResults = new ArrayList<>();
-            for (Urn urn : dataPlatformUrns) {
-                gmsResults.add(dataPlatformMap.getOrDefault(urn, null));
-            }
+      final List<EntityResponse> gmsResults = new ArrayList<>();
+      for (Urn urn : dataPlatformUrns) {
+        gmsResults.add(dataPlatformMap.getOrDefault(urn, null));
+      }
 
-            return gmsResults.stream()
-                .map(gmsPlatform -> gmsPlatform == null ? null
-                    : DataFetcherResult.<DataPlatform>newResult()
-                        .data(DataPlatformMapper.map(gmsPlatform))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load Data Platforms", e);
-        }
+      return gmsResults.stream()
+          .map(
+              gmsPlatform ->
+                  gmsPlatform == null
+                      ? null
+                      : DataFetcherResult.<DataPlatform>newResult()
+                          .data(DataPlatformMapper.map(gmsPlatform))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load Data Platforms", e);
     }
+  }
 
-    @Override
-    public com.linkedin.datahub.graphql.generated.EntityType type() {
-        return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM;
-    }
+  @Override
+  public com.linkedin.datahub.graphql.generated.EntityType type() {
+    return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM;
+  }
 
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java
index 011fb83cddb33..c2dc3bfabd07c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java
@@ -6,25 +6,27 @@
 import javax.annotation.Nonnull;
 
 @Deprecated
-public class DataPlatformInfoMapper implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformInfo> {
+public class DataPlatformInfoMapper
+    implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformInfo> {
 
-    public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper();
+  public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper();
 
-    public static DataPlatformInfo map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) {
-        return INSTANCE.apply(platform);
-    }
+  public static DataPlatformInfo map(
+      @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) {
+    return INSTANCE.apply(platform);
+  }
 
-    @Override
-    public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) {
-        final DataPlatformInfo result = new DataPlatformInfo();
-        result.setType(PlatformType.valueOf(input.getType().toString()));
-        result.setDatasetNameDelimiter(input.getDatasetNameDelimiter());
-        if (input.hasDisplayName()) {
-            result.setDisplayName(input.getDisplayName());
-        }
-        if (input.hasLogoUrl()) {
-            result.setLogoUrl(input.getLogoUrl().toString());
-        }
-        return result;
+  @Override
+  public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) {
+    final DataPlatformInfo result = new DataPlatformInfo();
+    result.setType(PlatformType.valueOf(input.getType().toString()));
+    result.setDatasetNameDelimiter(input.getDatasetNameDelimiter());
+    if (input.hasDisplayName()) {
+      result.setDisplayName(input.getDisplayName());
+    }
+    if (input.hasLogoUrl()) {
+      result.setLogoUrl(input.getLogoUrl().toString());
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java
index 8df44e8f6e9e9..f7078f9f37d7c 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.dataplatform.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.generated.DataPlatform;
 import com.linkedin.datahub.graphql.generated.EntityType;
@@ -13,36 +15,40 @@
 import com.linkedin.metadata.utils.EntityKeyUtils;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class DataPlatformMapper implements ModelMapper<EntityResponse, DataPlatform> {
 
-    public static final DataPlatformMapper INSTANCE = new DataPlatformMapper();
-
-    public static DataPlatform map(@Nonnull final EntityResponse platform) {
-        return INSTANCE.apply(platform);
-    }
-
-    @Override
-    public DataPlatform apply(@Nonnull final EntityResponse entityResponse) {
-        final DataPlatform result = new DataPlatform();
-        final DataPlatformKey dataPlatformKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKeyInternal(entityResponse.getUrn(),
-            new DataPlatformKey().schema());
-        result.setType(EntityType.DATA_PLATFORM);
-        Urn urn = entityResponse.getUrn();
-        result.setUrn(urn.toString());
-        result.setName(dataPlatformKey.getPlatformName());
-
-        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
-        Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
-        result.setLastIngested(lastIngested);
-
-        MappingHelper<DataPlatform> mappingHelper = new MappingHelper<>(aspectMap, result);
-        mappingHelper.mapToResult(DATA_PLATFORM_KEY_ASPECT_NAME, (dataPlatform, dataMap) ->
+  public static final DataPlatformMapper INSTANCE = new DataPlatformMapper();
+
+  public static DataPlatform map(@Nonnull final EntityResponse platform) {
+    return INSTANCE.apply(platform);
+  }
+
+  @Override
+  public DataPlatform apply(@Nonnull final EntityResponse entityResponse) {
+    final DataPlatform result = new DataPlatform();
+    final DataPlatformKey dataPlatformKey =
+        (DataPlatformKey)
+            EntityKeyUtils.convertUrnToEntityKeyInternal(
+                entityResponse.getUrn(), new DataPlatformKey().schema());
+    result.setType(EntityType.DATA_PLATFORM);
+    Urn urn = entityResponse.getUrn();
+    result.setUrn(urn.toString());
+    result.setName(dataPlatformKey.getPlatformName());
+
+    EnvelopedAspectMap aspectMap = entityResponse.getAspects();
+    Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap);
+    result.setLastIngested(lastIngested);
+
+    MappingHelper<DataPlatform> mappingHelper = new MappingHelper<>(aspectMap, result);
+    mappingHelper.mapToResult(
+        DATA_PLATFORM_KEY_ASPECT_NAME,
+        (dataPlatform, dataMap) ->
            dataPlatform.setName(new DataPlatformKey(dataMap).getPlatformName()));
-        mappingHelper.mapToResult(DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) ->
-            dataPlatform.setProperties(DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap))));
-        return mappingHelper.getResult();
-    }
+    mappingHelper.mapToResult(
+        DATA_PLATFORM_INFO_ASPECT_NAME,
+        (dataPlatform, dataMap) ->
+            dataPlatform.setProperties(
+                DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap))));
+    return mappingHelper.getResult();
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java
index c0a236dc1a402..ad6de5505bed6 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java
@@ -5,27 +5,28 @@
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import javax.annotation.Nonnull;
 
+public class DataPlatformPropertiesMapper
+    implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformProperties> {
 
-public class DataPlatformPropertiesMapper implements ModelMapper<com.linkedin.dataplatform.DataPlatformInfo, DataPlatformProperties> {
+  public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper();
 
-    public static final DataPlatformPropertiesMapper
-        INSTANCE = new DataPlatformPropertiesMapper();
+  public static DataPlatformProperties map(
+      @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) {
+    return INSTANCE.apply(platform);
+  }
 
-    public static DataPlatformProperties map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) {
-        return INSTANCE.apply(platform);
+  @Override
+  public DataPlatformProperties apply(
+      @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) {
+    final DataPlatformProperties result = new DataPlatformProperties();
+    result.setType(PlatformType.valueOf(input.getType().toString()));
+    result.setDatasetNameDelimiter(input.getDatasetNameDelimiter());
+    if (input.getDisplayName() != null) {
+      result.setDisplayName(input.getDisplayName());
     }
-
-    @Override
-    public DataPlatformProperties apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) {
-        final DataPlatformProperties result = new DataPlatformProperties();
-        result.setType(PlatformType.valueOf(input.getType().toString()));
-        result.setDatasetNameDelimiter(input.getDatasetNameDelimiter());
-        if (input.getDisplayName() != null) {
-            result.setDisplayName(input.getDisplayName());
-        }
-        if (input.getLogoUrl() != null) {
-            result.setLogoUrl(input.getLogoUrl().toString());
-        }
-        return result;
+    if (input.getLogoUrl() != null) {
+      result.setLogoUrl(input.getLogoUrl().toString());
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java
index 87614e1332528..6519a493f3991 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.dataplatforminstance;
 
+import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -10,19 +12,15 @@
 import com.linkedin.datahub.graphql.generated.EntityType;
 import com.linkedin.datahub.graphql.generated.FacetFilterInput;
 import com.linkedin.datahub.graphql.generated.SearchResults;
+import com.linkedin.datahub.graphql.types.SearchableEntityType;
 import com.linkedin.datahub.graphql.types.dataplatforminstance.mappers.DataPlatformInstanceMapper;
 import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper;
-import com.linkedin.datahub.graphql.types.SearchableEntityType;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.query.AutoCompleteResult;
 import com.linkedin.metadata.query.filter.Filter;
 import graphql.execution.DataFetcherResult;
-import org.apache.commons.lang3.NotImplementedException;
-
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
@@ -30,90 +28,100 @@
 import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import org.apache.commons.lang3.NotImplementedException;
 
-import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME;
-
-public class DataPlatformInstanceType implements SearchableEntityType<DataPlatformInstance>,
+public class DataPlatformInstanceType
+    implements SearchableEntityType<DataPlatformInstance>,
         com.linkedin.datahub.graphql.types.EntityType<DataPlatformInstance, String> {
 
-    static final Set<String> ASPECTS_TO_FETCH = ImmutableSet.of(
-        Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME,
-        Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME,
-        Constants.DEPRECATION_ASPECT_NAME,
-        Constants.OWNERSHIP_ASPECT_NAME,
-        Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME,
-        Constants.GLOBAL_TAGS_ASPECT_NAME,
-        Constants.STATUS_ASPECT_NAME
-    );
-    private final EntityClient _entityClient;
-
-    public DataPlatformInstanceType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.DATA_PLATFORM_INSTANCE;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<DataPlatformInstance> objectClass() {
-        return DataPlatformInstance.class;
+  static final Set<String> ASPECTS_TO_FETCH =
+      ImmutableSet.of(
+          Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME,
+          Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME,
+          Constants.DEPRECATION_ASPECT_NAME,
+          Constants.OWNERSHIP_ASPECT_NAME,
+          Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME,
+          Constants.GLOBAL_TAGS_ASPECT_NAME,
+          Constants.STATUS_ASPECT_NAME);
+  private final EntityClient _entityClient;
+
+  public DataPlatformInstanceType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.DATA_PLATFORM_INSTANCE;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<DataPlatformInstance> objectClass() {
+    return DataPlatformInstance.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<DataPlatformInstance>> batchLoad(
+      @Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception {
+    final List<Urn> dataPlatformInstanceUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> entities =
+          _entityClient.batchGetV2(
+              Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME,
+              new HashSet<>(dataPlatformInstanceUrns),
+              ASPECTS_TO_FETCH,
+              context.getAuthentication());
+
+      final List<EntityResponse> gmsResults = new ArrayList<>();
+      for (Urn urn : dataPlatformInstanceUrns) {
+        gmsResults.add(entities.getOrDefault(urn, null));
+      }
+      return gmsResults.stream()
+          .map(
+              gmsResult ->
+                  gmsResult == null
+                      ? null
+                      : DataFetcherResult.<DataPlatformInstance>newResult()
+                          .data(DataPlatformInstanceMapper.map(gmsResult))
+                          .build())
+          .collect(Collectors.toList());
+
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load DataPlatformInstance", e);
    }
-
-    @Override
-    public List<DataFetcherResult<DataPlatformInstance>> batchLoad(@Nonnull List<String> urns, @Nonnull QueryContext context) throws Exception {
-        final List<Urn> dataPlatformInstanceUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> entities = _entityClient.batchGetV2(
-                Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME,
-                new HashSet<>(dataPlatformInstanceUrns),
-                ASPECTS_TO_FETCH,
-                context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = new ArrayList<>();
-            for (Urn urn : dataPlatformInstanceUrns) {
-                gmsResults.add(entities.getOrDefault(urn, null));
-            }
-            return gmsResults.stream()
-                .map(gmsResult ->
-                    gmsResult == null ? null : DataFetcherResult.<DataPlatformInstance>newResult()
-                        .data(DataPlatformInstanceMapper.map(gmsResult))
-                        .build()
-                )
-                .collect(Collectors.toList());
-
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load DataPlatformInstance", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        throw new NotImplementedException("Searchable type (deprecated) not implemented on DataPlatformInstance entity type");
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete(DATA_PLATFORM_INSTANCE_ENTITY_NAME, query,
-            filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    throw new NotImplementedException(
+        "Searchable type (deprecated) not implemented on DataPlatformInstance entity type");
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete(
+            DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java
index ba49f23133f9e..1a2bd0488c4bd 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java
@@ -1,28 +1,27 @@
 package com.linkedin.datahub.graphql.types.dataplatforminstance.mappers;
 
-import com.linkedin.common.Ownership;
+import com.linkedin.common.Deprecation;
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.InstitutionalMemory;
+import com.linkedin.common.Ownership;
 import com.linkedin.common.Status;
-import com.linkedin.common.Deprecation;
 import com.linkedin.common.urn.Urn;
-import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
-import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
-import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
-import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
-import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
-import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper;
-import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties;
 import com.linkedin.data.DataMap;
 import com.linkedin.datahub.graphql.generated.DataPlatform;
 import com.linkedin.datahub.graphql.generated.DataPlatformInstance;
 import com.linkedin.datahub.graphql.generated.EntityType;
+import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
+import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper;
+import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper;
+import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
+import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
 import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
+import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper;
+import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.key.DataPlatformInstanceKey;
-
 import javax.annotation.Nonnull;
 
 public class DataPlatformInstanceMapper {
@@ -41,65 +40,75 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse)
     final EnvelopedAspectMap aspects = entityResponse.getAspects();
     MappingHelper<DataPlatformInstance> mappingHelper = new MappingHelper<>(aspects, result);
 
-    mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME,
-        this::mapDataPlatformInstanceKey
-    );
-    mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME,
-        (dataPlatformInstance, dataMap) ->
-            this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn)
-    );
-    mappingHelper.mapToResult(Constants.OWNERSHIP_ASPECT_NAME,
-        (dataPlatformInstance, dataMap) ->
-            dataPlatformInstance.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))
-    );
-    mappingHelper.mapToResult(Constants.GLOBAL_TAGS_ASPECT_NAME,
-        (dataPlatformInstance, dataMap) -> this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)
-    );
-    mappingHelper.mapToResult(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME,
-        (dataPlatformInstance, dataMap) ->
-            dataPlatformInstance.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))
-    );
-    mappingHelper.mapToResult(Constants.STATUS_ASPECT_NAME,
-        (dataPlatformInstance, dataMap) ->
-            dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))
-    );
-    mappingHelper.mapToResult(Constants.DEPRECATION_ASPECT_NAME,
-        (dataPlatformInstance, dataMap) ->
-            dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))
-    );
+    mappingHelper.mapToResult(
+        Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, this::mapDataPlatformInstanceKey);
+    mappingHelper.mapToResult(
+        Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME,
+        (dataPlatformInstance, dataMap) ->
+            this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn));
+    mappingHelper.mapToResult(
+
Constants.OWNERSHIP_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setOwnership( + OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.GLOBAL_TAGS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.STATUS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + Constants.DEPRECATION_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } - private void mapDataPlatformInstanceKey(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { + private void mapDataPlatformInstanceKey( + @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { final DataPlatformInstanceKey gmsKey = new DataPlatformInstanceKey(dataMap); - dataPlatformInstance.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()) - .build()); + dataPlatformInstance.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); dataPlatformInstance.setInstanceId(gmsKey.getInstance()); } private void mapDataPlatformInstanceProperties( - @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn - ) { - final DataPlatformInstanceProperties gmsProperties = new DataPlatformInstanceProperties(dataMap); + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + final DataPlatformInstanceProperties gmsProperties = + new DataPlatformInstanceProperties(dataMap); final com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties properties = - new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); + new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); properties.setName(gmsProperties.getName()); properties.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } - } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index ee014f9f66571..48a0cb984862d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.DataProcessInstance; @@ -12,43 +14,43 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class DataProcessInstanceMapper implements ModelMapper { - public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); - - public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); + + public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { + final DataProcessInstance result = new DataProcessInstance(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_PROCESS_INSTANCE); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + + return mappingHelper.getResult(); + } + + private void mapDataProcessProperties( + @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + DataProcessInstanceProperties dataProcessInstanceProperties = + new DataProcessInstanceProperties(dataMap); + dpi.setName(dataProcessInstanceProperties.getName()); + if (dataProcessInstanceProperties.hasCreated()) { + dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); } - - @Override - public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { - final DataProcessInstance result = new DataProcessInstance(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_PROCESS_INSTANCE); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); - - return mappingHelper.getResult(); - } - - private void mapDataProcessProperties(@Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { - DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); - dpi.setName(dataProcessInstanceProperties.getName()); - if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); - } - if (dataProcessInstanceProperties.hasExternalUrl()) { - dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); - } + if (dataProcessInstanceProperties.hasExternalUrl()) { + dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index ca9a77f7e45cb..fd60711e8c569 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -6,36 +6,41 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - -public class DataProcessInstanceRunEventMapper implements 
TimeSeriesAspectMapper { - - public static final DataProcessInstanceRunEventMapper INSTANCE = new DataProcessInstanceRunEventMapper(); - - public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); +public class DataProcessInstanceRunEventMapper + implements TimeSeriesAspectMapper { + + public static final DataProcessInstanceRunEventMapper INSTANCE = + new DataProcessInstanceRunEventMapper(); + + public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + DataProcessInstanceRunEvent runEvent = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + DataProcessInstanceRunEvent.class); + + final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = + new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); + + result.setTimestampMillis(runEvent.getTimestampMillis()); + result.setAttempt(runEvent.getAttempt()); + if (runEvent.hasStatus()) { + result.setStatus( + com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf( + runEvent.getStatus().toString())); } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - DataProcessInstanceRunEvent runEvent = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - DataProcessInstanceRunEvent.class); - - final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = - new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); - - result.setTimestampMillis(runEvent.getTimestampMillis()); - result.setAttempt(runEvent.getAttempt()); - if (runEvent.hasStatus()) { - result.setStatus(com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf(runEvent.getStatus().toString())); - } - if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); - } - - return result; + if (runEvent.hasResult()) { + result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 91b03eea2745f..422bea73925a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -5,30 +5,34 @@ import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +public class DataProcessInstanceRunResultMapper + implements ModelMapper< + DataProcessInstanceRunResult, + com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { -public class DataProcessInstanceRunResultMapper implements ModelMapper< - DataProcessInstanceRunResult, com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> 
{ + public static final DataProcessInstanceRunResultMapper INSTANCE = + new DataProcessInstanceRunResultMapper(); - public static final DataProcessInstanceRunResultMapper INSTANCE = new DataProcessInstanceRunResultMapper(); + public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( + @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(input); + } - public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map(@Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); - } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply(@Nonnull final DataProcessInstanceRunResult input) { - - final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = - new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); + @Override + public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( + @Nonnull final DataProcessInstanceRunResult input) { - if (input.hasType()) { - result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); - } + final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = + new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); - if (input.hasNativeResultType()) { - result.setNativeResultType(input.getNativeResultType()); - } + if (input.hasType()) { + result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); + } - return result; + if (input.hasNativeResultType()) { + result.setNativeResultType(input.getNativeResultType()); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index eb8ca23f00b37..766f6937ce3e2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -1,5 +1,13 @@ package com.linkedin.datahub.graphql.types.dataproduct; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,11 +26,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import lombok.RequiredArgsConstructor; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,26 +33,23 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; -import static 
com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import org.apache.commons.lang3.NotImplementedException; @RequiredArgsConstructor -public class DataProductType implements SearchableEntityType, - com.linkedin.datahub.graphql.types.EntityType { - public static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME - ); +public class DataProductType + implements SearchableEntityType, + com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -68,13 +68,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { - final List dataProductUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataProductUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATA_PRODUCT_ENTITY_NAME, new HashSet<>(dataProductUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATA_PRODUCT_ENTITY_NAME, + new HashSet<>(dataProductUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -82,8 +86,13 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataProductMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataProductMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); @@ -91,22 +100,28 @@ public List> batchLoad(@Nonnull List urns } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Data Product entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Data Product entity type"); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 254b43ecb96cc..8039ea08dc722 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -21,17 +28,8 @@ import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - - public class DataProductMapper implements ModelMapper { public static final DataProductMapper INSTANCE = new DataProductMapper(); @@ -50,27 +48,44 @@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); 
MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, (dataProduct, dataMap) -> - mapDataProductProperties(dataProduct, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setDomain(DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + (dataProduct, dataMap) -> mapDataProductProperties(dataProduct, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + DOMAINS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setDomain( + DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); return result; } - private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { + private void mapDataProductProperties( + @Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { DataProductProperties dataProductProperties = new DataProductProperties(dataMap); - com.linkedin.datahub.graphql.generated.DataProductProperties properties = new com.linkedin.datahub.graphql.generated.DataProductProperties(); + com.linkedin.datahub.graphql.generated.DataProductProperties properties = + new com.linkedin.datahub.graphql.generated.DataProductProperties(); - final String name = dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); + final String name = + dataProductProperties.hasName() ? 
dataProductProperties.getName() : urn.getId(); properties.setName(name); properties.setDescription(dataProductProperties.getDescription()); if (dataProductProperties.hasExternalUrl()) { @@ -81,7 +96,9 @@ private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull } else { properties.setNumAssets(0); } - properties.setCustomProperties(CustomPropertiesMapper.map(dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); + properties.setCustomProperties( + CustomPropertiesMapper.map( + dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); dataProduct.setProperties(properties); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 6f339d3985133..badb24810c82b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -8,19 +13,17 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; -import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; -import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.BrowsableEntityType; @@ -37,13 +40,12 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import 
com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -56,235 +58,266 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetType implements SearchableEntityType, BrowsableEntityType, +public class DatasetType + implements SearchableEntityType, + BrowsableEntityType, BatchMutableType { - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private static final String ENTITY_NAME = "dataset"; - - private final EntityClient _entityClient; - - public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return Dataset.class; + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private static final String ENTITY_NAME = "dataset"; + + private final EntityClient _entityClient; + + public DatasetType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return Dataset.class; + } + + @Override + public Class inputClass() { + return DatasetUpdateInput.class; + } + + @Override + public Class batchInputClass() { + return BatchDatasetUpdateInput[].class; + } + + @Override + public EntityType type() { + return EntityType.DATASET; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + @Nonnull final List urnStrs, @Nonnull final QueryContext context) { + try { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map datasetMap = + _entityClient.batchGetV2( + Constants.DATASET_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(datasetMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataset -> + gmsDataset == null + ? null + : DataFetcherResult.newResult() + .data(DatasetMapper.map(gmsDataset)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); } - - @Override - public Class inputClass() { - return DatasetUpdateInput.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataset", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public List batchUpdate( + @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { + final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); + + final Collection proposals = + Arrays.stream(input) + .map( + updateInput -> { + if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { + Collection datasetProposals = + DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); + datasetProposals.forEach( + proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); + return datasetProposals; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + + final List urns = + Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } - @Override - public Class batchInputClass() { - return BatchDatasetUpdateInput[].class; + return batchLoad(urns, context).stream() + .map(DataFetcherResult::getData) + .collect(Collectors.toList()); + } + + @Override + public Dataset update( + @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DatasetUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - @Override - public EntityType type() { - return EntityType.DATASET; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getInstitutionalMemory() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - @Override - public List> batchLoad(@Nonnull final List urnStrs, - @Nonnull final QueryContext context) { - try { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map datasetMap = - _entityClient.batchGetV2( - Constants.DATASET_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(datasetMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? null : DataFetcherResult.newResult() - .data(DatasetMapper.map(gmsDataset)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (updateInput.getDeprecation() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "dataset", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); + if (updateInput.getEditableSchemaMetadata() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public List batchUpdate(@Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); - - final Collection proposals = Arrays.stream(input).map(updateInput -> { - if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { - Collection datasetProposals = DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); - datasetProposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); - return datasetProposals; - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }).flatMap(Collection::stream).collect(Collectors.toList()); - - final List urns = Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); - } - - return batchLoad(urns, context).stream().map(DataFetcherResult::getData).collect(Collectors.toList()); - } - - @Override - public Dataset update(@Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DatasetUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorized(@Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. 
- final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getInstitutionalMemory() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); - } - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDeprecation() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - if (updateInput.getEditableSchemaMetadata() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java index e1aa580276a50..676617bfa2f90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.types.dataset; import com.linkedin.common.urn.DatasetUrn; - import java.net.URISyntaxException; public class DatasetUtils { - private DatasetUtils() { } + private DatasetUtils() {} - static DatasetUrn getDatasetUrn(String urnStr) { - try { - return DatasetUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); - } + static DatasetUrn getDatasetUrn(String urnStr) { + try { + return DatasetUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index e620bfb30b6b7..df019cc5df8fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -22,32 +24,30 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class VersionedDatasetType implements com.linkedin.datahub.graphql.types.EntityType { +public class VersionedDatasetType + implements com.linkedin.datahub.graphql.types.EntityType { - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME - ); + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; @@ -74,8 +74,8 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(@Nonnull final List versionedUrns, - @Nonnull final QueryContext context) { + public List> batchLoad( + @Nonnull final List versionedUrns, @Nonnull final QueryContext context) { try { final Map datasetMap = _entityClient.batchGetVersionedV2( @@ -89,9 +89,13 @@ public List> batchLoad(@Nonnull final List gmsDataset == null ? null : DataFetcherResult.newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) - .build()) + .map( + gmsDataset -> + gmsDataset == null + ? null + : DataFetcherResult.newResult() + .data(VersionedDatasetMapper.map(gmsDataset)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Datasets", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 0ec9bed0c8511..5fe7815ea2f8d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class AssertionRunEventMapper implements TimeSeriesAspectMapper { @@ -29,8 +28,10 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( @Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), AssertionRunEvent.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + AssertionRunEvent.class); final com.linkedin.datahub.graphql.generated.AssertionRunEvent assertionRunEvent = new com.linkedin.datahub.graphql.generated.AssertionRunEvent(); @@ -39,7 +40,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setAssertionUrn(gmsAssertionRunEvent.getAssertionUrn().toString()); assertionRunEvent.setAsserteeUrn(gmsAssertionRunEvent.getAsserteeUrn().toString()); assertionRunEvent.setRunId(gmsAssertionRunEvent.getRunId()); - assertionRunEvent.setStatus(AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); + assertionRunEvent.setStatus( + AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); } @@ -50,7 +52,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); } if 
(gmsAssertionRunEvent.hasRuntimeContext()) { - assertionRunEvent.setRuntimeContext(StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + assertionRunEvent.setRuntimeContext( + StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1adcea7e53dc2..1644e0243a181 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -2,24 +2,25 @@ import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class DatasetDeprecationMapper implements ModelMapper { +public class DatasetDeprecationMapper + implements ModelMapper { - public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); + public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map( + @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { + return INSTANCE.apply(deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 3e39c14c29ede..8296bc8244995 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Access; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -22,6 +24,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; @@ -29,15 +32,14 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,155 +55,196 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ @Slf4j public class DatasetMapper implements ModelMapper { - public static final DatasetMapper INSTANCE = new DatasetMapper(); - - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); - } - - public Dataset apply(@Nonnull final EntityResponse entityResponse) { - Dataset result = new Dataset(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATASET); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + public static final DatasetMapper INSTANCE = new DatasetMapper(); + + public static Dataset map(@Nonnull final EntityResponse dataset) { + return INSTANCE.apply(dataset); + } + + public Dataset apply(@Nonnull final EntityResponse entityResponse) { + Dataset result = new Dataset(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATASET); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); - mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - 
mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(SIBLINGS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); - mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + SIBLINGS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + mappingHelper.mapToResult( + UPSTREAM_LINEAGE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setFineGrainedLineages( + UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataset, dataMap) -> dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> - dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } - - private 
void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final DatasetKey gmsKey = new DatasetKey(dataMap); - dataset.setName(gmsKey.getName()); - dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() + mappingHelper.mapToResult( + ACCESS_DATASET_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } + + private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final DatasetKey gmsKey = new DatasetKey(dataMap); + dataset.setName(gmsKey.getName()); + dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); + dataset.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + .setUrn(gmsKey.getPlatform().toString()) + .build()); + } + + private void mapDatasetProperties( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final DatasetProperties gmsProperties = new DatasetProperties(dataMap); + final com.linkedin.datahub.graphql.generated.DatasetProperties properties = + new com.linkedin.datahub.graphql.generated.DatasetProperties(); + properties.setDescription(gmsProperties.getDescription()); + dataset.setDescription(gmsProperties.getDescription()); + properties.setOrigin(dataset.getOrigin()); + if (gmsProperties.getExternalUrl() != null) { + properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - - private void mapDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final DatasetProperties gmsProperties = new DatasetProperties(dataMap); - final com.linkedin.datahub.graphql.generated.DatasetProperties properties = - new com.linkedin.datahub.graphql.generated.DatasetProperties(); - properties.setDescription(gmsProperties.getDescription()); - dataset.setDescription(gmsProperties.getDescription()); - properties.setOrigin(dataset.getOrigin()); - if (gmsProperties.getExternalUrl() != null) { - properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); - } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); - if (gmsProperties.getName() != null) { - properties.setName(gmsProperties.getName()); - } else { - properties.setName(dataset.getName()); - } - properties.setQualifiedName(gmsProperties.getQualifiedName()); - dataset.setProperties(properties); - dataset.setDescription(properties.getDescription()); - if (gmsProperties.getUri() != null) { - dataset.setUri(gmsProperties.getUri().toString()); - } - TimeStamp created = gmsProperties.getCreated(); - if (created != null) { - properties.setCreated(created.getTime()); - if (created.hasActor()) { - properties.setCreatedActor(created.getActor().toString()); - } - } - TimeStamp lastModified = gmsProperties.getLastModified(); - if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } - } + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + if (gmsProperties.getName() != null) { + properties.setName(gmsProperties.getName()); + } else { + properties.setName(dataset.getName()); } - - 
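A note on the mapToResult pattern used throughout this mapper: each call registers a lambda that MappingHelper runs only when the named aspect is present in the entity response, so the per-aspect mappers never receive a null DataMap. A minimal, hypothetical sketch of that dispatch (the real helper is com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper and works on an EnvelopedAspectMap; the class and field names below are illustrative only):

import com.linkedin.data.DataMap;
import java.util.Map;
import java.util.function.BiConsumer;

// Simplified stand-in for MappingHelper, assuming a plain name -> DataMap view of the aspects.
final class AspectDispatchSketch<O> {
  private final Map<String, DataMap> aspects;
  private final O result;

  AspectDispatchSketch(Map<String, DataMap> aspects, O result) {
    this.aspects = aspects;
    this.result = result;
  }

  // Run the mapper only if the aspect was returned; absent aspects are skipped silently.
  void mapToResult(String aspectName, BiConsumer<O, DataMap> mapper) {
    DataMap dataMap = aspects.get(aspectName);
    if (dataMap != null) {
      mapper.accept(result, dataMap);
    }
  }

  O getResult() {
    return result;
  }
}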
private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); - final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); - editableProperties.setDescription(editableDatasetProperties.getDescription()); - dataset.setEditableProperties(editableProperties); + properties.setQualifiedName(gmsProperties.getQualifiedName()); + dataset.setProperties(properties); + dataset.setDescription(properties.getDescription()); + if (gmsProperties.getUri() != null) { + dataset.setUri(gmsProperties.getUri().toString()); } - - private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final ViewProperties properties = new ViewProperties(dataMap); - final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = - new com.linkedin.datahub.graphql.generated.ViewProperties(); - graphqlProperties.setMaterialized(properties.isMaterialized()); - graphqlProperties.setLanguage(properties.getViewLanguage()); - graphqlProperties.setLogic(properties.getViewLogic()); - dataset.setViewProperties(graphqlProperties); + TimeStamp created = gmsProperties.getCreated(); + if (created != null) { + properties.setCreated(created.getTime()); + if (created.hasActor()) { + properties.setCreatedActor(created.getActor().toString()); + } } - - private void mapGlobalTags(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataset.setGlobalTags(globalTags); - dataset.setTags(globalTags); + TimeStamp lastModified = gmsProperties.getLastModified(); + if (lastModified != null) { + properties.setLastModified(lastModified.getTime()); + if (lastModified.hasActor()) { + properties.setLastModifiedActor(lastModified.getActor().toString()); + } } - - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() + } + + private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); + final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); + editableProperties.setDescription(editableDatasetProperties.getDescription()); + dataset.setEditableProperties(editableProperties); + } + + private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final ViewProperties properties = new ViewProperties(dataMap); + final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = + new com.linkedin.datahub.graphql.generated.ViewProperties(); + graphqlProperties.setMaterialized(properties.isMaterialized()); + graphqlProperties.setLanguage(properties.getViewLanguage()); + graphqlProperties.setLogic(properties.getViewLogic()); + dataset.setViewProperties(graphqlProperties); + } + + private void mapGlobalTags( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataset.setGlobalTags(globalTags); + dataset.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dataset 
dataset, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); - } + private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index dbaaf27a3f2bc..25639e431fac1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -8,20 +8,22 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class DatasetProfileMapper implements TimeSeriesAspectMapper { +public class DatasetProfileMapper + implements TimeSeriesAspectMapper { public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); - public static com.linkedin.datahub.graphql.generated.DatasetProfile map(@Nonnull final EnvelopedAspect envelopedAspect) { + public static com.linkedin.datahub.graphql.generated.DatasetProfile map( + @Nonnull final EnvelopedAspect envelopedAspect) { return INSTANCE.apply(envelopedAspect); } @Override - public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull final EnvelopedAspect envelopedAspect) { + public com.linkedin.datahub.graphql.generated.DatasetProfile apply( + @Nonnull final EnvelopedAspect envelopedAspect) { - DatasetProfile gmsProfile = GenericRecordUtils - .deserializeAspect( + DatasetProfile gmsProfile = + GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), envelopedAspect.getAspect().getContentType(), DatasetProfile.class); @@ -35,13 +37,16 @@ public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull fina result.setTimestampMillis(gmsProfile.getTimestampMillis()); if (gmsProfile.hasFieldProfiles()) { result.setFieldProfiles( - gmsProfile.getFieldProfiles().stream().map(DatasetProfileMapper::mapFieldProfile).collect(Collectors.toList())); + gmsProfile.getFieldProfiles().stream() + .map(DatasetProfileMapper::mapFieldProfile) + .collect(Collectors.toList())); } return result; } - private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile(DatasetFieldProfile gmsProfile) { + private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile( + DatasetFieldProfile gmsProfile) { final com.linkedin.datahub.graphql.generated.DatasetFieldProfile result = new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(); result.setFieldPath(gmsProfile.getFieldPath()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 78c1299ed9bd9..0b05d420030b5 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -22,23 +24,19 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetUpdateInputMapper implements InputModelMapper, Urn> { +public class DatasetUpdateInputMapper + implements InputModelMapper, Urn> { public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection map( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(datasetUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(6); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATASET_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -46,8 +44,10 @@ public Collection apply( auditStamp.setTime(System.currentTimeMillis()); if (datasetUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (datasetUpdateInput.getDeprecation() != null) { @@ -58,29 +58,32 @@ public Collection apply( } deprecation.setNote(datasetUpdateInput.getDeprecation().getNote()); deprecation.setActor(actor, SetMode.IGNORE_NULL); - proposals.add(updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); } if (datasetUpdateInput.getInstitutionalMemory() != null) { - proposals.add(updateMappingHelper.aspectToProposal(InstitutionalMemoryUpdateMapper - .map(datasetUpdateInput.getInstitutionalMemory()), INSTITUTIONAL_MEMORY_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + InstitutionalMemoryUpdateMapper.map(datasetUpdateInput.getInstitutionalMemory()), + INSTITUTIONAL_MEMORY_ASPECT_NAME)); } if (datasetUpdateInput.getTags() != null || datasetUpdateInput.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); if (datasetUpdateInput.getGlobalTags() != null) { - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getGlobalTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getTags() - .getTags() - 
.stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } @@ -89,28 +92,32 @@ public Collection apply( final EditableSchemaMetadata editableSchemaMetadata = new EditableSchemaMetadata(); editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( - datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream().map( - element -> mapSchemaFieldInfo(element) - ).collect(Collectors.toList()))); + datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() + .map(element -> mapSchemaFieldInfo(element)) + .collect(Collectors.toList()))); editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); } if (datasetUpdateInput.getEditableProperties() != null) { final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(); - editableDatasetProperties.setDescription(datasetUpdateInput.getEditableProperties().getDescription()); + editableDatasetProperties.setDescription( + datasetUpdateInput.getEditableProperties().getDescription()); editableDatasetProperties.setLastModified(auditStamp); editableDatasetProperties.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); } return proposals; } private EditableSchemaFieldInfo mapSchemaFieldInfo( - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo - ) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); if (schemaFieldInfo.getDescription() != null) { @@ -120,11 +127,14 @@ private EditableSchemaFieldInfo mapSchemaFieldInfo( if (schemaFieldInfo.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(schemaFieldInfo.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element)).collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + schemaFieldInfo.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } return output; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index 922574d5051d3..f54adbe8ba26c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -4,39 +4,34 @@ import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; - import javax.annotation.Nonnull; - public class EditableSchemaFieldInfoMapper { - public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); + public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); - public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final EditableSchemaFieldInfo fieldInfo, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(fieldInfo, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( + @Nonnull final EditableSchemaFieldInfo fieldInfo, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(fieldInfo, entityUrn); + } - public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); - if (input.hasDescription()) { - result.setDescription((input.getDescription())); - } - if (input.hasFieldPath()) { - result.setFieldPath((input.getFieldPath())); - } - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - return result; + public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( + @Nonnull final EditableSchemaFieldInfo input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = + new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); + if (input.hasDescription()) { + result.setDescription((input.getDescription())); + } + if (input.hasFieldPath()) { + result.setFieldPath((input.getFieldPath())); + } + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 376558d2fd18c..3cf012a523d54 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; -import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.common.urn.Urn; - -import javax.annotation.Nonnull; +import com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; +import 
javax.annotation.Nonnull; public class EditableSchemaMetadataMapper { - public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - - public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(metadata, entityUrn); - } + public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply(@Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); - result.setEditableSchemaFieldInfo(input.getEditableSchemaFieldInfo().stream().map(schemaField -> - EditableSchemaFieldInfoMapper.map(schemaField, entityUrn) - ).collect(Collectors.toList())); - return result; - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( + @Nonnull final EditableSchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( + @Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = + new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); + result.setEditableSchemaFieldInfo( + input.getEditableSchemaFieldInfo().stream() + .map(schemaField -> EditableSchemaFieldInfoMapper.map(schemaField, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b76767fa5d045..b99b243da5b94 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -5,14 +5,12 @@ import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import lombok.extern.slf4j.Slf4j; - import java.util.stream.Collectors; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { - private ForeignKeyConstraintMapper() { } + private ForeignKeyConstraintMapper() {} public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); @@ -22,15 +20,15 @@ public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint } if (constraint.hasSourceFields()) { result.setSourceFields( - constraint.getSourceFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getSourceFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( - constraint.getForeignFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + 
constraint.getForeignFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index 515cba5e99c74..dd345bebf657f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -5,63 +5,66 @@ import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.schema.SchemaMetadata; - import javax.annotation.Nonnull; -public class PlatformSchemaMapper implements ModelMapper { +public class PlatformSchemaMapper + implements ModelMapper { - public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); + public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); - } + public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(metadata); + } - @Override - public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { - Object result; - if (input.isSchemaless()) { - return null; - } else if (input.isPrestoDDL()) { - final TableSchema prestoSchema = new TableSchema(); - prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); - result = prestoSchema; - } else if (input.isOracleDDL()) { - final TableSchema oracleSchema = new TableSchema(); - oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); - result = oracleSchema; - } else if (input.isMySqlDDL()) { - final TableSchema mySqlSchema = new TableSchema(); - mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); - result = mySqlSchema; - } else if (input.isKafkaSchema()) { - final TableSchema kafkaSchema = new TableSchema(); - kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); - result = kafkaSchema; - } else if (input.isOrcSchema()) { - final TableSchema orcSchema = new TableSchema(); - orcSchema.setSchema(input.getOrcSchema().getSchema()); - result = orcSchema; - } else if (input.isBinaryJsonSchema()) { - final TableSchema binaryJsonSchema = new TableSchema(); - binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); - result = binaryJsonSchema; - } else if (input.isEspressoSchema()) { - final KeyValueSchema espressoSchema = new KeyValueSchema(); - espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); - espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); - result = espressoSchema; - } else if (input.isKeyValueSchema()) { - final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); - otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); - otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); - result = otherKeyValueSchema; - } else if (input.isOtherSchema()) { - final TableSchema otherTableSchema = new TableSchema(); - otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); - result = otherTableSchema; - } else { - throw new RuntimeException(String.format("Unrecognized platform schema type %s 
provided", input.memberType().getType().name())); - } - return (PlatformSchema) result; + @Override + public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { + Object result; + if (input.isSchemaless()) { + return null; + } else if (input.isPrestoDDL()) { + final TableSchema prestoSchema = new TableSchema(); + prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); + result = prestoSchema; + } else if (input.isOracleDDL()) { + final TableSchema oracleSchema = new TableSchema(); + oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); + result = oracleSchema; + } else if (input.isMySqlDDL()) { + final TableSchema mySqlSchema = new TableSchema(); + mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); + result = mySqlSchema; + } else if (input.isKafkaSchema()) { + final TableSchema kafkaSchema = new TableSchema(); + kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); + result = kafkaSchema; + } else if (input.isOrcSchema()) { + final TableSchema orcSchema = new TableSchema(); + orcSchema.setSchema(input.getOrcSchema().getSchema()); + result = orcSchema; + } else if (input.isBinaryJsonSchema()) { + final TableSchema binaryJsonSchema = new TableSchema(); + binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); + result = binaryJsonSchema; + } else if (input.isEspressoSchema()) { + final KeyValueSchema espressoSchema = new KeyValueSchema(); + espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); + espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); + result = espressoSchema; + } else if (input.isKeyValueSchema()) { + final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); + otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); + otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); + result = otherKeyValueSchema; + } else if (input.isOtherSchema()) { + final TableSchema otherTableSchema = new TableSchema(); + otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); + result = otherTableSchema; + } else { + throw new RuntimeException( + String.format( + "Unrecognized platform schema type %s provided", + input.memberType().getType().name())); } + return (PlatformSchema) result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index f05a1adb6b443..f53803ce5be85 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -3,72 +3,75 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; - +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import javax.annotation.Nonnull; public class SchemaFieldMapper { - public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); - public static SchemaField map(@Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - 
return INSTANCE.apply(metadata, entityUrn); - } + public static SchemaField map( + @Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public SchemaField apply(@Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { - final SchemaField result = new SchemaField(); - result.setDescription(input.getDescription()); - result.setFieldPath(input.getFieldPath()); - result.setJsonPath(input.getJsonPath()); - result.setRecursive(input.isRecursive()); - result.setNullable(input.isNullable()); - result.setNativeDataType(input.getNativeDataType()); - result.setType(mapSchemaFieldDataType(input.getType())); - result.setLabel(input.getLabel()); - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - result.setIsPartOfKey(input.isIsPartOfKey()); - result.setIsPartitioningKey(input.isIsPartitioningKey()); - return result; + public SchemaField apply( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + final SchemaField result = new SchemaField(); + result.setDescription(input.getDescription()); + result.setFieldPath(input.getFieldPath()); + result.setJsonPath(input.getJsonPath()); + result.setRecursive(input.isRecursive()); + result.setNullable(input.isNullable()); + result.setNativeDataType(input.getNativeDataType()); + result.setType(mapSchemaFieldDataType(input.getType())); + result.setLabel(input.getLabel()); + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + result.setIsPartOfKey(input.isIsPartOfKey()); + result.setIsPartitioningKey(input.isIsPartitioningKey()); + return result; + } - private SchemaFieldDataType mapSchemaFieldDataType(@Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { - final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); - if (type.isBytesType()) { - return SchemaFieldDataType.BYTES; - } else if (type.isFixedType()) { - return SchemaFieldDataType.FIXED; - } else if (type.isBooleanType()) { - return SchemaFieldDataType.BOOLEAN; - } else if (type.isStringType()) { - return SchemaFieldDataType.STRING; - } else if (type.isNumberType()) { - return SchemaFieldDataType.NUMBER; - } else if (type.isDateType()) { - return SchemaFieldDataType.DATE; - } else if (type.isTimeType()) { - return SchemaFieldDataType.TIME; - } else if (type.isEnumType()) { - return SchemaFieldDataType.ENUM; - } else if (type.isNullType()) { - return SchemaFieldDataType.NULL; - } else if (type.isArrayType()) { - return SchemaFieldDataType.ARRAY; - } else if (type.isMapType()) { - return SchemaFieldDataType.MAP; - } else if (type.isRecordType()) { - return SchemaFieldDataType.STRUCT; - } else if (type.isUnionType()) { - return SchemaFieldDataType.UNION; - } else { - throw new RuntimeException(String.format("Unrecognized SchemaFieldDataType provided %s", - type.memberType().toString())); - } + private SchemaFieldDataType mapSchemaFieldDataType( + @Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { + 
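// Context for the method that begins here: SchemaFieldDataType on the GMS side is a Pegasus
// union, so the body below probes its is*Type() accessors one by one and collapses each
// member onto the flat GQL SchemaFieldDataType enum (record types become STRUCT, union
// types UNION, and so on), throwing on any member the GQL enum does not model.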
final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); + if (type.isBytesType()) { + return SchemaFieldDataType.BYTES; + } else if (type.isFixedType()) { + return SchemaFieldDataType.FIXED; + } else if (type.isBooleanType()) { + return SchemaFieldDataType.BOOLEAN; + } else if (type.isStringType()) { + return SchemaFieldDataType.STRING; + } else if (type.isNumberType()) { + return SchemaFieldDataType.NUMBER; + } else if (type.isDateType()) { + return SchemaFieldDataType.DATE; + } else if (type.isTimeType()) { + return SchemaFieldDataType.TIME; + } else if (type.isEnumType()) { + return SchemaFieldDataType.ENUM; + } else if (type.isNullType()) { + return SchemaFieldDataType.NULL; + } else if (type.isArrayType()) { + return SchemaFieldDataType.ARRAY; + } else if (type.isMapType()) { + return SchemaFieldDataType.MAP; + } else if (type.isRecordType()) { + return SchemaFieldDataType.STRUCT; + } else if (type.isUnionType()) { + return SchemaFieldDataType.UNION; + } else { + throw new RuntimeException( + String.format( + "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index eb793cc17efb6..d0424ba89eca1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -4,44 +4,53 @@ import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; - +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.stream.Collectors; public class SchemaMapper { - public static final SchemaMapper INSTANCE = new SchemaMapper(); + public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); - } + public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, null, entityUrn); + } - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); - } + public static Schema map( + @Nonnull final SchemaMetadata metadata, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, systemMetadata, entityUrn); + } - public Schema apply(@Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - final Schema result = new Schema(); - if (input.getDataset() != null) { - result.setDatasetUrn(input.getDataset().toString()); - } - if (systemMetadata != null) { - result.setLastObserved(systemMetadata.getLastObserved()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> 
SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - if (input.getForeignKeys() != null) { - result.setForeignKeys(input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) - .collect(Collectors.toList())); - } - return result; + public Schema apply( + @Nonnull final com.linkedin.schema.SchemaMetadata input, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + final Schema result = new Schema(); + if (input.getDataset() != null) { + result.setDatasetUrn(input.getDataset().toString()); + } + if (systemMetadata != null) { + result.setLastObserved(systemMetadata.getLastObserved()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + if (input.getForeignKeys() != null) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(ForeignKeyConstraintMapper::map) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 00cb91bed8abb..31381073a16dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -6,43 +6,42 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class SchemaMetadataMapper { - public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); + public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); - public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(aspect, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(aspect, entityUrn); + } - public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); - final com.linkedin.datahub.graphql.generated.SchemaMetadata result = - new com.linkedin.datahub.graphql.generated.SchemaMetadata(); + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + final com.linkedin.datahub.graphql.generated.SchemaMetadata result = + new com.linkedin.datahub.graphql.generated.SchemaMetadata(); - if (input.hasDataset()) { - result.setDatasetUrn(input.getDataset().toString()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - 
result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); - if (input.hasForeignKeys()) { - result.setForeignKeys(input.getForeignKeys().stream().map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map( - foreignKeyConstraint - )).collect(Collectors.toList())); - } - return result; + if (input.hasDataset()) { + result.setDatasetUrn(input.getDataset().toString()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setAspectVersion(aspect.getVersion()); + if (input.hasForeignKeys()) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map(foreignKeyConstraint)) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 241c4872b1caa..727e8629f74b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -14,11 +16,11 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.generated.VersionedDataset; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -38,13 +40,10 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ @Slf4j public class VersionedDatasetMapper implements ModelMapper { @@ -67,28 +66,52 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { SystemMetadata schemaSystemMetadata = getSystemMetadata(aspectMap, SCHEMA_METADATA_ASPECT_NAME); mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSchema( + SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + 
GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } @@ -104,12 +127,15 @@ private void mapDatasetKey(@Nonnull VersionedDataset dataset, @Nonnull DataMap d final DatasetKey gmsKey = new DatasetKey(dataMap); dataset.setName(gmsKey.getName()); dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + dataset.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); } - private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { + private void mapDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { final DatasetProperties gmsProperties = new DatasetProperties(dataMap); final com.linkedin.datahub.graphql.generated.DatasetProperties properties = new com.linkedin.datahub.graphql.generated.DatasetProperties(); @@ -118,7 +144,8 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da if (gmsProperties.getExternalUrl() != null) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); if (gmsProperties.getName() != null) { properties.setName(gmsProperties.getName()); } else { @@ -128,8 +155,10 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da dataset.setProperties(properties); } - private void mapEditableDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); + private void mapEditableDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); editableProperties.setDescription(editableDatasetProperties.getDescription()); dataset.setEditableProperties(editableProperties); @@ -145,18 +174,21 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); 
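// Both mappers route tags through GlobalTagsMapper, but note the asymmetry: DatasetMapper
// above populates the legacy globalTags field as well as tags, while this VersionedDataset
// variant sets only tags, presumably because the versioned GQL type never exposed the
// deprecated field.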
dataset.setTags(globalTags); } private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index df8de87ff69ff..51ef254f52225 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -5,32 +5,32 @@ import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class DomainAssociationMapper { - public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); + public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); - public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, - @Nonnull final String entityUrn - ) { - return INSTANCE.apply(domains, entityUrn); - } + public static DomainAssociation map( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + return INSTANCE.apply(domains, entityUrn); + } - public DomainAssociation apply(@Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { - DomainAssociation association = new DomainAssociation(); - association.setDomain(Domain.builder() - .setType(EntityType.DOMAIN) - .setUrn(domains.getDomains().get(0).toString()).build()); - association.setAssociatedUrn(entityUrn); - return association; - } - return null; + public DomainAssociation apply( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0) { + DomainAssociation association = new DomainAssociation(); + association.setDomain( + Domain.builder() + .setType(EntityType.DOMAIN) + .setUrn(domains.getDomains().get(0).toString()) + .build()); + association.setAssociatedUrn(entityUrn); + return association; } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index fe52b5eff718f..7ff1f70311b22 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; - public class DomainMapper { public static Domain map(final EntityResponse entityResponse) { @@ -33,30 +32,38 @@ public static Domain map(final EntityResponse entityResponse) { return null; } - final EnvelopedAspect envelopedDomainProperties = aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedDomainProperties = + aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (envelopedDomainProperties != null) { - result.setProperties(mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); + result.setProperties( + mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + 
InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } return result; } - private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties(final DomainProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = new com.linkedin.datahub.graphql.generated.DomainProperties(); + private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( + final DomainProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.DomainProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); return propertiesResult; } - private DomainMapper() { } + private DomainMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index 4879c339d99fa..06d5df9354380 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -17,8 +17,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashSet; @@ -29,19 +27,21 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; +public class DomainType + implements SearchableEntityType, + com.linkedin.datahub.graphql.types.EntityType { -public class DomainType implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; - public DomainType(final EntityClient entityClient) { + public DomainType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -61,28 +61,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List domainUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List domainUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.DOMAIN_ENTITY_NAME, - new HashSet<>(domainUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.DOMAIN_ENTITY_NAME, + new HashSet<>(domainUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : domainUrns) { 
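+ // getOrDefault keeps one slot per requested URN, so entities missing from the
+ // batch response surface as null results in input order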
gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(DomainMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DomainMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Domains", e); @@ -90,25 +92,31 @@ public List> batchLoad(@Nonnull List urns, @No } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Domain entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Domain entity type"); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } - private Urn getUrn(final String urnStr) { try { return Urn.createFromString(urnStr); @@ -116,4 +124,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index f2c9e962811b9..9a27a1fba853f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -1,17 +1,21 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryNodeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -20,18 +24,12 @@ 
import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - -public class GlossaryNodeType implements com.linkedin.datahub.graphql.types.EntityType { +public class GlossaryNodeType + implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_NODE_KEY_ASPECT_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME - ); + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_NODE_KEY_ASPECT_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); private final EntityClient _entityClient; @@ -55,25 +53,31 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(final List urns, final QueryContext context) { - final List glossaryNodeUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + final List urns, final QueryContext context) { + final List glossaryNodeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map glossaryNodeMap = _entityClient.batchGetV2(GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(glossaryNodeUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map glossaryNodeMap = + _entityClient.batchGetV2( + GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(glossaryNodeUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsGlossaryNode -> - gmsGlossaryNode == null ? null - : DataFetcherResult.newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) - .build()) + .map( + gmsGlossaryNode -> + gmsGlossaryNode == null + ? 
null + : DataFetcherResult.newResult() + .data(GlossaryNodeMapper.map(gmsGlossaryNode)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load GlossaryNodes", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index 3574c17a50923..c40740238f61e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.ArrayList; @@ -39,118 +42,135 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class GlossaryTermType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of(""); - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_TERM_KEY_ASPECT_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, - GLOSSARY_RELATED_TERM_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - BROWSE_PATHS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME - ); - - private final EntityClient _entityClient; - - public GlossaryTermType(final EntityClient entityClient) { - _entityClient = entityClient; +public class GlossaryTermType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of(""); + + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_TERM_KEY_ASPECT_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + GLOSSARY_RELATED_TERM_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + BROWSE_PATHS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME); + + private final EntityClient _entityClient; + + public GlossaryTermType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return GlossaryTerm.class; + } + + @Override + public EntityType type() { + return EntityType.GLOSSARY_TERM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + final List glossaryTermUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map glossaryTermMap = + _entityClient.batchGetV2( + GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(glossaryTermUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List gmsResults = new 
ArrayList<>(); + for (Urn urn : glossaryTermUrns) { + gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsGlossaryTerm -> + gmsGlossaryTerm == null + ? null + : DataFetcherResult.newResult() + .data(GlossaryTermMapper.map(gmsGlossaryTerm)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load GlossaryTerms", e); } - - @Override - public Class objectClass() { - return GlossaryTerm.class; - } - - @Override - public EntityType type() { - return EntityType.GLOSSARY_TERM; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - final List glossaryTermUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map glossaryTermMap = _entityClient.batchGetV2(GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(glossaryTermUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : glossaryTermUrns) { - gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsGlossaryTerm -> - gmsGlossaryTerm == null ? null - : DataFetcherResult.newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load GlossaryTerms", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "glossaryTerm", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "glossaryTerm", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( "glossaryTerm", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "glossaryTerm", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } - + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "glossaryTerm", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java index 93b6ab53d5a3a..59f7cc8a9c828 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java @@ -1,27 +1,27 @@ package com.linkedin.datahub.graphql.types.glossary; import com.linkedin.common.urn.GlossaryTermUrn; - import java.net.URISyntaxException; import java.util.regex.Pattern; public class GlossaryTermUtils { - private GlossaryTermUtils() { } + private GlossaryTermUtils() {} - static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { - try { - return GlossaryTermUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); - } + static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { + try { + return GlossaryTermUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); } + } - public static String getGlossaryTermName(String hierarchicalName) { - if (hierarchicalName.contains(".")) { - String[] nodes = hierarchicalName.split(Pattern.quote(".")); - return nodes[nodes.length - 1]; - } - return hierarchicalName; + public static String getGlossaryTermName(String hierarchicalName) { + if (hierarchicalName.contains(".")) { + String[] nodes = hierarchicalName.split(Pattern.quote(".")); + return nodes[nodes.length - 1]; } + return hierarchicalName; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 6a1d849dd23bf..901361eb0b2be 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -13,11 +15,8 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.key.GlossaryNodeKey; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class GlossaryNodeMapper implements ModelMapper { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); @@ -35,11 +34,14 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_NODE_INFO_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + mappingHelper.mapToResult( + GLOSSARY_NODE_INFO_ASPECT_NAME, + (glossaryNode, dataMap) -> glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryNode, dataMap) -> + glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java index 2f99700bc30a1..12ba8c1e088f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java @@ -1,41 +1,44 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.GlossaryTermInfo; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class GlossaryTermInfoMapper { - public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); + public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); - public static GlossaryTermInfo map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - return INSTANCE.apply(glossaryTermInfo, entityUrn); - } + public static GlossaryTermInfo map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + return INSTANCE.apply(glossaryTermInfo, entityUrn); + } - public GlossaryTermInfo apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); - glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); - if (glossaryTermInfo.hasName()) { - glossaryTermInfoResult.setName(glossaryTermInfo.getName()); - } - if (glossaryTermInfo.hasSourceRef()) { - glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); - } - if (glossaryTermInfo.hasSourceUrl()) { - glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); - } - if (glossaryTermInfo.hasCustomProperties()) { - glossaryTermInfoResult.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); - } - return glossaryTermInfoResult; + public GlossaryTermInfo apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = + new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); + glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); + if (glossaryTermInfo.hasName()) { + glossaryTermInfoResult.setName(glossaryTermInfo.getName()); + } + if (glossaryTermInfo.hasSourceRef()) { + glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); + } + if (glossaryTermInfo.hasSourceUrl()) { + glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); + } + if (glossaryTermInfo.hasCustomProperties()) { + glossaryTermInfoResult.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } + return glossaryTermInfoResult; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index c98177b458dea..a02f79535399f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; @@ -15,71 +17,82 @@ import 
com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.key.GlossaryTermKey; -import com.linkedin.domain.Domains; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class GlossaryTermMapper implements ModelMapper { - public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); + public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); - public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { - GlossaryTerm result = new GlossaryTerm(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { + GlossaryTerm result = new GlossaryTerm(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.GLOSSARY_TERM); - final String legacyName = GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.GLOSSARY_TERM); + final String legacyName = + GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setGlossaryTermInfo(GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setProperties(GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setGlossaryTermInfo( + GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setProperties( + GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + 
INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - // If there's no name property, resort to the legacy name computation. - if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { - result.getGlossaryTermInfo().setName(legacyName); - } - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } - return mappingHelper.getResult(); + // If there's no name property, resort to the legacy name computation. + if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { + result.getGlossaryTermInfo().setName(legacyName); } - - private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); - glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); - glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } + return mappingHelper.getResult(); + } - private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); - } + private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); + glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); + glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + } + + private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java index 6b35833183393..94edfcbd31455 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java @@ -2,25 +2,27 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GlossaryTermProperties; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class GlossaryTermPropertiesMapper { public static final GlossaryTermPropertiesMapper INSTANCE = new GlossaryTermPropertiesMapper(); - public static GlossaryTermProperties map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + public static GlossaryTermProperties map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { return INSTANCE.apply(glossaryTermInfo, entityUrn); } - public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); + public GlossaryTermProperties apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = + new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); result.setDefinition(glossaryTermInfo.getDefinition()); result.setDescription(glossaryTermInfo.getDefinition()); result.setTermSource(glossaryTermInfo.getTermSource()); @@ -34,7 +36,8 @@ public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.Glossar result.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); } if (glossaryTermInfo.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index a64b0f7dc64fb..8494eace22244 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -1,51 +1,52 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.GlossaryTerms; -import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.datahub.graphql.generated.GlossaryTerm; +import com.linkedin.datahub.graphql.generated.GlossaryTerms; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *
<p>
To be replaced by auto-generated mappers implementations */ public class GlossaryTermsMapper { - public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); - - public static GlossaryTerms map( - @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(glossaryTerms, entityUrn); - } - - public GlossaryTerms apply(@Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms(); - result.setTerms(glossaryTerms.getTerms().stream().map( - association -> this.mapGlossaryTermAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } - - private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( - @Nonnull final GlossaryTermAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); - final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); - resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); - resultGlossaryTerm.setUrn(input.getUrn().toString()); - resultGlossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); - result.setTerm(resultGlossaryTerm); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } - + public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); + + public static GlossaryTerms map( + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(glossaryTerms, entityUrn); + } + + public GlossaryTerms apply( + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTerms result = + new com.linkedin.datahub.graphql.generated.GlossaryTerms(); + result.setTerms( + glossaryTerms.getTerms().stream() + .map(association -> this.mapGlossaryTermAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( + @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = + new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); + final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); + resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); + resultGlossaryTerm.setUrn(input.getUrn().toString()); + resultGlossaryTerm.setName( + GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); + result.setTerm(resultGlossaryTerm); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java index d575a81f4ae03..621fcf5f04140 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java @@ -3,26 +3,27 @@ import 
com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.query.AutoCompleteResult; - import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AutoCompleteResultsMapper + implements ModelMapper { -public class AutoCompleteResultsMapper implements ModelMapper { - - public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); + public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); - public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { - return INSTANCE.apply(results); - } + public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { + return INSTANCE.apply(results); + } - @Override - public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { - final AutoCompleteResults result = new AutoCompleteResults(); - result.setQuery(input.getQuery()); - result.setSuggestions(input.getSuggestions()); - result.setEntities(input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect( - Collectors.toList())); - return result; - } + @Override + public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { + final AutoCompleteResults result = new AutoCompleteResults(); + result.setQuery(input.getQuery()); + result.setSuggestions(input.getSuggestions()); + result.setEntities( + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java index ea44c4409b709..689ff82147e15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java @@ -2,27 +2,27 @@ import com.linkedin.datahub.graphql.Constants; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class BrowsePathMapper implements ModelMapper { - public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); + public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); - public static BrowsePath map(@Nonnull final String input) { - return INSTANCE.apply(input); - } + public static BrowsePath map(@Nonnull final String input) { + return INSTANCE.apply(input); + } - @Override - public BrowsePath apply(@Nonnull final String input) { - final BrowsePath browsePath = new BrowsePath(); - final List path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()); - browsePath.setPath(path); - return browsePath; - } + @Override + public BrowsePath apply(@Nonnull final String input) { + final BrowsePath browsePath = new BrowsePath(); + final List path = + Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList()); + browsePath.setPath(path); + return browsePath; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java index 4dac4468a80d5..ae70823d675d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java @@ -1,25 +1,24 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; public class BrowsePathsMapper implements ModelMapper, List> { - public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); + public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); - public static List map(@Nonnull final List input) { - return INSTANCE.apply(input); - } + public static List map(@Nonnull final List input) { + return INSTANCE.apply(input); + } - @Override - public List apply(@Nonnull final List input) { - List results = new ArrayList<>(); - for (String pathStr : input) { - results.add(BrowsePathMapper.map(pathStr)); - } - return results; + @Override + public List apply(@Nonnull final List input) { + List results = new ArrayList<>(); + for (String pathStr : input) { + results.add(BrowsePathMapper.map(pathStr)); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java index c3e74c28fe59d..5cac03b19a74c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java @@ -9,10 +9,8 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class BrowseResultMapper { - private BrowseResultMapper() { - } + private BrowseResultMapper() {} public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) { final BrowseResults result = new BrowseResults(); @@ -31,7 +29,9 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) result.setMetadata(browseResultMetadata); List entities = - input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(Collectors.toList()); + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .collect(Collectors.toList()); result.setEntities(entities); List groups = @@ -41,7 +41,8 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) return result; } - private static BrowseResultGroup mapGroup(@Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { + private static BrowseResultGroup mapGroup( + @Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { final BrowseResultGroup result = new BrowseResultGroup(); result.setName(group.getName()); result.setCount(group.getCount()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java index e6172debb439e..c58341f994d4f 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java @@ -1,8 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Maps an input of type I to an output of type O with actor context. - */ +/** Maps an input of type I to an output of type O with actor context. */ public interface InputModelMapper { - O apply(final I input, final A actor); -} \ No newline at end of file + O apply(final I input, final A actor); +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 2a615b24eaac2..7c7dab2e02472 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.AggregationMetadata; import com.linkedin.datahub.graphql.generated.FacetMetadata; @@ -10,75 +13,87 @@ import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class MapperUtils { - private MapperUtils() { - - } + private MapperUtils() {} public static SearchResult mapResult(SearchEntity searchEntity) { - return new SearchResult(UrnToEntityMapper.map(searchEntity.getEntity()), + return new SearchResult( + UrnToEntityMapper.map(searchEntity.getEntity()), getInsightsFromFeatures(searchEntity.getFeatures()), getMatchedFieldEntry(searchEntity.getMatchedFields())); } - public static FacetMetadata mapFacet(com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { + public static FacetMetadata mapFacet( + com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { final FacetMetadata facetMetadata = new FacetMetadata(); - List aggregationFacets = List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); - List isEntityTypeFilter = aggregationFacets.stream().map( - facet -> facet.equals("entity") || facet.contains("_entityType")).collect(Collectors.toList()); + List aggregationFacets = + List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); + List isEntityTypeFilter = + aggregationFacets.stream() + .map(facet -> facet.equals("entity") || facet.contains("_entityType")) + .collect(Collectors.toList()); facetMetadata.setField(aggregationMetadata.getName()); facetMetadata.setDisplayName( - Optional.ofNullable(aggregationMetadata.getDisplayName()).orElse(aggregationMetadata.getName())); - facetMetadata.setAggregations(aggregationMetadata.getFilterValues() - .stream() - .map(filterValue -> new AggregationMetadata(convertFilterValue(filterValue.getValue(), 
isEntityTypeFilter), - filterValue.getFacetCount(), - filterValue.getEntity() == null ? null : UrnToEntityMapper.map(filterValue.getEntity()))) - .collect(Collectors.toList())); + Optional.ofNullable(aggregationMetadata.getDisplayName()) + .orElse(aggregationMetadata.getName())); + facetMetadata.setAggregations( + aggregationMetadata.getFilterValues().stream() + .map( + filterValue -> + new AggregationMetadata( + convertFilterValue(filterValue.getValue(), isEntityTypeFilter), + filterValue.getFacetCount(), + filterValue.getEntity() == null + ? null + : UrnToEntityMapper.map(filterValue.getEntity()))) + .collect(Collectors.toList())); return facetMetadata; } public static String convertFilterValue(String filterValue, List isEntityTypeFilter) { String[] aggregations = filterValue.split(AGGREGATION_SEPARATOR_CHAR); - return IntStream.range(0, aggregations.length).mapToObj( - idx -> idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) ? EntityTypeMapper.getType(aggregations[idx]).toString() : aggregations[idx]) + return IntStream.range(0, aggregations.length) + .mapToObj( + idx -> + idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) + ? EntityTypeMapper.getType(aggregations[idx]).toString() + : aggregations[idx]) .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } - public static List getMatchedFieldEntry(List highlightMetadata) { + public static List getMatchedFieldEntry( + List highlightMetadata) { return highlightMetadata.stream() - .map(field -> { - MatchedField matchedField = new MatchedField(); - matchedField.setName(field.getName()); - matchedField.setValue(field.getValue()); - if (SearchUtils.isUrn(field.getValue())) { - try { + .map( + field -> { + MatchedField matchedField = new MatchedField(); + matchedField.setName(field.getName()); + matchedField.setValue(field.getValue()); + if (SearchUtils.isUrn(field.getValue())) { + try { Urn urn = Urn.createFromString(field.getValue()); matchedField.setEntity(UrnToEntityMapper.map(urn)); - } catch (URISyntaxException e) { + } catch (URISyntaxException e) { log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); + } } - } - return matchedField; - }) + return matchedField; + }) .collect(Collectors.toList()); } - public static SearchSuggestion mapSearchSuggestion(com.linkedin.metadata.search.SearchSuggestion suggestion) { - return new SearchSuggestion(suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); + public static SearchSuggestion mapSearchSuggestion( + com.linkedin.metadata.search.SearchSuggestion suggestion) { + return new SearchSuggestion( + suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java index 08afbd510b98f..2167be9f27ca8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Simple interface for classes capable of mapping an input of type I to - * an output of type O. - */ +/** Simple interface for classes capable of mapping an input of type I to an output of type O. 
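+ * <p>Mappers in this package follow the same shape (see e.g. BrowsePathMapper above): a
+ * shared INSTANCE constant plus a static map(...) helper that delegates to apply(...).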
*/ public interface ModelMapper { - O apply(final I input); + O apply(final I input); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java index 903e962524734..e0ac0336c8715 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java @@ -3,7 +3,5 @@ import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; import com.linkedin.metadata.aspect.EnvelopedAspect; - -public interface TimeSeriesAspectMapper extends ModelMapper { - -} +public interface TimeSeriesAspectMapper + extends ModelMapper {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java index dd00727fc2845..baf632ae8bdf4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Entity; @@ -12,10 +15,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - - public class UrnScrollAcrossLineageResultsMapper { public static ScrollAcrossLineageResults map( LineageScrollResult searchResult) { @@ -30,8 +29,12 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java index fd774d73f3df7..72eb71cd095bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - public class UrnScrollResultsMapper { public static ScrollResults map( com.linkedin.metadata.search.ScrollResult 
scrollResult) { @@ -25,8 +24,12 @@ public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index ae87d0269c188..642fe90cf2aed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -1,23 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.FreshnessStats; -import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; +import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - - public class UrnSearchAcrossLineageResultsMapper { public static SearchAcrossLineageResults map( LineageSearchResult searchResult) { @@ -32,17 +31,25 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); if (input.hasFreshness()) { FreshnessStats outputFreshness = new FreshnessStats(); outputFreshness.setCached(input.getFreshness().isCached()); - outputFreshness.setSystemFreshness(input.getFreshness().getSystemFreshness().entrySet().stream().map(x -> - SystemFreshness.builder() - 
.setSystemName(x.getKey()) - .setFreshnessMillis(x.getValue()) - .build()).collect(Collectors.toList())); + outputFreshness.setSystemFreshness( + input.getFreshness().getSystemFreshness().entrySet().stream() + .map( + x -> + SystemFreshness.builder() + .setSystemName(x.getKey()) + .setFreshnessMillis(x.getValue()) + .build()) + .collect(Collectors.toList())); result.setFreshness(outputFreshness); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java index b16e2f10d1df7..d814c44e469bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - public class UrnSearchResultsMapper { public static SearchResults map( com.linkedin.metadata.search.SearchResult searchResult) { @@ -25,9 +24,16 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); - result.setSuggestions(searchResultMetadata.getSuggestions().stream().map(MapperUtils::mapSearchSuggestion).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); + result.setSuggestions( + searchResultMetadata.getSuggestions().stream() + .map(MapperUtils::mapSearchSuggestion) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index f5594afc1a5b5..da3ddd1115437 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,103 +41,122 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static 
-
-
-public class MLFeatureTableType implements SearchableEntityType<MLFeatureTable, String>,
-    BrowsableEntityType<MLFeatureTable, String> {
-
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("platform", "name");
-    private final EntityClient _entityClient;
-
-    public MLFeatureTableType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.MLFEATURE_TABLE;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<MLFeatureTable> objectClass() {
-        return MLFeatureTable.class;
-    }
-
-    @Override
-    public List<DataFetcherResult<MLFeatureTable>> batchLoad(final List<String> urns, final QueryContext context) throws Exception {
-        final List<Urn> mlFeatureTableUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> mlFeatureTableMap = _entityClient.batchGetV2(ML_FEATURE_TABLE_ENTITY_NAME,
-                new HashSet<>(mlFeatureTableUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = mlFeatureTableUrns.stream()
-                .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null))
-                .collect(Collectors.toList());
-
-            return gmsResults.stream()
-                .map(gmsMlFeatureTable -> gmsMlFeatureTable == null ? null
-                    : DataFetcherResult.<MLFeatureTable>newResult()
-                        .data(MLFeatureTableMapper.map(gmsMlFeatureTable))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLFeatureTables", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlFeatureTable", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlFeatureTable", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
-            "mlFeatureTable",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
-
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
+public class MLFeatureTableType
+    implements SearchableEntityType<MLFeatureTable, String>,
+        BrowsableEntityType<MLFeatureTable, String> {
+
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("platform", "name");
+  private final EntityClient _entityClient;
+
+  public MLFeatureTableType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.MLFEATURE_TABLE;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<MLFeatureTable> objectClass() {
+    return MLFeatureTable.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<MLFeatureTable>> batchLoad(
+      final List<String> urns, final QueryContext context) throws Exception {
+    final List<Urn> mlFeatureTableUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> mlFeatureTableMap =
+          _entityClient.batchGetV2(
+              ML_FEATURE_TABLE_ENTITY_NAME,
+              new HashSet<>(mlFeatureTableUrns),
+              null,
+              context.getAuthentication());
+
+      final List<EntityResponse> gmsResults =
+          mlFeatureTableUrns.stream()
+              .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null))
+              .collect(Collectors.toList());
+
+      return gmsResults.stream()
+          .map(
+              gmsMlFeatureTable ->
+                  gmsMlFeatureTable == null
+                      ? null
+                      : DataFetcherResult.<MLFeatureTable>newResult()
+                          .data(MLFeatureTableMapper.map(gmsMlFeatureTable))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLFeatureTables", e);
     }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlFeatureTable",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete(
+            "mlFeatureTable", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "mlFeatureTable", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java
index f5e0d80948bcc..6f94ea44cd476 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -18,8 +20,8 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -31,78 +33,94 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class MLFeatureType implements SearchableEntityType<MLFeature, String> {
 
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("");
-    private final EntityClient _entityClient;
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("");
+  private final EntityClient _entityClient;
 
-    public MLFeatureType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
+  public MLFeatureType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
 
-    @Override
-    public EntityType type() {
-        return EntityType.MLFEATURE;
-    }
+  @Override
+  public EntityType type() {
+    return EntityType.MLFEATURE;
+  }
 
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
 
-    @Override
-    public Class<MLFeature> objectClass() {
-        return MLFeature.class;
-    }
+  @Override
+  public Class<MLFeature> objectClass() {
+    return MLFeature.class;
+  }
 
-    @Override
-    public List<DataFetcherResult<MLFeature>> batchLoad(final List<String> urns, @Nonnull final QueryContext context)
-        throws Exception {
-        final List<Urn> mlFeatureUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
+  @Override
+  public List<DataFetcherResult<MLFeature>> batchLoad(
+      final List<String> urns, @Nonnull final QueryContext context) throws Exception {
+    final List<Urn> mlFeatureUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
 
-        try {
-            final Map<Urn, EntityResponse> mlFeatureMap = _entityClient.batchGetV2(ML_FEATURE_ENTITY_NAME,
-                new HashSet<>(mlFeatureUrns), null, context.getAuthentication());
+    try {
+      final Map<Urn, EntityResponse> mlFeatureMap =
+          _entityClient.batchGetV2(
+              ML_FEATURE_ENTITY_NAME,
+              new HashSet<>(mlFeatureUrns),
+              null,
+              context.getAuthentication());
 
-            final List<EntityResponse> gmsResults = mlFeatureUrns.stream()
-                .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null))
-                .collect(Collectors.toList());
+      final List<EntityResponse> gmsResults =
+          mlFeatureUrns.stream()
+              .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null))
+              .collect(Collectors.toList());
 
-            return gmsResults.stream()
-                .map(gmsMlFeature -> gmsMlFeature == null ? null
-                    : DataFetcherResult.<MLFeature>newResult()
-                        .data(MLFeatureMapper.map(gmsMlFeature))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLFeatures", e);
-        }
+      return gmsResults.stream()
+          .map(
+              gmsMlFeature ->
+                  gmsMlFeature == null
+                      ? null
+                      : DataFetcherResult.<MLFeature>newResult()
+                          .data(MLFeatureMapper.map(gmsMlFeature))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLFeatures", e);
     }
+  }
 
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlFeature", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlFeature",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
 
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java
index 05b70c15bafc6..d505b70effdd4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -25,8 +28,8 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -38,104 +41,123 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-public class MLModelGroupType implements SearchableEntityType<MLModelGroup, String>,
-    BrowsableEntityType<MLModelGroup, String> {
-
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
-    private final EntityClient _entityClient;
-
-    public MLModelGroupType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.MLMODEL_GROUP;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<MLModelGroup> objectClass() {
-        return MLModelGroup.class;
-    }
-
-    @Override
-    public List<DataFetcherResult<MLModelGroup>> batchLoad(final List<String> urns, @Nonnull final QueryContext context)
-        throws Exception {
-        final List<Urn> mlModelGroupUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> mlModelMap = _entityClient.batchGetV2(ML_MODEL_GROUP_ENTITY_NAME,
-                new HashSet<>(mlModelGroupUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = mlModelGroupUrns.stream()
-                .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
-                .collect(Collectors.toList());
-
-            return gmsResults.stream()
-                .map(gmsMlModelGroup -> gmsMlModelGroup == null ? null
-                    : DataFetcherResult.<MLModelGroup>newResult()
-                        .data(MLModelGroupMapper.map(gmsMlModelGroup))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLModelGroups", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlModelGroup", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlModelGroup", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
-            "mlModelGroup",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
-
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
+public class MLModelGroupType
+    implements SearchableEntityType<MLModelGroup, String>,
+        BrowsableEntityType<MLModelGroup, String> {
+
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
+  private final EntityClient _entityClient;
+
+  public MLModelGroupType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.MLMODEL_GROUP;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<MLModelGroup> objectClass() {
+    return MLModelGroup.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<MLModelGroup>> batchLoad(
+      final List<String> urns, @Nonnull final QueryContext context) throws Exception {
+    final List<Urn> mlModelGroupUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> mlModelMap =
+          _entityClient.batchGetV2(
+              ML_MODEL_GROUP_ENTITY_NAME,
+              new HashSet<>(mlModelGroupUrns),
+              null,
+              context.getAuthentication());
+
+      final List<EntityResponse> gmsResults =
+          mlModelGroupUrns.stream()
+              .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
+              .collect(Collectors.toList());
+
+      return gmsResults.stream()
+          .map(
+              gmsMlModelGroup ->
+                  gmsMlModelGroup == null
+                      ? null
+                      : DataFetcherResult.<MLModelGroup>newResult()
+                          .data(MLModelGroupMapper.map(gmsMlModelGroup))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLModelGroups", e);
    }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlModelGroup",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete(
+            "mlModelGroup", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "mlModelGroup", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(
+            MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java
index ef4be247a246b..27b791d78e78e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -25,8 +28,8 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.browse.BrowseResult;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -38,102 +41,116 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-
-
-public class MLModelType implements SearchableEntityType<MLModel, String>, BrowsableEntityType<MLModel, String> {
-
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
-    private final EntityClient _entityClient;
-
-    public MLModelType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
-
-    @Override
-    public EntityType type() {
-        return EntityType.MLMODEL;
-    }
-
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
-
-    @Override
-    public Class<MLModel> objectClass() {
-        return MLModel.class;
-    }
-
-    @Override
-    public List<DataFetcherResult<MLModel>> batchLoad(final List<String> urns, final QueryContext context) throws Exception {
-        final List<Urn> mlModelUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
-
-        try {
-            final Map<Urn, EntityResponse> mlModelMap = _entityClient.batchGetV2(ML_MODEL_ENTITY_NAME,
-                new HashSet<>(mlModelUrns), null, context.getAuthentication());
-
-            final List<EntityResponse> gmsResults = mlModelUrns.stream()
-                .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
-                .collect(Collectors.toList());
-
-            return gmsResults.stream()
-                .map(gmsMlModel -> gmsMlModel == null ? null
-                    : DataFetcherResult.<MLModel>newResult()
-                        .data(MLModelMapper.map(gmsMlModel))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLModels", e);
-        }
-    }
-
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlModel", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
-
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
-
-    @Override
-    public BrowseResults browse(@Nonnull List<String> path,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
-        final BrowseResult result = _entityClient.browse(
-            "mlModel",
-            pathStr,
-            facetFilters,
-            start,
-            count,
-            context.getAuthentication());
-        return BrowseResultMapper.map(result);
-    }
-
-    @Override
-    public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception {
-        final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication());
-        return BrowsePathsMapper.map(result);
+public class MLModelType
+    implements SearchableEntityType<MLModel, String>, BrowsableEntityType<MLModel, String> {
+
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
+  private final EntityClient _entityClient;
+
+  public MLModelType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
+
+  @Override
+  public EntityType type() {
+    return EntityType.MLMODEL;
+  }
+
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
+
+  @Override
+  public Class<MLModel> objectClass() {
+    return MLModel.class;
+  }
+
+  @Override
+  public List<DataFetcherResult<MLModel>> batchLoad(
+      final List<String> urns, final QueryContext context) throws Exception {
+    final List<Urn> mlModelUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
+
+    try {
+      final Map<Urn, EntityResponse> mlModelMap =
+          _entityClient.batchGetV2(
+              ML_MODEL_ENTITY_NAME, new HashSet<>(mlModelUrns), null, context.getAuthentication());
+
+      final List<EntityResponse> gmsResults =
+          mlModelUrns.stream()
+              .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null))
+              .collect(Collectors.toList());
+
+      return gmsResults.stream()
+          .map(
+              gmsMlModel ->
+                  gmsMlModel == null
+                      ? null
+                      : DataFetcherResult.<MLModel>newResult()
+                          .data(MLModelMapper.map(gmsMlModel))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLModels", e);
    }
+  }
+
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlModel",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
+
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
+
+  @Override
+  public BrowseResults browse(
+      @Nonnull List<String> path,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final String pathStr =
+        path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : "";
+    final BrowseResult result =
+        _entityClient.browse(
+            "mlModel", pathStr, facetFilters, start, count, context.getAuthentication());
+    return BrowseResultMapper.map(result);
+  }
+
+  @Override
+  public List<BrowsePath> browsePaths(@Nonnull String urn, @Nonnull final QueryContext context)
+      throws Exception {
+    final StringArray result =
+        _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication());
+    return BrowsePathsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java
index ff51bab6c114e..ccecb0ae6406f 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java
@@ -1,44 +1,47 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
-import java.net.URISyntaxException;
-
 import com.linkedin.common.urn.MLFeatureUrn;
 import com.linkedin.common.urn.MLModelUrn;
 import com.linkedin.common.urn.Urn;
+import java.net.URISyntaxException;
 
 public class MLModelUtils {
-    private MLModelUtils() { }
+  private MLModelUtils() {}
 
-    static MLModelUrn getMLModelUrn(String modelUrn) {
-        try {
-            return MLModelUrn.createFromString(modelUrn);
-        } catch (URISyntaxException uriSyntaxException) {
-            throw new RuntimeException(String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn));
-        }
+  static MLModelUrn getMLModelUrn(String modelUrn) {
+    try {
+      return MLModelUrn.createFromString(modelUrn);
+    } catch (URISyntaxException uriSyntaxException) {
+      throw new RuntimeException(
+          String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn));
     }
+  }
 
-    static Urn getMLModelGroupUrn(String modelUrn) {
-        try {
-            return Urn.createFromString(modelUrn);
-        } catch (URISyntaxException uriSyntaxException) {
-            throw new RuntimeException(String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn));
-        }
+  static Urn getMLModelGroupUrn(String modelUrn) {
+    try {
+      return Urn.createFromString(modelUrn);
+    } catch (URISyntaxException uriSyntaxException) {
+      throw new RuntimeException(
+          String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn));
    }
+  }
 
-    static MLFeatureUrn getMLFeatureUrn(String modelUrn) {
-        try {
-            return MLFeatureUrn.createFromString(modelUrn);
-        } catch (URISyntaxException uriSyntaxException) {
-            throw new RuntimeException(String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn));
-        }
+  static MLFeatureUrn getMLFeatureUrn(String modelUrn) {
+    try {
+      return MLFeatureUrn.createFromString(modelUrn);
+    } catch (URISyntaxException uriSyntaxException) {
+      throw new RuntimeException(
+          String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn));
    }
+  }
 
-    static Urn getUrn(String modelUrn) {
-        try {
-            return Urn.createFromString(modelUrn);
-        } catch (URISyntaxException uriSyntaxException) {
-            throw new RuntimeException(String.format("Failed to retrieve with urn %s, invalid urn", modelUrn));
-        }
+  static Urn getUrn(String modelUrn) {
+    try {
+      return Urn.createFromString(modelUrn);
+    } catch (URISyntaxException uriSyntaxException) {
+      throw new RuntimeException(
+          String.format("Failed to retrieve with urn %s, invalid urn", modelUrn));
    }
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java
index a6963e6b20abd..10cfe181dd292 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.types.mlmodel;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -18,8 +20,8 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.query.AutoCompleteResult;
-import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.SearchResult;
 import graphql.execution.DataFetcherResult;
 import java.util.HashSet;
@@ -31,78 +33,95 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class MLPrimaryKeyType implements SearchableEntityType<MLPrimaryKey, String> {
 
-    private static final Set<String> FACET_FIELDS = ImmutableSet.of("");
-    private final EntityClient _entityClient;
+  private static final Set<String> FACET_FIELDS = ImmutableSet.of("");
+  private final EntityClient _entityClient;
 
-    public MLPrimaryKeyType(final EntityClient entityClient) {
-        _entityClient = entityClient;
-    }
+  public MLPrimaryKeyType(final EntityClient entityClient) {
+    _entityClient = entityClient;
+  }
 
-    @Override
-    public EntityType type() {
-        return EntityType.MLPRIMARY_KEY;
-    }
+  @Override
+  public EntityType type() {
+    return EntityType.MLPRIMARY_KEY;
+  }
 
-    @Override
-    public Function<Entity, String> getKeyProvider() {
-        return Entity::getUrn;
-    }
+  @Override
+  public Function<Entity, String> getKeyProvider() {
+    return Entity::getUrn;
+  }
 
-    @Override
-    public Class<MLPrimaryKey> objectClass() {
-        return MLPrimaryKey.class;
-    }
+  @Override
+  public Class<MLPrimaryKey> objectClass() {
+    return MLPrimaryKey.class;
+  }
 
-    @Override
-    public List<DataFetcherResult<MLPrimaryKey>> batchLoad(final List<String> urns, @Nonnull final QueryContext context)
-        throws Exception {
-        final List<Urn> mlPrimaryKeyUrns = urns.stream()
-            .map(UrnUtils::getUrn)
-            .collect(Collectors.toList());
+  @Override
+  public List<DataFetcherResult<MLPrimaryKey>> batchLoad(
+      final List<String> urns, @Nonnull final QueryContext context) throws Exception {
+    final List<Urn> mlPrimaryKeyUrns =
+        urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList());
 
-        try {
-            final Map<Urn, EntityResponse> mlPrimaryKeyMap = _entityClient.batchGetV2(ML_PRIMARY_KEY_ENTITY_NAME,
-                new HashSet<>(mlPrimaryKeyUrns), null, context.getAuthentication());
+    try {
+      final Map<Urn, EntityResponse> mlPrimaryKeyMap =
+          _entityClient.batchGetV2(
+              ML_PRIMARY_KEY_ENTITY_NAME,
+              new HashSet<>(mlPrimaryKeyUrns),
+              null,
+              context.getAuthentication());
 
-            final List<EntityResponse> gmsResults = mlPrimaryKeyUrns.stream()
-                .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null))
-                .collect(Collectors.toList());
+      final List<EntityResponse> gmsResults =
+          mlPrimaryKeyUrns.stream()
+              .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null))
+              .collect(Collectors.toList());
 
-            return gmsResults.stream()
-                .map(gmsMlPrimaryKey -> gmsMlPrimaryKey == null ? null
-                    : DataFetcherResult.<MLPrimaryKey>newResult()
-                        .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey))
-                        .build())
-                .collect(Collectors.toList());
-        } catch (Exception e) {
-            throw new RuntimeException("Failed to batch load MLPrimaryKeys", e);
-        }
+      return gmsResults.stream()
+          .map(
+              gmsMlPrimaryKey ->
+                  gmsMlPrimaryKey == null
+                      ? null
+                      : DataFetcherResult.<MLPrimaryKey>newResult()
+                          .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey))
+                          .build())
+          .collect(Collectors.toList());
+    } catch (Exception e) {
+      throw new RuntimeException("Failed to batch load MLPrimaryKeys", e);
    }
+  }
 
-    @Override
-    public SearchResults search(@Nonnull String query,
-        @Nullable List<FacetFilterInput> filters,
-        int start,
-        int count,
-        @Nonnull final QueryContext context) throws Exception {
-        final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
-        final SearchResult searchResult = _entityClient.search("mlPrimaryKey", query, facetFilters, start, count,
-            context.getAuthentication(), new SearchFlags().setFulltext(true));
-        return UrnSearchResultsMapper.map(searchResult);
-    }
+  @Override
+  public SearchResults search(
+      @Nonnull String query,
+      @Nullable List<FacetFilterInput> filters,
+      int start,
+      int count,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
+    final SearchResult searchResult =
+        _entityClient.search(
+            "mlPrimaryKey",
+            query,
+            facetFilters,
+            start,
+            count,
+            context.getAuthentication(),
+            new SearchFlags().setFulltext(true));
+    return UrnSearchResultsMapper.map(searchResult);
+  }
 
-    @Override
-    public AutoCompleteResults autoComplete(@Nonnull String query,
-        @Nullable String field,
-        @Nullable Filter filters,
-        int limit,
-        @Nonnull final QueryContext context) throws Exception {
-        final AutoCompleteResult result = _entityClient.autoComplete("mlPrimaryKey", query, filters, limit, context.getAuthentication());
-        return AutoCompleteResultsMapper.map(result);
-    }
+  @Override
+  public AutoCompleteResults autoComplete(
+      @Nonnull String query,
+      @Nullable String field,
+      @Nullable Filter filters,
+      int limit,
+      @Nonnull final QueryContext context)
+      throws Exception {
+    final AutoCompleteResult result =
+        _entityClient.autoComplete(
+            "mlPrimaryKey", query, filters, limit, context.getAuthentication());
+    return AutoCompleteResultsMapper.map(result);
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java
index c82909d49acbf..7db1216e1390d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java
@@ -2,22 +2,21 @@
 
 import com.linkedin.datahub.graphql.generated.BaseData;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
 public class BaseDataMapper implements ModelMapper<com.linkedin.ml.metadata.BaseData, BaseData> {
-    public static final BaseDataMapper INSTANCE = new BaseDataMapper();
+  public static final BaseDataMapper INSTANCE = new BaseDataMapper();
 
-    public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) {
-        return INSTANCE.apply(input);
-    }
+  public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) {
-        final BaseData result = new BaseData();
-        result.setDataset(input.getDataset().toString());
-        result.setMotivation(input.getMotivation());
-        result.setPreProcessing(input.getPreProcessing());
-        return result;
-    }
+  @Override
+  public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) {
+    final BaseData result = new BaseData();
+    result.setDataset(input.getDataset().toString());
+    result.setMotivation(input.getMotivation());
+    result.setPreProcessing(input.getPreProcessing());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java
index c19cb7bae2aff..108717f325f68 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java
@@ -2,29 +2,34 @@
 
 import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class CaveatsAndRecommendationsMapper implements ModelMapper<com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> {
+public class CaveatsAndRecommendationsMapper
+    implements ModelMapper<
+        com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> {
 
-    public static final CaveatsAndRecommendationsMapper INSTANCE = new CaveatsAndRecommendationsMapper();
+  public static final CaveatsAndRecommendationsMapper INSTANCE =
+      new CaveatsAndRecommendationsMapper();
 
-    public static CaveatsAndRecommendations map(@NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
-        return INSTANCE.apply(caveatsAndRecommendations);
-    }
+  public static CaveatsAndRecommendations map(
+      @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
+    return INSTANCE.apply(caveatsAndRecommendations);
+  }
 
-    @Override
-    public CaveatsAndRecommendations apply(com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
-        final CaveatsAndRecommendations result = new CaveatsAndRecommendations();
-        if (caveatsAndRecommendations.getCaveats() != null) {
-            result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats()));
-        }
-        if (caveatsAndRecommendations.getRecommendations() != null) {
-            result.setRecommendations(caveatsAndRecommendations.getRecommendations());
-        }
-        if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) {
-            result.setIdealDatasetCharacteristics(caveatsAndRecommendations.getIdealDatasetCharacteristics());
-        }
-        return result;
+  @Override
+  public CaveatsAndRecommendations apply(
+      com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) {
+    final CaveatsAndRecommendations result = new CaveatsAndRecommendations();
+    if (caveatsAndRecommendations.getCaveats() != null) {
+      result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats()));
+    }
+    if (caveatsAndRecommendations.getRecommendations() != null) {
+      result.setRecommendations(caveatsAndRecommendations.getRecommendations());
+    }
+    if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) {
+      result.setIdealDatasetCharacteristics(
+          caveatsAndRecommendations.getIdealDatasetCharacteristics());
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java
index 22617a8bc03e7..2226197e673f5 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java
@@ -2,24 +2,24 @@
 
 import com.linkedin.datahub.graphql.generated.CaveatDetails;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class CaveatsDetailsMapper implements ModelMapper<com.linkedin.ml.metadata.CaveatDetails, CaveatDetails> {
+public class CaveatsDetailsMapper
+    implements ModelMapper<com.linkedin.ml.metadata.CaveatDetails, CaveatDetails> {
 
-    public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper();
+  public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper();
 
-    public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
-        return INSTANCE.apply(input);
-    }
+  public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
-        final CaveatDetails result = new CaveatDetails();
+  @Override
+  public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) {
+    final CaveatDetails result = new CaveatDetails();
 
-        result.setCaveatDescription(input.getCaveatDescription());
-        result.setGroupsNotRepresented(input.getGroupsNotRepresented());
-        result.setNeedsFurtherTesting(input.isNeedsFurtherTesting());
-        return result;
-    }
+    result.setCaveatDescription(input.getCaveatDescription());
+    result.setGroupsNotRepresented(input.getGroupsNotRepresented());
+    result.setNeedsFurtherTesting(input.isNeedsFurtherTesting());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java
index 1d967619d43cb..8959e59265e14 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java
@@ -2,25 +2,27 @@
 
 import com.linkedin.datahub.graphql.generated.EthicalConsiderations;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class EthicalConsiderationsMapper implements ModelMapper<com.linkedin.ml.metadata.EthicalConsiderations, EthicalConsiderations> {
+public class EthicalConsiderationsMapper
+    implements ModelMapper<com.linkedin.ml.metadata.EthicalConsiderations, EthicalConsiderations> {
 
-    public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper();
+  public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper();
 
-    public static EthicalConsiderations map(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
-        return INSTANCE.apply(ethicalConsiderations);
-    }
+  public static EthicalConsiderations map(
+      @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
+    return INSTANCE.apply(ethicalConsiderations);
+  }
 
-    @Override
-    public EthicalConsiderations apply(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
-        final EthicalConsiderations result = new EthicalConsiderations();
-        result.setData(ethicalConsiderations.getData());
-        result.setHumanLife(ethicalConsiderations.getHumanLife());
-        result.setMitigations(ethicalConsiderations.getMitigations());
-        result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms());
-        result.setUseCases(ethicalConsiderations.getUseCases());
-        return result;
-    }
+  @Override
+  public EthicalConsiderations apply(
+      @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) {
+    final EthicalConsiderations result = new EthicalConsiderations();
+    result.setData(ethicalConsiderations.getData());
+    result.setHumanLife(ethicalConsiderations.getHumanLife());
+    result.setMitigations(ethicalConsiderations.getMitigations());
+    result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms());
+    result.setUseCases(ethicalConsiderations.getUseCases());
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java
index 73aa8db362a54..212db94081371 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java
@@ -3,26 +3,26 @@
 import com.linkedin.datahub.graphql.generated.HyperParameterMap;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
 import com.linkedin.ml.metadata.HyperParameterValueTypeMap;
-
 import lombok.NonNull;
 
-public class HyperParameterMapMapper implements ModelMapper<HyperParameterValueTypeMap, HyperParameterMap> {
-
-    public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper();
+public class HyperParameterMapMapper
+    implements ModelMapper<HyperParameterValueTypeMap, HyperParameterMap> {
 
-    public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) {
-        return INSTANCE.apply(input);
-    }
+  public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper();
 
-    @Override
-    public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) {
-        final HyperParameterMap result = new HyperParameterMap();
+  public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) {
+    return INSTANCE.apply(input);
+  }
 
-        for (String key: input.keySet()) {
-            result.setKey(key);
-            result.setValue(HyperParameterValueTypeMapper.map(input.get(key)));
-        }
+  @Override
+  public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) {
+    final HyperParameterMap result = new HyperParameterMap();
 
-        return result;
+    for (String key : input.keySet()) {
+      result.setKey(key);
+      result.setValue(HyperParameterValueTypeMapper.map(input.get(key)));
     }
+
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
index 6509b0e6cfa84..f60f34dd7a085 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java
@@ -6,34 +6,37 @@
 import com.linkedin.datahub.graphql.generated.IntBox;
 import com.linkedin.datahub.graphql.generated.StringBox;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
 import lombok.NonNull;
 
-public class HyperParameterValueTypeMapper implements ModelMapper<com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> {
+public class HyperParameterValueTypeMapper
+    implements ModelMapper<
+        com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> {
 
-    public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper();
+  public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper();
 
-    public static HyperParameterValueType map(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
-        return INSTANCE.apply(input);
-    }
+  public static HyperParameterValueType map(
+      @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
+    return INSTANCE.apply(input);
+  }
 
-    @Override
-    public HyperParameterValueType apply(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
-        HyperParameterValueType result = null;
+  @Override
+  public HyperParameterValueType apply(
+      @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) {
+    HyperParameterValueType result = null;
 
-        if (input.isString()) {
-            result = new StringBox(input.getString());
-        } else if (input.isBoolean()) {
-            result = new BooleanBox(input.getBoolean());
-        } else if (input.isInt()) {
-            result = new IntBox(input.getInt());
-        } else if (input.isDouble()) {
-            result = new FloatBox(input.getDouble());
-        } else if (input.isFloat()) {
-            result = new FloatBox(new Double(input.getFloat()));
-        } else {
-            throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString());
-        }
-        return result;
+    if (input.isString()) {
+      result = new StringBox(input.getString());
+    } else if (input.isBoolean()) {
+      result = new BooleanBox(input.getBoolean());
+    } else if (input.isInt()) {
+      result = new IntBox(input.getInt());
+    } else if (input.isDouble()) {
+      result = new FloatBox(input.getDouble());
+    } else if (input.isFloat()) {
+      result = new FloatBox(new Double(input.getFloat()));
+    } else {
+      throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString());
    }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java
index 47598bc2a3e4c..9f724ae71a55e 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java
@@ -1,29 +1,31 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
-import java.util.stream.Collectors;
-
 import com.linkedin.datahub.graphql.generated.IntendedUse;
 import com.linkedin.datahub.graphql.generated.IntendedUserType;
 import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
-
+import java.util.stream.Collectors;
 import lombok.NonNull;
 
-public class IntendedUseMapper implements ModelMapper<com.linkedin.ml.metadata.IntendedUse, IntendedUse> {
+public class IntendedUseMapper
+    implements ModelMapper<com.linkedin.ml.metadata.IntendedUse, IntendedUse> {
 
-    public static final IntendedUseMapper INSTANCE = new IntendedUseMapper();
+  public static final IntendedUseMapper INSTANCE = new IntendedUseMapper();
 
-    public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
-        return INSTANCE.apply(intendedUse);
-    }
+  public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
+    return INSTANCE.apply(intendedUse);
+  }
 
-    @Override
-    public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
-        final IntendedUse result = new IntendedUse();
-        result.setOutOfScopeUses(intendedUse.getOutOfScopeUses());
-        result.setPrimaryUses(intendedUse.getPrimaryUses());
-        if (intendedUse.getPrimaryUsers() != null) {
-            result.setPrimaryUsers(intendedUse.getPrimaryUsers().stream().map(v -> IntendedUserType.valueOf(v.toString())).collect(Collectors.toList()));
-        }
-        return result;
+  @Override
+  public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) {
+    final IntendedUse result = new IntendedUse();
+    result.setOutOfScopeUses(intendedUse.getOutOfScopeUses());
+    result.setPrimaryUses(intendedUse.getPrimaryUses());
+    if (intendedUse.getPrimaryUsers() != null) {
+      result.setPrimaryUsers(
+          intendedUse.getPrimaryUsers().stream()
+              .map(v -> IntendedUserType.valueOf(v.toString()))
+              .collect(Collectors.toList()));
     }
+    return result;
+  }
 }
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java
index 010ae477251f3..58e59edfa2e38 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java
@@ -1,10 +1,10 @@
 package com.linkedin.datahub.graphql.types.mlmodel.mappers;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.BrowsePathsV2;
 import com.linkedin.common.DataPlatformInstance;
 import com.linkedin.common.Deprecation;
-
-
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.GlossaryTerms;
 import com.linkedin.common.InstitutionalMemory;
@@ -37,91 +37,105 @@
 import com.linkedin.ml.metadata.MLFeatureProperties;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
-/**
- * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
- *
- */
+/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
*/ public class MLFeatureMapper implements ModelMapper { - public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); + public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); - public static MLFeature map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLFeature map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLFeature apply(@Nonnull final EntityResponse entityResponse) { - final MLFeature result = new MLFeature(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLFeature apply(@Nonnull final EntityResponse entityResponse) { + final MLFeature result = new MLFeature(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) -> + mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - 
dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + return mappingHelper.getResult(); + } + + private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); + mlFeature.setName(mlFeatureKey.getName()); + mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); + } + + private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); + mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setDescription(featureProperties.getDescription()); + if (featureProperties.getDataType() != null) { + mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); } - - private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); - mlFeature.setName(mlFeatureKey.getName()); - mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); - } - - private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setDescription(featureProperties.getDescription()); - if (featureProperties.getDataType() != null) { - mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); - } - } - - private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeature entity, DataMap dataMap) { - EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); - MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeature entity, DataMap dataMap) { + EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); + MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 9d647a38d2153..7bcefbc305192 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,44 +1,46 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; -import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; -public class MLFeaturePropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> { +public class MLFeaturePropertiesMapper + implements ModelMapper<com.linkedin.ml.metadata.MLFeatureProperties, MLFeatureProperties> { - public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); + public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); - public static MLFeatureProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(mlFeatureProperties); - } + public static MLFeatureProperties map( + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + return INSTANCE.apply(mlFeatureProperties); + } - @Override - public MLFeatureProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - final MLFeatureProperties result = new MLFeatureProperties(); + @Override + public MLFeatureProperties apply( + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + final MLFeatureProperties result = new MLFeatureProperties(); -
result.setDescription(mlFeatureProperties.getDescription()); - if (mlFeatureProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); - } - if (mlFeatureProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); - } - if (mlFeatureProperties.getSources() != null) { - result.setSources(mlFeatureProperties - .getSources() - .stream() - .map(urn -> { + result.setDescription(mlFeatureProperties.getDescription()); + if (mlFeatureProperties.getDataType() != null) { + result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); + } + if (mlFeatureProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); + } + if (mlFeatureProperties.getSources() != null) { + result.setSources( + mlFeatureProperties.getSources().stream() + .map( + urn -> { final Dataset dataset = new Dataset(); dataset.setUrn(urn.toString()); return dataset; - }) - .collect(Collectors.toList())); - } - - return result; + }) + .collect(Collectors.toList())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index 3ba9a76c4bdde..d074e14f95c82 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -1,9 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -36,90 +37,111 @@ import com.linkedin.ml.metadata.MLFeatureTableProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLFeatureTableMapper implements ModelMapper<EntityResponse, MLFeatureTable> { - public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); + public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); - public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { - final MLFeatureTable result = new MLFeatureTable(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { + final MLFeatureTable result = new MLFeatureTable(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE_TABLE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE_TABLE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) -> + MappingHelper<MLFeatureTable> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); - mappingHelper.mapToResult(ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) -> + mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + mlFeatureTable.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) -> + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains);
- mappingHelper.mapToResult(ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLFeatureTableKey(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { - MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); - mlFeatureTable.setName(mlFeatureTableKey.getName()); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); - mlFeatureTable.setPlatform(partialPlatform); - } - - private void mapMLFeatureTableProperties(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { - MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); - mlFeatureTable.setFeatureTableProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setDescription(featureTableProperties.getDescription()); - } - - private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { - EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); - MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + return mappingHelper.getResult(); + } + + private void mapMLFeatureTableKey( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { + MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); + mlFeatureTable.setName(mlFeatureTableKey.getName()); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); + mlFeatureTable.setPlatform(partialPlatform); + } + + private void mapMLFeatureTableProperties( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { + MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); + mlFeatureTable.setFeatureTableProperties( + MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + mlFeatureTable.setProperties( + MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + mlFeatureTable.setDescription(featureTableProperties.getDescription()); + } + + private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { + EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); + MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index 13e3c79599725..fff504d43c81a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -5,46 +5,55 @@ import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; public class MLFeatureTablePropertiesMapper { - public static final MLFeatureTablePropertiesMapper INSTANCE = new MLFeatureTablePropertiesMapper(); - - public static MLFeatureTableProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + public static final MLFeatureTablePropertiesMapper INSTANCE = + new MLFeatureTablePropertiesMapper(); + + public static MLFeatureTableProperties map( + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + } + + public MLFeatureTableProperties apply( + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + final MLFeatureTableProperties result = new MLFeatureTableProperties(); + + result.setDescription(mlFeatureTableProperties.getDescription()); + if (mlFeatureTableProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlFeatureTableProperties.getMlFeatures().stream() + .map( + urn -> { + final MLFeature mlFeature = new MLFeature(); + mlFeature.setUrn(urn.toString()); + return mlFeature; + }) + .collect(Collectors.toList())); } - public MLFeatureTableProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - final MLFeatureTableProperties result = new MLFeatureTableProperties(); - - result.setDescription(mlFeatureTableProperties.getDescription()); - if (mlFeatureTableProperties.getMlFeatures() != null) { - result.setMlFeatures(mlFeatureTableProperties.getMlFeatures().stream().map(urn -> { - final MLFeature mlFeature = new MLFeature(); - mlFeature.setUrn(urn.toString()); - return mlFeature; - }).collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { - result.setMlPrimaryKeys(mlFeatureTableProperties - .getMlPrimaryKeys() - .stream() - .map(urn -> { + if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { + result.setMlPrimaryKeys( + mlFeatureTableProperties.getMlPrimaryKeys().stream() + 
.map( + urn -> { final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey(); mlPrimaryKey.setUrn(urn.toString()); return mlPrimaryKey; - }) - .collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); - } + }) + .collect(Collectors.toList())); + } - return result + if (mlFeatureTableProperties.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java index 5cc242d0b19f2..bb3c85e411e71 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java @@ -2,25 +2,25 @@ import com.linkedin.datahub.graphql.generated.MLHyperParam; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class MLHyperParamMapper implements ModelMapper<com.linkedin.ml.metadata.MLHyperParam, MLHyperParam> { +public class MLHyperParamMapper + implements ModelMapper<com.linkedin.ml.metadata.MLHyperParam, MLHyperParam> { - public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); + public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); - public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - return INSTANCE.apply(input); - } + public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + return INSTANCE.apply(input); + } - @Override - public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - final MLHyperParam result = new MLHyperParam(); + @Override + public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + final MLHyperParam result = new MLHyperParam(); - result.setDescription(input.getDescription()); - result.setValue(input.getValue()); - result.setCreatedAt(input.getCreatedAt()); - result.setName(input.getName()); - return result; - } + result.setDescription(input.getDescription()); + result.setValue(input.getValue()); + result.setCreatedAt(input.getCreatedAt()); + result.setName(input.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java index 2545bd5f8a848..765a44d218567 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java @@ -4,22 +4,21 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import lombok.NonNull; - public class MLMetricMapper implements ModelMapper<com.linkedin.ml.metadata.MLMetric, MLMetric> { - public static final MLMetricMapper INSTANCE = new MLMetricMapper(); + public static final MLMetricMapper INSTANCE = new MLMetricMapper(); - public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - return INSTANCE.apply(metric); - } + public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + return INSTANCE.apply(metric); + } -
@Override - public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - final MLMetric result = new MLMetric(); - result.setDescription(metric.getDescription()); - result.setValue(metric.getValue()); - result.setCreatedAt(metric.getCreatedAt()); - result.setName(metric.getName()); - return result; - } + @Override + public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + final MLMetric result = new MLMetric(); + result.setDescription(metric.getDescription()); + result.setValue(metric.getValue()); + result.setCreatedAt(metric.getCreatedAt()); + result.setName(metric.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java index 0d32f7275e5fe..e86072ce3848e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java @@ -1,29 +1,36 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; -public class MLModelFactorPromptsMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelFactorPrompts, MLModelFactorPrompts> { +public class MLModelFactorPromptsMapper + implements ModelMapper<com.linkedin.ml.metadata.MLModelFactorPrompts, MLModelFactorPrompts> { - public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); + public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); - public static MLModelFactorPrompts map(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - return INSTANCE.apply(input); - } + public static MLModelFactorPrompts map( + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + return INSTANCE.apply(input); + } - @Override - public MLModelFactorPrompts apply(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); - if (input.getEvaluationFactors() != null) { - mlModelFactorPrompts.setEvaluationFactors(input.getEvaluationFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - if (input.getRelevantFactors() != null) { - mlModelFactorPrompts.setRelevantFactors(input.getRelevantFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - return mlModelFactorPrompts + @Override + public MLModelFactorPrompts apply( + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); + if (input.getEvaluationFactors() != null) { + mlModelFactorPrompts.setEvaluationFactors( + input.getEvaluationFactors().stream() + .map(MLModelFactorsMapper::map) + .collect(Collectors.toList())); + } + if (input.getRelevantFactors() != null) { + mlModelFactorPrompts.setRelevantFactors( + input.getRelevantFactors().stream() + .map(MLModelFactorsMapper::map) + .collect(Collectors.toList())); } + return mlModelFactorPrompts; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java index aa4737dfd229c..3b212eca52801 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java @@ -1,32 +1,33 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.ArrayList; - import com.linkedin.datahub.graphql.generated.MLModelFactors; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.ArrayList; import lombok.NonNull; -public class MLModelFactorsMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelFactors, MLModelFactors> { +public class MLModelFactorsMapper + implements ModelMapper<com.linkedin.ml.metadata.MLModelFactors, MLModelFactors> { - public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); + public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); - public static MLModelFactors map(@NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { - return INSTANCE.apply(modelFactors); - } + public static MLModelFactors map( + @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { + return INSTANCE.apply(modelFactors); + } - @Override - public MLModelFactors apply(@NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { - final MLModelFactors result = new MLModelFactors(); - if (mlModelFactors.getEnvironment() != null) { - result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); - } - if (mlModelFactors.getGroups() != null) { - result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); - } - if (mlModelFactors.getInstrumentation() != null) { - result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); - } - return result + @Override + public MLModelFactors apply( + @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { + final MLModelFactors result = new MLModelFactors(); + if (mlModelFactors.getEnvironment() != null) { + result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); + } + if (mlModelFactors.getGroups() != null) { + result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); + } + if (mlModelFactors.getInstrumentation() != null) { + result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index 311ee121bcaf9..cc9baaa33a660 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -34,90 +36,102 @@ import com.linkedin.ml.metadata.MLModelGroupProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema.
*/ public class MLModelGroupMapper implements ModelMapper<EntityResponse, MLModelGroup> { - public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); + public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); - public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { - final MLModelGroup result = new MLModelGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { + final MLModelGroup result = new MLModelGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLModelGroup> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) -> + MappingHelper<MLModelGroup> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); - mappingHelper.mapToResult(ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) -> + mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); + mappingHelper.mapToResult( + ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn));
+ mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); - mlModelGroup.setName(mlModelGroupKey.getName()); - mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); - mlModelGroup.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); + mlModelGroup.setName(mlModelGroupKey.getName()); + mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); + mlModelGroup.setPlatform(partialPlatform); + } + + private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); + mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); + if (modelGroupProperties.getDescription() != null) { + mlModelGroup.setDescription(modelGroupProperties.getDescription()); } - - private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); - if (modelGroupProperties.getDescription() != null) { - mlModelGroup.setDescription(modelGroupProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { - EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); - MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { + EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); + MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index 9a12d7917e648..bae60a026b49a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,28 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import lombok.NonNull; -public class MLModelGroupPropertiesMapper implements ModelMapper<com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { - - public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); +public class MLModelGroupPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { - public static MLModelGroupProperties map(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(mlModelGroupProperties); - } + public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); - @Override - public MLModelGroupProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - final MLModelGroupProperties result = new MLModelGroupProperties(); + public static MLModelGroupProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + return INSTANCE.apply(mlModelGroupProperties); + } - result.setDescription(mlModelGroupProperties.getDescription()); - if (mlModelGroupProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); - } - result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + @Override + public
MLModelGroupProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + final MLModelGroupProperties result = new MLModelGroupProperties(); - return result + result.setDescription(mlModelGroupProperties.getDescription()); + if (mlModelGroupProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); } + result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 0c2eeabe5701d..827b35c282237 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.Cost; import com.linkedin.common.DataPlatformInstance; @@ -48,124 +50,165 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelMapper implements ModelMapper<EntityResponse, MLModel> { - public static final MLModelMapper INSTANCE = new MLModelMapper(); + public static final MLModelMapper INSTANCE = new MLModelMapper(); - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModel map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { - final MLModel result = new MLModel(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModel apply(@Nonnull final EntityResponse entityResponse) { + final MLModel result = new MLModel(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper<MLModel> mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModel, dataMap) -> + MappingHelper<MLModel> mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); -
mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); - mappingHelper.mapToResult(INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); + mappingHelper.mapToResult( + INTENDED_USE_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); - mappingHelper.mapToResult(ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setFactorPrompts(MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); - mappingHelper.mapToResult(METRICS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); - mappingHelper.mapToResult(EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEvaluationData(new EvaluationData(dataMap).getEvaluationData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(TRAINING_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setTrainingData(new TrainingData(dataMap).getTrainingData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setQuantitativeAnalyses(QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); - mappingHelper.mapToResult(ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEthicalConsiderations(EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); - mappingHelper.mapToResult(CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCaveatsAndRecommendations(CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(COST_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCost(CostMapper.map(new Cost(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setFactorPrompts( + MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); + mappingHelper.mapToResult( + METRICS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); + mappingHelper.mapToResult( + EVALUATION_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEvaluationData( + new EvaluationData(dataMap) + .getEvaluationData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + TRAINING_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setTrainingData( + new TrainingData(dataMap) + .getTrainingData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + QUANTITATIVE_ANALYSES_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setQuantitativeAnalyses( + QuantitativeAnalysesMapper.map(new 
QuantitativeAnalyses(dataMap)))); + mappingHelper.mapToResult( + ETHICAL_CONSIDERATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEthicalConsiderations( + EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); + mappingHelper.mapToResult( + CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setCaveatsAndRecommendations( + CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + COST_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(new Cost(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { - MLModelKey mlModelKey = new MLModelKey(dataMap); - mlModel.setName(mlModelKey.getName()); - mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelKey.getPlatform().toString()); - mlModel.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + MLModelKey mlModelKey = new MLModelKey(dataMap); + mlModel.setName(mlModelKey.getName()); + mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelKey.getPlatform().toString()); + mlModel.setPlatform(partialPlatform); + } + + private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + MLModelProperties modelProperties = new MLModelProperties(dataMap); + 
mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); + if (modelProperties.getDescription() != null) { + mlModel.setDescription(modelProperties.getDescription()); } - - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); - if (modelProperties.getDescription() != null) { - mlModel.setDescription(modelProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - mlModel.setGlobalTags(graphQlGlobalTags); - mlModel.setTags(graphQlGlobalTags); - } - - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { - SourceCode sourceCode = new SourceCode(dataMap); - com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = - new com.linkedin.datahub.graphql.generated.SourceCode(); - graphQlSourceCode.setSourceCode(sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map).collect(Collectors.toList())); - mlModel.setSourceCode(graphQlSourceCode); - } - - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModel entity, DataMap dataMap) { - EditableMLModelProperties input = new EditableMLModelProperties(dataMap); - MLModelEditableProperties editableProperties = new MLModelEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + mlModel.setGlobalTags(graphQlGlobalTags); + mlModel.setTags(graphQlGlobalTags); + } + + private void mapSourceCode(MLModel mlModel, DataMap dataMap) { + SourceCode sourceCode = new SourceCode(dataMap); + com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = + new com.linkedin.datahub.graphql.generated.SourceCode(); + graphQlSourceCode.setSourceCode( + sourceCode.getSourceCode().stream() + .map(SourceCodeUrlMapper::map) + .collect(Collectors.toList())); + mlModel.setSourceCode(graphQlSourceCode); + } + + private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModel entity, DataMap dataMap) { + EditableMLModelProperties input = new EditableMLModelProperties(dataMap); + MLModelEditableProperties editableProperties = new MLModelEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index 554c14e9a4a56..f2781f5bca5c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,65 +1,71 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.MLModelGroup; +import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; - -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.MLModelProperties; - import lombok.NonNull; public class MLModelPropertiesMapper { - public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); + public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); - public static MLModelProperties map(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return INSTANCE.apply(mlModelProperties, entityUrn); - } + public static MLModelProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + return INSTANCE.apply(mlModelProperties, entityUrn); + } - public MLModelProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - final MLModelProperties result = new MLModelProperties(); + public MLModelProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + final MLModelProperties result = new MLModelProperties(); - result.setDate(mlModelProperties.getDate()); - result.setDescription(mlModelProperties.getDescription()); - if (mlModelProperties.getExternalUrl() != null) { - result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); - } - if (mlModelProperties.getVersion() != null) { - result.setVersion(mlModelProperties.getVersion().getVersionTag()); - } - result.setType(mlModelProperties.getType()); - if (mlModelProperties.getHyperParams() != null) { - result.setHyperParams(mlModelProperties.getHyperParams().stream().map( - param -> MLHyperParamMapper.map(param)).collect(Collectors.toList())); - } - - result.setCustomProperties(CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); + result.setDate(mlModelProperties.getDate()); + result.setDescription(mlModelProperties.getDescription()); + if (mlModelProperties.getExternalUrl() != null) { + result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); + } + if (mlModelProperties.getVersion() != null) { + result.setVersion(mlModelProperties.getVersion().getVersionTag()); + } + 
result.setType(mlModelProperties.getType()); + if (mlModelProperties.getHyperParams() != null) { + result.setHyperParams( + mlModelProperties.getHyperParams().stream() + .map(param -> MLHyperParamMapper.map(param)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getTrainingMetrics() != null) { - result.setTrainingMetrics(mlModelProperties.getTrainingMetrics().stream().map(metric -> - MLMetricMapper.map(metric) - ).collect(Collectors.toList())); - } + result.setCustomProperties( + CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); - if (mlModelProperties.getGroups() != null) { - result.setGroups(mlModelProperties.getGroups().stream().map(group -> { - final MLModelGroup subgroup = new MLModelGroup(); - subgroup.setUrn(group.toString()); - return subgroup; - }).collect(Collectors.toList())); - } + if (mlModelProperties.getTrainingMetrics() != null) { + result.setTrainingMetrics( + mlModelProperties.getTrainingMetrics().stream() + .map(metric -> MLMetricMapper.map(metric)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getMlFeatures() != null) { - result.setMlFeatures(mlModelProperties - .getMlFeatures() - .stream() - .map(Urn::toString) - .collect(Collectors.toList())); - } - result.setTags(mlModelProperties.getTags()); + if (mlModelProperties.getGroups() != null) { + result.setGroups( + mlModelProperties.getGroups().stream() + .map( + group -> { + final MLModelGroup subgroup = new MLModelGroup(); + subgroup.setUrn(group.toString()); + return subgroup; + }) + .collect(Collectors.toList())); + } - return result; + if (mlModelProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlModelProperties.getMlFeatures().stream() + .map(Urn::toString) + .collect(Collectors.toList())); } + result.setTags(mlModelProperties.getTags()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index 0bd5db4d884ae..a8efd748401f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -33,88 +35,102 @@ import com.linkedin.ml.metadata.MLPrimaryKeyProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLPrimaryKeyMapper implements ModelMapper { - public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); + public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { - final MLPrimaryKey result = new MLPrimaryKey(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { + final MLPrimaryKey result = new MLPrimaryKey(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLPRIMARY_KEY); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLPRIMARY_KEY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); - mappingHelper.mapToResult(ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, 
this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + return mappingHelper.getResult(); + } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); - mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); - mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); - } + private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); + mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); + mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); + } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); - if (primaryKeyProperties.getDataType() != null) { - mlPrimaryKey.setDataType(MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); - } + private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); + mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); + if (primaryKeyProperties.getDataType() != null) { + mlPrimaryKey.setDataType( + MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); } + } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } + private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } + private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { - EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); - MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); + MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 39ecd96af182f..16d6120cd9dff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -4,39 +4,43 @@ import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; -public class MLPrimaryKeyPropertiesMapper implements ModelMapper { +public class MLPrimaryKeyPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { - public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); + public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); - public static MLPrimaryKeyProperties map(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); - } + public static MLPrimaryKeyProperties map( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + return INSTANCE.apply(mlPrimaryKeyProperties); + } - @Override - public MLPrimaryKeyProperties apply(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); + @Override + public MLPrimaryKeyProperties apply( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); - result.setDescription(mlPrimaryKeyProperties.getDescription()); - if (mlPrimaryKeyProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); - } - if (mlPrimaryKeyProperties.getVersion() != null) { - 
result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); - } - result.setSources(mlPrimaryKeyProperties - .getSources() - .stream() - .map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }) + result.setDescription(mlPrimaryKeyProperties.getDescription()); + if (mlPrimaryKeyProperties.getDataType() != null) { + result.setDataType( + MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); + } + if (mlPrimaryKeyProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); + } + result.setSources( + mlPrimaryKeyProperties.getSources().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) .collect(Collectors.toList())); - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 05b34ba3acb9c..76fa8c84e9571 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class MetricsMapper implements ModelMapper { - public static final MetricsMapper INSTANCE = new MetricsMapper(); + public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); - } + public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(metrics); + } - @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - final Metrics result = new Metrics(); - result.setDecisionThreshold(metrics.getDecisionThreshold()); - result.setPerformanceMeasures(metrics.getPerformanceMeasures()); - return result; - } + @Override + public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + final Metrics result = new Metrics(); + result.setDecisionThreshold(metrics.getDecisionThreshold()); + result.setPerformanceMeasures(metrics.getPerformanceMeasures()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index 8bd25a4474579..e46cb0a074bd7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -2,22 +2,25 @@ import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class QuantitativeAnalysesMapper implements ModelMapper { +public class QuantitativeAnalysesMapper + implements ModelMapper { - public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); + public static final QuantitativeAnalysesMapper INSTANCE = 
new QuantitativeAnalysesMapper(); - public static QuantitativeAnalyses map(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); - } + public static QuantitativeAnalyses map( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + return INSTANCE.apply(quantitativeAnalyses); + } - @Override - public QuantitativeAnalyses apply(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - final QuantitativeAnalyses result = new QuantitativeAnalyses(); - result.setIntersectionalResults(ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); - return result; - } + @Override + public QuantitativeAnalyses apply( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + final QuantitativeAnalyses result = new QuantitativeAnalyses(); + result.setIntersectionalResults( + ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 78292f08f8cad..4b6529c59db3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -3,25 +3,25 @@ import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class ResultsTypeMapper implements ModelMapper { +public class ResultsTypeMapper + implements ModelMapper { - public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); + public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); - } + public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(input); + } - @Override - public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - final ResultsType result; - if (input.isString()) { - result = new StringBox(input.getString()); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); - } - return result; + @Override + public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + final ResultsType result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index 79dbd2cded4c2..b6bd5efdc4217 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class SourceCodeUrlMapper implements ModelMapper { - public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); +public class SourceCodeUrlMapper + implements ModelMapper { + public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); - } + public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(input); + } - @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - final SourceCodeUrl results = new SourceCodeUrl(); - results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); - results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); - return results; - } + @Override + public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + final SourceCodeUrl results = new SourceCodeUrl(); + results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); + results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 6ad0945b0621f..5758a52538c1e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -2,20 +2,22 @@ import com.linkedin.common.VersionTag; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class VersionTagMapper implements ModelMapper { - public static final VersionTagMapper INSTANCE = new VersionTagMapper(); +public class VersionTagMapper + implements ModelMapper { + public static final VersionTagMapper INSTANCE = new VersionTagMapper(); - public static com.linkedin.datahub.graphql.generated.VersionTag map(@Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); - } + public static com.linkedin.datahub.graphql.generated.VersionTag map( + @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(versionTag); + } - @Override - public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { - final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); - result.setVersionTag(input.getVersionTag()); - return result; - } + @Override + public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { + final com.linkedin.datahub.graphql.generated.VersionTag result = + new com.linkedin.datahub.graphql.generated.VersionTag(); + result.setVersionTag(input.getVersionTag()); + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index 080cdeba09f19..b6990c3816b53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.notebook; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -18,25 +21,25 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; +import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.BrowsableEntityType; import com.linkedin.datahub.graphql.types.MutableType; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookUpdateInputMapper; -import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -53,25 +56,25 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class NotebookType implements SearchableEntityType, BrowsableEntityType, - MutableType { - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - NOTEBOOK_KEY_ASPECT_NAME, - NOTEBOOK_INFO_ASPECT_NAME, - NOTEBOOK_CONTENT_ASPECT_NAME, - 
EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); +public class NotebookType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + NOTEBOOK_KEY_ASPECT_NAME, + NOTEBOOK_INFO_ASPECT_NAME, + NOTEBOOK_CONTENT_ASPECT_NAME, + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); private final EntityClient _entityClient; @@ -80,44 +83,68 @@ public NotebookType(EntityClient entityClient) { } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { + @Nonnull final QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map facetFilters = Collections.emptyMap(); - final SearchResult searchResult = _entityClient.search(NOTEBOOK_ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + NOTEBOOK_ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map facetFilters = Collections.emptyMap(); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse(NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); return BrowseResultMapper.map(result); } @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(NotebookUrn.createFromString(urn), context.getAuthentication()); + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + NotebookUrn.createFromString(urn), context.getAuthentication()); return BrowsePathsMapper.map(result); } @@ -137,22 +164,26 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map notebookMap = _entityClient.batchGetV2(NOTEBOOK_ENTITY_NAME, new HashSet<>(urns), - ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map notebookMap = + _entityClient.batchGetV2( + NOTEBOOK_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); return urns.stream() .map(urn -> notebookMap.getOrDefault(urn, null)) - .map(entityResponse -> entityResponse == null - ? null - : DataFetcherResult.newResult() - .data(NotebookMapper.map(entityResponse)) - .build()) + .map( + entityResponse -> + entityResponse == null + ? null + : DataFetcherResult.newResult() + .data(NotebookMapper.map(entityResponse)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Notebook", e); @@ -165,13 +196,16 @@ public Class inputClass() { } @Override - public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) + public Notebook update( + @Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) throws Exception { if (!isAuthorized(urn, input, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); Collection proposals = NotebookUpdateInputMapper.map(input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); @@ -184,7 +218,8 @@ public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, return load(urn, context).getData(); } - private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { // Decide whether the current principal should be allowed to update the Dataset. 
final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( @@ -197,9 +232,9 @@ private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput u private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); List specificPrivileges = new ArrayList<>(); if (updateInput.getOwnership() != null) { @@ -211,12 +246,12 @@ private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateIn if (updateInput.getTags() != null) { specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index 2b937c86c9779..a263e31b26faf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.GlobalTags; @@ -26,11 +28,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; import com.linkedin.datahub.graphql.types.common.mappers.ChangeAuditStampsMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -45,8 +47,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class NotebookMapper implements ModelMapper { public static final 
NotebookMapper INSTANCE = new NotebookMapper(); @@ -64,41 +64,59 @@ public Notebook apply(EntityResponse response) { EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); - mappingHelper.mapToResult(NOTEBOOK_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + NOTEBOOK_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); - mappingHelper.mapToResult(EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> notebook.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn) - )); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> - notebook.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); return mappingHelper.getResult(); } private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); - notebook.setPlatform(DataPlatform - .builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(dataPlatformInstance.getPlatform().toString()) - .build()); - notebook.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new 
DataPlatformInstance(dataMap))); + notebook.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(dataPlatformInstance.getPlatform().toString()) + .build()); + notebook.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); } private void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); + com.linkedin.datahub.graphql.generated.SubTypes subTypes = + new com.linkedin.datahub.graphql.generated.SubTypes(); subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); notebook.setSubTypes(subTypes); } @@ -110,11 +128,14 @@ private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); + private void mapNotebookInfo( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = + new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); notebookInfo.setTitle(gmsNotebookInfo.getTitle()); - notebookInfo.setChangeAuditStamps(ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + notebookInfo.setChangeAuditStamps( + ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -122,40 +143,46 @@ private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMa } if (gmsNotebookInfo.hasCustomProperties()) { - notebookInfo.setCustomProperties(CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); + notebookInfo.setCustomProperties( + CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); } notebook.setInfo(notebookInfo); } private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - com.linkedin.notebook.NotebookContent pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); + com.linkedin.notebook.NotebookContent pegasusNotebookContent = + new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List mapNotebookCells(com.linkedin.notebook.NotebookCellArray pegasusCells) { + private List mapNotebookCells( + com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() - .map(pegasusCell -> { - NotebookCell notebookCell = new NotebookCell(); - NotebookCellType cellType = NotebookCellType.valueOf(pegasusCell.getType().toString()); - notebookCell.setType(cellType); - switch (cellType) { - case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); - break; - case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); - break; - case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); - break; - default: - throw new DataHubGraphQLException(String.format("Un-supported NotebookCellType: 
%s", cellType), - DataHubGraphQLErrorCode.SERVER_ERROR); - } - return notebookCell; - }) + .map( + pegasusCell -> { + NotebookCell notebookCell = new NotebookCell(); + NotebookCellType cellType = + NotebookCellType.valueOf(pegasusCell.getType().toString()); + notebookCell.setType(cellType); + switch (cellType) { + case CHART_CELL: + notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); + break; + case TEXT_CELL: + notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); + break; + case QUERY_CELL: + notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); + break; + default: + throw new DataHubGraphQLException( + String.format("Un-supported NotebookCellType: %s", cellType), + DataHubGraphQLErrorCode.SERVER_ERROR); + } + return notebookCell; + }) .collect(Collectors.toList()); } @@ -163,7 +190,8 @@ private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); - chartCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + chartCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); return chartCell; } @@ -171,7 +199,8 @@ private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); - textCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + textCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } @@ -180,7 +209,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); - queryCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + queryCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); @@ -189,7 +219,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) } private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); + final EditableNotebookProperties editableNotebookProperties = + new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); notebookEditableProperties.setDescription(editableNotebookProperties.getDescription()); notebook.setEditableProperties(notebookEditableProperties); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0c3787d630500..0d6c70e07053f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,16 +19,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class NotebookUpdateInputMapper implements InputModelMapper, - Urn> { +public class NotebookUpdateInputMapper + implements InputModelMapper, Urn> { public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); - public static Collection map(@Nonnull final NotebookUpdateInput notebookUpdateInput, - @Nonnull final Urn actor) { + public static Collection map( + @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(notebookUpdateInput, actor); } @@ -39,27 +38,32 @@ public Collection apply(NotebookUpdateInput input, Urn a auditStamp.setTime(System.currentTimeMillis()); if (input.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(input.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(input.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + input.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (input.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); editableDashboardProperties.setDescription(input.getEditableProperties().getDescription()); if (!editableDashboardProperties.hasCreated()) { editableDashboardProperties.setCreated(auditStamp); } editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index 79f95ac8439a5..f7ed4c59a805a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import 
com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; @@ -20,14 +22,12 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class OwnershipType implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +46,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, - @Nonnull QueryContext context) throws Exception { - final List ownershipTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List ownershipTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(OWNERSHIP_TYPE_ENTITY_NAME, new HashSet<>(ownershipTypeUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + OWNERSHIP_TYPE_ENTITY_NAME, + new HashSet<>(ownershipTypeUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -60,12 +64,16 @@ public List> batchLoad(@Nonnull List gmsResult == null ? null : DataFetcherResult.newResult() - .data(OwnershipTypeMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(OwnershipTypeMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Custom Ownership Types", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 37b59b679e3ac..9eebe95df8d8c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -14,9 +16,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipTypeMapper implements ModelMapper { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); @@ -34,12 +33,14 @@ public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { EnvelopedAspectMap aspectMap = input.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); return mappingHelper.getResult(); } - private void mapOwnershipTypeInfo(@Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { + private void mapOwnershipTypeInfo( + @Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { final com.linkedin.ownership.OwnershipTypeInfo gmsOwnershipTypeInfo = new com.linkedin.ownership.OwnershipTypeInfo(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 167e1615fc4cc..318818b8a2140 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -25,9 +27,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubPolicyMapper implements 
ModelMapper { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); @@ -71,16 +70,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { // Change here is not executed at the moment - leaving it for the future UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -102,14 +105,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(this::mapValue) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -117,7 +126,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. 
Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 4cec59009af3f..3dea9046dcf36 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubPolicyType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubPolicyType + implements com.linkedin.datahub.graphql.types.EntityType { static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_POLICY_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List> batchLoad(@Nonnull List ur gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubPolicyMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataHubPolicyMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index 791197c7d47e4..f35111f78a694 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.post; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,9 +18,6 @@ import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class PostMapper implements ModelMapper { public static final PostMapper INSTANCE = new PostMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index cf77821b1a280..2bdcda3592608 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; @@ -21,9 +23,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class QueryMapper implements ModelMapper { public static final QueryMapper INSTANCE = new QueryMapper(); @@ -47,13 +46,15 @@ public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); - com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); + com.linkedin.datahub.graphql.generated.QueryProperties res = + new com.linkedin.datahub.graphql.generated.QueryProperties(); // Query Source must be kept in sync. 
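The "kept in sync" requirement noted in the comment above comes from the name-based valueOf round-trip used just below it. A minimal sketch of that pattern, using two hypothetical stand-in enums rather than the generated Pegasus and GraphQL QuerySource types:

    // Sketch only: PegasusQuerySource and GraphQlQuerySource are hypothetical
    // stand-ins for the two generated QuerySource enums referenced in the diff.
    enum PegasusQuerySource { MANUAL, SYSTEM }
    enum GraphQlQuerySource { MANUAL, SYSTEM }

    class QuerySourceRoundTrip {
      static GraphQlQuerySource map(PegasusQuerySource source) {
        // Mirrors res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())).
        // The conversion is purely name-based, so the two enums must declare the
        // same constants: a value added on the Pegasus side but missing on the
        // GraphQL side makes valueOf throw IllegalArgumentException at map time.
        return GraphQlQuerySource.valueOf(source.toString());
      }

      public static void main(String[] args) {
        System.out.println(map(PegasusQuerySource.MANUAL)); // prints MANUAL
      }
    }
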
res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())); - res.setStatement(new QueryStatement( - queryProperties.getStatement().getValue(), - QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); + res.setStatement( + new QueryStatement( + queryProperties.getStatement().getValue(), + QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); @@ -73,10 +74,10 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat @Nonnull private void mapQuerySubjects(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QuerySubjects querySubjects = new QuerySubjects(dataMap); - List res = querySubjects.getSubjects() - .stream() - .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) - .collect(Collectors.toList()); + List res = + querySubjects.getSubjects().stream() + .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) + .collect(Collectors.toList()); query.setSubjects(res); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index c138cd56f20b3..0c1fd33e38110 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -20,14 +22,11 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class QueryType implements com.linkedin.datahub.graphql.types.EntityType { - public static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - QUERY_PROPERTIES_ASPECT_NAME, - QUERY_SUBJECTS_ASPECT_NAME); +public class QueryType + implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +45,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(QUERY_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + QUERY_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -60,11 +62,16 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(QueryMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(QueryMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index e1762022f4bcb..db086e682d57c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DataFlowDataJobsRelationshipsMapper implements - ModelMapper { +public class DataFlowDataJobsRelationshipsMapper + implements ModelMapper { - public static final DataFlowDataJobsRelationshipsMapper INSTANCE = new DataFlowDataJobsRelationshipsMapper(); + public static final DataFlowDataJobsRelationshipsMapper INSTANCE = + new DataFlowDataJobsRelationshipsMapper(); - public static DataFlowDataJobsRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DataFlowDataJobsRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DataFlowDataJobsRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DataFlowDataJobsRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 824e1181c5871..4df64c7ecb85e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DownstreamEntityRelationshipsMapper implements - ModelMapper { +public class DownstreamEntityRelationshipsMapper + 
implements ModelMapper { - public static final DownstreamEntityRelationshipsMapper INSTANCE = new DownstreamEntityRelationshipsMapper(); + public static final DownstreamEntityRelationshipsMapper INSTANCE = + new DownstreamEntityRelationshipsMapper(); - public static DownstreamEntityRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DownstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DownstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DownstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index 58f4f477bc7e6..e3743804b4908 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -5,28 +5,32 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class EntityRelationshipLegacyMapper implements ModelMapper { +public class EntityRelationshipLegacyMapper + implements ModelMapper { - public static final EntityRelationshipLegacyMapper INSTANCE = new EntityRelationshipLegacyMapper(); + public static final EntityRelationshipLegacyMapper INSTANCE = + new EntityRelationshipLegacyMapper(); - public static EntityRelationshipLegacy map(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); - } + public static EntityRelationshipLegacy map( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + return INSTANCE.apply(relationship); + } - @Override - public EntityRelationshipLegacy apply(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); + @Override + public EntityRelationshipLegacy apply( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); - EntityWithRelationships partialLineageEntity = (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); - if (partialLineageEntity != null) { - result.setEntity(partialLineageEntity); - } - if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); - } - return 
result; + EntityWithRelationships partialLineageEntity = + (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); + if (partialLineageEntity != null) { + result.setEntity(partialLineageEntity); + } + if (relationship.hasCreated()) { + result.setCreated(AuditStampMapper.map(relationship.getCreated())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 7db5e08c73fc6..832e1bb396b3b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -2,24 +2,28 @@ import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class UpstreamEntityRelationshipsMapper implements ModelMapper { +public class UpstreamEntityRelationshipsMapper + implements ModelMapper { - public static final UpstreamEntityRelationshipsMapper INSTANCE = new UpstreamEntityRelationshipsMapper(); + public static final UpstreamEntityRelationshipsMapper INSTANCE = + new UpstreamEntityRelationshipsMapper(); - public static UpstreamEntityRelationships map(@Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static UpstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public UpstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public UpstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 8c6496390943b..9521945770195 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -21,11 +23,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubRoleType implements 
com.linkedin.datahub.graphql.types.EntityType { +public class DataHubRoleType + implements com.linkedin.datahub.graphql.types.EntityType { static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_ROLE_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -45,13 +45,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -59,8 +62,13 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubRoleMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubRoleMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 5ba31a1602780..7a467886fc084 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; @@ -10,9 +12,6 @@ import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubRoleMapper implements ModelMapper { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index 084c4d5033ad0..d51e0d06c0fda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -11,9 +11,9 @@ import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.SearchableEntityType; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.metadata.Constants; @@ -22,9 +22,6 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -33,88 +30,101 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class RoleType implements SearchableEntityType, +public class RoleType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ROLE_KEY, - Constants.ROLE_PROPERTIES_ASPECT_NAME, - Constants.ROLE_ACTORS_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ROLE_KEY, + Constants.ROLE_PROPERTIES_ASPECT_NAME, + Constants.ROLE_ACTORS_ASPECT_NAME); - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public RoleType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public RoleType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.ROLE; - } + @Override + public EntityType type() { + return EntityType.ROLE; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return Role.class; - } + @Override + public Class objectClass() { + return Role.class; + } - @Override - public List> batchLoad(@Nonnull List urns, - @Nonnull QueryContext context) throws Exception { - final List externalRolesUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List externalRolesUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map entities = _entityClient.batchGetV2( - Constants.ROLE_ENTITY_NAME, - new HashSet<>(externalRolesUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map entities = + _entityClient.batchGetV2( + Constants.ROLE_ENTITY_NAME, + new HashSet<>(externalRolesUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List gmsResults = new ArrayList<>(); - for (Urn urn : externalRolesUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(RoleMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Role", e); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : externalRolesUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult().data(RoleMapper.map(gmsResult)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Role", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search(Constants.ROLE_ENTITY_NAME, - query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.ROLE_ENTITY_NAME, - query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + Constants.ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.ROLE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java index cabace1a52441..3eb090e452439 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java @@ -1,41 +1,39 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.RoleAssociation; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class AccessMapper { - public static final AccessMapper INSTANCE = new AccessMapper(); - - public static com.linkedin.datahub.graphql.generated.Access map( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - return INSTANCE.apply(access, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.Access apply( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.Access result = new com.linkedin.datahub.graphql.generated.Access(); - result.setRoles(access.getRoles().stream().map( - association -> this.mapRoleAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } - - private 
RoleAssociation mapRoleAssociation(com.linkedin.common.RoleAssociation association, Urn entityUrn) { - RoleAssociation roleAssociation = new RoleAssociation(); - Role role = new Role(); - role.setType(EntityType.ROLE); - role.setUrn(association.getUrn().toString()); - roleAssociation.setRole(role); - roleAssociation.setAssociatedUrn(entityUrn.toString()); - return roleAssociation; - } - + public static final AccessMapper INSTANCE = new AccessMapper(); + + public static com.linkedin.datahub.graphql.generated.Access map( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(access, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Access apply( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.Access result = + new com.linkedin.datahub.graphql.generated.Access(); + result.setRoles( + access.getRoles().stream() + .map(association -> this.mapRoleAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private RoleAssociation mapRoleAssociation( + com.linkedin.common.RoleAssociation association, Urn entityUrn) { + RoleAssociation roleAssociation = new RoleAssociation(); + Role role = new Role(); + role.setType(EntityType.ROLE); + role.setUrn(association.getUrn().toString()); + roleAssociation.setRole(role); + roleAssociation.setAssociatedUrn(entityUrn.toString()); + return roleAssociation; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index 3cb0ec942a457..df18b7c89fafc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -15,79 +15,77 @@ import com.linkedin.metadata.key.RoleKey; import com.linkedin.role.Actors; import com.linkedin.role.RoleUserArray; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class RoleMapper implements ModelMapper { - public static final RoleMapper INSTANCE = new RoleMapper(); - - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final RoleMapper INSTANCE = new RoleMapper(); + + public static Role map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { + final RoleProperties propertiesResult = new RoleProperties(); + propertiesResult.setName(e.getName()); + propertiesResult.setDescription(e.getDescription()); + propertiesResult.setType(e.getType()); + propertiesResult.setRequestUrl(e.getRequestUrl()); + + return propertiesResult; + } + + private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { + RoleUser result = new RoleUser(); + CorpUser corpUser = new CorpUser(); + corpUser.setUrn(provisionedUser.getUser().toString()); + result.setUser(corpUser); + return result; + } + + private static Actor mapActor(Actors actors) { + Actor actor = new Actor(); + actor.setUsers(mapRoleUsers(actors.getUsers())); + return actor; + } + + private static List mapRoleUsers(RoleUserArray users) { + if (users == null) 
{ + return null; } + return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + } - private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { - final RoleProperties propertiesResult = new RoleProperties(); - propertiesResult.setName(e.getName()); - propertiesResult.setDescription(e.getDescription()); - propertiesResult.setType(e.getType()); - propertiesResult.setRequestUrl(e.getRequestUrl()); + @Override + public Role apply(EntityResponse input) { - return propertiesResult; - } + final Role result = new Role(); + final Urn entityUrn = input.getUrn(); - private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { - RoleUser result = new RoleUser(); - CorpUser corpUser = new CorpUser(); - corpUser.setUrn(provisionedUser.getUser().toString()); - result.setUser(corpUser); - return result; - } + result.setUrn(entityUrn.toString()); + result.setType(EntityType.ROLE); - private static Actor mapActor(Actors actors) { - Actor actor = new Actor(); - actor.setUsers(mapRoleUsers(actors.getUsers())); - return actor; - } + final EnvelopedAspectMap aspects = input.getAspects(); - private static List mapRoleUsers(RoleUserArray users) { - if (users == null) { - return null; - } - return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); + if (roleKeyAspect != null) { + result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); + } + final EnvelopedAspect envelopedPropertiesAspect = + aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); + if (envelopedPropertiesAspect != null) { + result.setProperties( + mapRoleProperties( + new com.linkedin.role.RoleProperties(envelopedPropertiesAspect.getValue().data()))); } - @Override - public Role apply(EntityResponse input) { - - - final Role result = new Role(); - final Urn entityUrn = input.getUrn(); - - result.setUrn(entityUrn.toString()); - result.setType(EntityType.ROLE); - - final EnvelopedAspectMap aspects = input.getAspects(); - - final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); - if (roleKeyAspect != null) { - result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); - } - final EnvelopedAspect envelopedPropertiesAspect = aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); - if (envelopedPropertiesAspect != null) { - result.setProperties(mapRoleProperties( - new com.linkedin.role.RoleProperties( - envelopedPropertiesAspect.getValue().data())) - ); - } - - final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); - if (envelopedUsers != null) { - result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); - } - - return result; + final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); + if (envelopedUsers != null) { + result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index 748753c4e22b1..b543a40cbac41 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -8,15 +8,15 @@ import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; 
import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType { +public class SchemaFieldType + implements com.linkedin.datahub.graphql.types.EntityType { - public SchemaFieldType() { } + public SchemaFieldType() {} @Override public EntityType type() { @@ -34,18 +34,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List schemaFieldUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List schemaFieldUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { return schemaFieldUrns.stream() .map(this::mapSchemaFieldUrn) - .map(schemaFieldEntity -> DataFetcherResult.newResult() - .data(schemaFieldEntity) - .build() - ) + .map( + schemaFieldEntity -> + DataFetcherResult.newResult().data(schemaFieldEntity).build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -66,6 +65,4 @@ private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { throw new RuntimeException("Failed to load schemaField entity", e); } } - } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index f79b23033c995..c56833cc817eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.types.tag; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.Entity; @@ -26,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -44,136 +46,150 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class TagType implements com.linkedin.datahub.graphql.types.SearchableEntityType, - MutableType { - - private static final Set FACET_FIELDS = Collections.emptySet(); - - private 
final EntityClient _entityClient; - - public TagType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return Tag.class; +public class TagType + implements com.linkedin.datahub.graphql.types.SearchableEntityType, + MutableType { + + private static final Set FACET_FIELDS = Collections.emptySet(); + + private final EntityClient _entityClient; + + public TagType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return Tag.class; + } + + @Override + public EntityType type() { + return EntityType.TAG; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class inputClass() { + return TagUpdateInput.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + + final List tagUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map tagMap = + _entityClient.batchGetV2( + TAG_ENTITY_NAME, new HashSet<>(tagUrns), null, context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : tagUrns) { + gmsResults.add(tagMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsTag -> + gmsTag == null + ? null + : DataFetcherResult.newResult().data(TagMapper.map(gmsTag)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Tags", e); } - - @Override - public EntityType type() { - return EntityType.TAG; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class inputClass() { - return TagUpdateInput.class; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - - final List tagUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map tagMap = _entityClient.batchGetV2(TAG_ENTITY_NAME, new HashSet<>(tagUrns), - null, context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : tagUrns) { - gmsResults.add(tagMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsTag -> gmsTag == null ? 
null - : DataFetcherResult.newResult() - .data(TagMapper.map(gmsTag)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Tags", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("tag", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - - @Override - public Tag update(@Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = TagUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "tag", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public Tag update( + @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = TagUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.TAG_PRIVILEGES.getResourceType(), - update.getUrn(), - orPrivilegeGroups); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.TAG_PRIVILEGES.getResourceType(), + update.getUrn(), + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDescription() != null || updateInput.getName() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (updateInput.getDescription() != null || updateInput.getName() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index f4d5f0a549a0e..72665535e5980 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -4,35 +4,36 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Tag; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class GlobalTagsMapper { - public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); + public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); - public static com.linkedin.datahub.graphql.generated.GlobalTags map( - @Nonnull final GlobalTags standardTags, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(standardTags, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.GlobalTags map( + @Nonnull final GlobalTags standardTags, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(standardTags, entityUrn); + } - public com.linkedin.datahub.graphql.generated.GlobalTags apply(@Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); - result.setTags(input.getTags().stream().map(tag -> this.mapTagAssociation(tag, entityUrn)).collect(Collectors.toList())); - return result; - } + public com.linkedin.datahub.graphql.generated.GlobalTags apply( + @Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlobalTags result = + new com.linkedin.datahub.graphql.generated.GlobalTags(); + result.setTags( + input.getTags().stream() + .map(tag -> this.mapTagAssociation(tag, entityUrn)) + .collect(Collectors.toList())); + return result; + } - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); - final Tag resultTag = new Tag(); - resultTag.setUrn(input.getTag().toString()); - result.setTag(resultTag); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( + @Nonnull final TagAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.TagAssociation result = + new com.linkedin.datahub.graphql.generated.TagAssociation(); + final Tag resultTag = new Tag(); + resultTag.setUrn(input.getTag().toString()); + result.setTag(resultTag); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 775c123070a80..3792a42376004 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -4,27 +4,28 @@ import com.linkedin.common.urn.TagUrn; import com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; -public class TagAssociationUpdateMapper implements ModelMapper { +public class TagAssociationUpdateMapper + implements ModelMapper { - public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); + public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); - } + public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(tagAssociationUpdate); + } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { - final TagAssociation output = new TagAssociation(); - try { - output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to update tag with urn %s, invalid urn", - tagAssociationUpdate.getTag().getUrn())); - } - return output; + public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { + final TagAssociation output = new TagAssociation(); + try { + output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to update tag with urn %s, invalid urn", + tagAssociationUpdate.getTag().getUrn())); } - + return output; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index 43736b412b004..d6ce24582678d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -16,63 +18,61 @@ import com.linkedin.tag.TagProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mapper implementations */ public class TagMapper implements ModelMapper { - public static final TagMapper INSTANCE = new TagMapper(); + public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { - final Tag result = new Tag(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.TAG); + public static Tag map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - final String legacyName = entityResponse.getUrn().getId(); - result.setName(legacyName); + @Override + public Tag apply(@Nonnull final EntityResponse entityResponse) { + final Tag result = new Tag(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.TAG); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (tag, dataMap) -> - tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + final String legacyName = entityResponse.getUrn().getId(); + result.setName(legacyName); - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); + mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (tag, dataMap) -> tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - return mappingHelper.getResult(); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - TagKey tagKey = new TagKey(dataMap); - tag.setName(tagKey.getName()); - } + return mappingHelper.getResult(); + } + + private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - TagKey tagKey = new TagKey(dataMap); - tag.setName(tagKey.getName()); - } + TagKey tagKey = new TagKey(dataMap); + tag.setName(tagKey.getName()); + } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - final TagProperties properties = new TagProperties(dataMap); - final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = - new com.linkedin.datahub.graphql.generated.TagProperties.Builder() - .setColorHex(properties.getColorHex(GetMode.DEFAULT)) - .setName(properties.getName(GetMode.DEFAULT)) - .setDescription(properties.getDescription(GetMode.DEFAULT)) - .build(); - tag.setProperties(graphQlProperties); - // Set deprecated top-level description field.
- if (properties.hasDescription()) { - tag.setDescription(properties.getDescription()); - } + private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + final TagProperties properties = new TagProperties(dataMap); + final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = + new com.linkedin.datahub.graphql.generated.TagProperties.Builder() + .setColorHex(properties.getColorHex(GetMode.DEFAULT)) + .setName(properties.getName(GetMode.DEFAULT)) + .setDescription(properties.getDescription(GetMode.DEFAULT)) + .build(); + tag.setProperties(graphQlProperties); + // Set deprecated top-level description field. + if (properties.hasDescription()) { + tag.setDescription(properties.getDescription()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 505dd0d36954b..316994881ccfe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; @@ -19,24 +22,19 @@ import java.util.Collection; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class TagUpdateInputMapper implements InputModelMapper, Urn> { +public class TagUpdateInputMapper + implements InputModelMapper, Urn> { public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection map( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { return INSTANCE.apply(tagUpdate, actor); } @Override public Collection apply( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(2); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(TAG_ENTITY_NAME); @@ -59,9 +57,10 @@ public Collection apply( TagProperties tagProperties = new TagProperties(); tagProperties.setName(tagUpdate.getName()); tagProperties.setDescription(tagUpdate.getDescription()); - proposals.add(updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); } return proposals; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java index ddc9f33b25516..be67d17421917 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java @@ -1,15 +1,14 @@ package com.linkedin.datahub.graphql.types.test; -import com.linkedin.datahub.graphql.generated.TestDefinition; 
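These mapper classes all follow the same stateless-singleton convention: a public static INSTANCE plus a static map(...) entry point that delegates to apply(...). A minimal stand-alone illustration of the pattern; ExampleMapper and its String-to-Integer mapping are hypothetical, not part of this codebase:

import java.util.function.Function;

class ExampleMapper implements Function<String, Integer> {
  // Stateless singleton: one shared instance, no per-call state.
  public static final ExampleMapper INSTANCE = new ExampleMapper();

  // Static entry point mirroring GlobalTagsMapper.map(...): delegate to INSTANCE.
  public static Integer map(String input) {
    return INSTANCE.apply(input);
  }

  @Override
  public Integer apply(String input) {
    return input.length();
  }

  private ExampleMapper() {}
}
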
-import com.linkedin.test.TestInfo; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; +import com.linkedin.datahub.graphql.generated.TestDefinition; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; - +import com.linkedin.test.TestInfo; public class TestMapper { @@ -29,12 +28,11 @@ public static Test map(final EntityResponse entityResponse) { result.setName(testInfo.getName()); result.setDescription(testInfo.getDescription()); result.setDefinition(new TestDefinition(testInfo.getDefinition().getJson())); - } else { + } else { return null; } return result; } - private TestMapper() { - } -} \ No newline at end of file + private TestMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index 4b7df8a0d23d3..eefcc356c22a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -3,9 +3,9 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -20,15 +20,12 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class TestType implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.TEST_INFO_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = ImmutableSet.of(Constants.TEST_INFO_ASPECT_NAME); private final EntityClient _entityClient; - public TestType(final EntityClient entityClient) { + public TestType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -48,28 +45,28 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List testUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List testUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.TEST_ENTITY_NAME, - new HashSet<>(testUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.TEST_ENTITY_NAME, + new HashSet<>(testUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : testUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(TestMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult().data(TestMapper.map(gmsResult)).build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Tests", e); @@ -83,4 +80,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java index 7812282d0c1e5..02de39ffc644c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ChangeOperationType; import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; @@ -25,15 +27,14 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame structs for every schema +// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame +// structs for every schema // at every semantic version. @Slf4j public class SchemaBlameMapper { - public static GetSchemaBlameResult map(@Nonnull final List changeTransactions, + public static GetSchemaBlameResult map( + @Nonnull final List changeTransactions, @Nullable final String versionCutoff) { final GetSchemaBlameResult result = new GetSchemaBlameResult(); if (changeTransactions.isEmpty()) { @@ -46,7 +47,8 @@ public static GetSchemaBlameResult map(@Nonnull final List ch final String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - final String semanticVersionFilterString = versionCutoff == null ? latestSemanticVersionString : versionCutoff; + final String semanticVersionFilterString = + versionCutoff == null ? 
latestSemanticVersionString : versionCutoff; final Optional semanticVersionFilterOptional = createSemanticVersion(semanticVersionFilterString); if (semanticVersionFilterOptional.isEmpty()) { @@ -55,25 +57,30 @@ public static GetSchemaBlameResult map(@Nonnull final List ch final ComparableVersion semanticVersionFilter = semanticVersionFilterOptional.get(); - final List reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .filter(semanticVersionChangeTransactionPair -> - semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) <= 0) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + final List reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .filter( + semanticVersionChangeTransactionPair -> + semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) + <= 0) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); if (reversedChangeTransactions.isEmpty()) { return result; } - final String selectedSemanticVersion = truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); + final String selectedSemanticVersion = + truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); final long selectedSemanticVersionTimestamp = reversedChangeTransactions.get(0).getTimestamp(); final String selectedVersionStamp = reversedChangeTransactions.get(0).getVersionStamp(); result.setVersion( - new SemanticVersionStruct(selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); + new SemanticVersionStruct( + selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); for (ChangeTransaction changeTransaction : reversedChangeTransactions) { for (ChangeEvent changeEvent : changeTransaction.getChangeEvents()) { @@ -90,8 +97,10 @@ public static GetSchemaBlameResult map(@Nonnull final List ch SchemaFieldKey schemaFieldKey; try { - schemaFieldKey = (SchemaFieldKey) EntityKeyUtils.convertUrnToEntityKeyInternal(Urn.createFromString(schemaUrn), - new SchemaFieldKey().schema()); + schemaFieldKey = + (SchemaFieldKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + Urn.createFromString(schemaUrn), new SchemaFieldKey().schema()); } catch (Exception e) { log.debug(String.format("Could not generate schema urn for %s", schemaUrn)); continue; @@ -101,7 +110,10 @@ public static GetSchemaBlameResult map(@Nonnull final List ch schemaFieldBlame.setFieldPath(fieldPath); final SchemaFieldChange schemaFieldChange = - getLastSchemaFieldChange(changeEvent, changeTransaction.getTimestamp(), changeTransaction.getSemVer(), + getLastSchemaFieldChange( + changeEvent, + changeTransaction.getTimestamp(), + changeTransaction.getSemVer(), changeTransaction.getVersionStamp()); schemaFieldBlame.setSchemaFieldChange(schemaFieldChange); @@ -109,15 +121,17 @@ public static GetSchemaBlameResult map(@Nonnull final List ch } } - result.setSchemaFieldBlameList(schemaBlameMap.values() - .stream() - .filter(schemaFieldBlame -> !schemaFieldBlame.getSchemaFieldChange() - .getChangeType() - .equals(ChangeOperationType.REMOVE)) - .collect(Collectors.toList())); + result.setSchemaFieldBlameList( + schemaBlameMap.values().stream() + .filter( + 
schemaFieldBlame -> + !schemaFieldBlame + .getSchemaFieldChange() + .getChangeType() + .equals(ChangeOperationType.REMOVE)) + .collect(Collectors.toList())); return result; } - private SchemaBlameMapper() { - } -} \ No newline at end of file + private SchemaBlameMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java index 249957b1a1262..295ca0856821c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; import com.linkedin.datahub.graphql.generated.SemanticVersionStruct; import com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils; @@ -12,10 +14,8 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to list of schema versions. +// Class for converting ChangeTransactions received from the Timeline API to a list of schema +// versions. @Slf4j public class SchemaVersionListMapper { @@ -29,28 +29,36 @@ public static GetSchemaVersionListResult map(List changeTrans String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - long latestSemanticVersionTimestamp = changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); - String latestVersionStamp = changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); + long latestSemanticVersionTimestamp = + changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); + String latestVersionStamp = + changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); result.setLatestVersion( - new SemanticVersionStruct(latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); + new SemanticVersionStruct( + latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); - List reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + List reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); - List semanticVersionStructList = reversedChangeTransactions.stream() - .map(changeTransaction -> new SemanticVersionStruct(truncateSemanticVersion(changeTransaction.getSemVer()), - changeTransaction.getTimestamp(), changeTransaction.getVersionStamp())) - .collect(Collectors.toList()); + List semanticVersionStructList = + reversedChangeTransactions.stream() + .map( + changeTransaction -> + new SemanticVersionStruct(
truncateSemanticVersion(changeTransaction.getSemVer()), + changeTransaction.getTimestamp(), + changeTransaction.getVersionStamp())) + .collect(Collectors.toList()); result.setSemanticVersionList(semanticVersionStructList); return result; } - private SchemaVersionListMapper() { - } -} \ No newline at end of file + private SchemaVersionListMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java index 175cf678117f0..37acfe3da0f9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java @@ -9,13 +9,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; - @Slf4j public class TimelineUtils { - public static Optional> semanticVersionChangeTransactionPair( - ChangeTransaction changeTransaction) { - Optional semanticVersion = createSemanticVersion(changeTransaction.getSemVer()); + public static Optional> + semanticVersionChangeTransactionPair(ChangeTransaction changeTransaction) { + Optional semanticVersion = + createSemanticVersion(changeTransaction.getSemVer()); return semanticVersion.map(version -> Pair.of(version, changeTransaction)); } @@ -29,21 +29,24 @@ public static Optional createSemanticVersion(String semanticV } } - // The SemanticVersion is currently returned from the ChangeTransactions in the format "x.y.z-computed". This function + // The SemanticVersion is currently returned from the ChangeTransactions in the format + // "x.y.z-computed". This function // removes the suffix "computed". public static String truncateSemanticVersion(String semanticVersion) { String suffix = "-computed"; - return semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + return semanticVersion.endsWith(suffix) + ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; } - public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent, long timestamp, - String semanticVersion, String versionStamp) { + public static SchemaFieldChange getLastSchemaFieldChange( + ChangeEvent changeEvent, long timestamp, String semanticVersion, String versionStamp) { SchemaFieldChange schemaFieldChange = new SchemaFieldChange(); schemaFieldChange.setTimestampMillis(timestamp); schemaFieldChange.setLastSemanticVersion(truncateSemanticVersion(semanticVersion)); schemaFieldChange.setChangeType( - ChangeOperationType.valueOf(ChangeOperationType.class, changeEvent.getOperation().toString())); + ChangeOperationType.valueOf( + ChangeOperationType.class, changeEvent.getOperation().toString())); schemaFieldChange.setVersionStamp(versionStamp); String translatedChangeOperationType; @@ -65,15 +68,16 @@ public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent String suffix = "-computed"; String translatedSemanticVersion = - semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + semanticVersion.endsWith(suffix) + ? 
semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; - String lastSchemaFieldChange = String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); + String lastSchemaFieldChange = + String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); schemaFieldChange.setLastSchemaFieldChange(lastSchemaFieldChange); return schemaFieldChange; } - private TimelineUtils() { - } + private TimelineUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java index 3bf84d21a3215..e4e67c86f1ae6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java @@ -4,12 +4,13 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class FieldUsageCountsMapper implements ModelMapper { +public class FieldUsageCountsMapper + implements ModelMapper { public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper(); - public static FieldUsageCounts map(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + public static FieldUsageCounts map( + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { return INSTANCE.apply(usageCounts); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java index 453ae97d40306..3449c6782a46b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java @@ -5,18 +5,19 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class UsageAggregationMapper implements - ModelMapper { +public class UsageAggregationMapper + implements ModelMapper { public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper(); - public static UsageAggregation map(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public static UsageAggregation map( + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { return INSTANCE.apply(pdlUsageAggregation); } @Override - public UsageAggregation apply(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public UsageAggregation apply( + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { UsageAggregation result = new UsageAggregation(); result.setBucket(pdlUsageAggregation.getBucket()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java index 697b15d57e4e4..ff9f6fd5c4855 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java @@ -5,31 +5,34 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - 
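The list conversions in these usage mappers all reduce to the same stream().map(...).collect(Collectors.toList()) shape. A generic sketch of that shape, where mapList and the integer example are illustrative only:

import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

class StreamMapSketch {
  // Generic form of the stream().map(mapper::map).collect(toList()) calls these
  // classes use to convert Pegasus arrays into GraphQL lists.
  static <A, B> List<B> mapList(List<A> input, Function<A, B> mapper) {
    return input.stream().map(mapper).collect(Collectors.toList());
  }

  public static void main(String[] args) {
    System.out.println(mapList(List.of(1, 2, 3), i -> i * 2)); // [2, 4, 6]
  }
}
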
-public class UsageAggregationMetricsMapper implements - ModelMapper { +public class UsageAggregationMetricsMapper + implements ModelMapper { public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper(); - public static UsageAggregationMetrics map(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + public static UsageAggregationMetrics map( + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { return INSTANCE.apply(usageAggregationMetrics); } @Override - public UsageAggregationMetrics apply(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + public UsageAggregationMetrics apply( + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { UsageAggregationMetrics result = new UsageAggregationMetrics(); result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries()); result.setUniqueUserCount(usageAggregationMetrics.getUniqueUserCount()); result.setTopSqlQueries(usageAggregationMetrics.getTopSqlQueries()); if (usageAggregationMetrics.hasFields()) { result.setFields( - usageAggregationMetrics.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + usageAggregationMetrics.getFields().stream() + .map(FieldUsageCountsMapper::map) + .collect(Collectors.toList())); } if (usageAggregationMetrics.hasUsers()) { - result.setUsers(usageAggregationMetrics.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + usageAggregationMetrics.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java index ba3b86b72af8b..63fe051b7ede9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java @@ -5,30 +5,35 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class UsageQueryResultAggregationMapper + implements ModelMapper< + com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> { -public class UsageQueryResultAggregationMapper implements - ModelMapper { + public static final UsageQueryResultAggregationMapper INSTANCE = + new UsageQueryResultAggregationMapper(); - public static final UsageQueryResultAggregationMapper INSTANCE = new UsageQueryResultAggregationMapper(); - - public static UsageQueryResultAggregations map(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public static UsageQueryResultAggregations map( + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { return INSTANCE.apply(pdlUsageResultAggregations); } @Override - public UsageQueryResultAggregations apply(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public UsageQueryResultAggregations apply( + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { UsageQueryResultAggregations result = new UsageQueryResultAggregations(); 
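Note the hasFields()/hasUsers() guards used throughout this hunk: optional Pegasus fields are copied only when present, so absent fields stay unset on the GraphQL side. A compact sketch of that guard pattern, where Source and Target are simplified stand-ins for the generated classes:

import java.util.List;

class OptionalCopySketch {
  record Source(Integer totalSqlQueries, List<String> fields) {}

  static class Target {
    Integer totalSqlQueries;
    List<String> fields;
  }

  static Target map(Source in) {
    Target out = new Target();
    out.totalSqlQueries = in.totalSqlQueries();
    // Mirror the hasFields() guard: copy only when the optional field is present.
    if (in.fields() != null) {
      out.fields = List.copyOf(in.fields());
    }
    return out;
  }
}
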
result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries()); result.setUniqueUserCount(pdlUsageResultAggregations.getUniqueUserCount()); if (pdlUsageResultAggregations.hasFields()) { result.setFields( - pdlUsageResultAggregations.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + pdlUsageResultAggregations.getFields().stream() + .map(FieldUsageCountsMapper::map) + .collect(Collectors.toList())); } if (pdlUsageResultAggregations.hasUsers()) { - result.setUsers(pdlUsageResultAggregations.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + pdlUsageResultAggregations.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index f54259180c739..444605cd99377 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,17 +1,17 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.UsageQueryResult; - import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class UsageQueryResultMapper implements ModelMapper { +public class UsageQueryResultMapper + implements ModelMapper { public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); - public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + public static UsageQueryResult map( + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { return INSTANCE.apply(pdlUsageResult); } @@ -19,11 +19,14 @@ public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryR public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { UsageQueryResult result = new UsageQueryResult(); if (pdlUsageResult.hasAggregations()) { - result.setAggregations(UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); + result.setAggregations( + UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); } if (pdlUsageResult.hasBuckets()) { - result.setBuckets(pdlUsageResult.getBuckets().stream().map( - bucket -> UsageAggregationMapper.map(bucket)).collect(Collectors.toList())); + result.setBuckets( + pdlUsageResult.getBuckets().stream() + .map(bucket -> UsageAggregationMapper.map(bucket)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java index b525a761841e3..014003dd86554 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.CorpUser; - import 
com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class UserUsageCountsMapper implements - ModelMapper { +public class UserUsageCountsMapper + implements ModelMapper { public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper(); - public static UserUsageCounts map(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public static UserUsageCounts map( + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { return INSTANCE.apply(pdlUsageResultAggregations); } @Override - public UserUsageCounts apply(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public UserUsageCounts apply( + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { UserUsageCounts result = new UserUsageCounts(); if (pdlUsageResultAggregations.hasUser()) { CorpUser partialUser = new CorpUser(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index f6c348937c7a5..8ea06f46d5133 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewDefinition; @@ -24,8 +26,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - @Slf4j public class DataHubViewMapper implements ModelMapper { @@ -57,20 +57,26 @@ private void mapDataHubViewInfo(@Nonnull final DataHubView view, @Nonnull final } @Nonnull - private DataHubViewDefinition mapViewDefinition(@Nonnull final com.linkedin.view.DataHubViewDefinition definition) { + private DataHubViewDefinition mapViewDefinition( + @Nonnull final com.linkedin.view.DataHubViewDefinition definition) { final DataHubViewDefinition result = new DataHubViewDefinition(); result.setFilter(mapFilter(definition.getFilter())); - result.setEntityTypes(definition.getEntityTypes().stream().map(EntityTypeMapper::getType).collect( - Collectors.toList())); + result.setEntityTypes( + definition.getEntityTypes().stream() + .map(EntityTypeMapper::getType) + .collect(Collectors.toList())); return result; } @Nullable - private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.filter.Filter filter) { - // This assumes that people DO NOT emit Views on their own, since we expect that the Filter structure is within + private DataHubViewFilter mapFilter( + @Nonnull final com.linkedin.metadata.query.filter.Filter filter) { + // This assumes that people DO NOT emit Views on their own, since we expect that the Filter + // structure is within // a finite set of possibilities. // - // If we find a View that was ingested manually and malformed, then we log that and return a default. + // If we find a View that was ingested manually and malformed, then we log that and return a + // default. 
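mapFilter below normalizes a persisted View filter into either a single AND group or a flat list of ORs, and falls back to empty filters with a warning when the shape is unexpected. A toy version of that defensive flattening, where OrClause is a hypothetical stand-in for ConjunctiveCriterion:

import java.util.Collections;
import java.util.List;

class ViewFilterFlattenSketch {
  record OrClause(List<String> and) {}

  // Flatten OR clauses that each hold at most one criterion; treat anything
  // else as malformed and return no filters, as the mapper below does.
  static List<String> flattenOrs(List<OrClause> ors) {
    if (ors == null) {
      return Collections.emptyList();
    }
    if (ors.stream().anyMatch(or -> or.and() != null && or.and().size() > 1)) {
      System.err.println("Malformed view filter; returning empty filters.");
      return Collections.emptyList();
    }
    return ors.stream()
        .filter(or -> or.and() != null && !or.and().isEmpty())
        .map(or -> or.and().get(0))
        .toList();
  }
}
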
final DataHubViewFilter result = new DataHubViewFilter(); if (filter.hasOr() && filter.getOr().size() == 1) { // Then we are looking at an AND with multiple sub-conditions. @@ -84,9 +90,7 @@ private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.f return result; } - /** - * This simply converts a List of leaf criterion into the FacetFiler equivalent. - */ + /** This simply converts a list of leaf criteria into the FacetFilter equivalent. */ @Nonnull private List mapAndFilters(@Nullable final List ands) { // If the array is missing, return empty array. @@ -98,9 +102,9 @@ private List mapAndFilters(@Nullable final List ands) { } /** - * This converts a list of Conjunctive Criterion into a flattened list - * of FacetFilters. This method makes the assumption that WE (our GraphQL API) - * has minted the View and that each or criterion contains at maximum one nested condition. + * This converts a list of Conjunctive Criterion into a flattened list of FacetFilters. This + * method makes the assumption that WE (our GraphQL API) has minted the View and that each OR + * criterion contains at most one nested condition. */ @Nonnull private List mapOrFilters(@Nullable final List ors) { @@ -109,8 +113,10 @@ private List mapOrFilters(@Nullable final List or.hasAnd() && or.getAnd().size() > 1)) { - log.warn(String.format( - "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", ors)); + log.warn( + String.format( + "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", + ors)); return Collections.emptyList(); } // It is assumed that in this case, the view is a flat list of ORs.
Thus, we filter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java index 21a80e3f900d4..9b3680bde9b2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubViewType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubViewType + implements com.linkedin.datahub.graphql.types.EntityType { public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List viewUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATAHUB_VIEW_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_VIEW_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubViewMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataHubViewMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Views", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java index bb9de5fb96802..4b837605d4e31 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java @@ -5,39 +5,30 @@ import org.joda.time.DateTimeConstants; public class DateUtil { - public DateTime getNow() { - return DateTime.now(); - } + public DateTime getNow() { + return DateTime.now(); + } - public DateTime getStartOfNextWeek() { - return setTimeToZero(getNow() - .withDayOfWeek(DateTimeConstants.SUNDAY) - .plusDays(1)); - } + public DateTime getStartOfNextWeek() { + return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1)); + } - public DateTime getStartOfNextMonth() { - return setTimeToZero(getNow() - .withDayOfMonth(1) - .plusMonths(1)); - } + public DateTime getStartOfNextMonth() { + return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1)); + } - public DateTime setTimeToZero(DateTime input) { - return input.withHourOfDay(0) - .withMinuteOfHour(0) - .withSecondOfMinute(0) - .withMillisOfDay(0); - } + public DateTime setTimeToZero(DateTime input) { + return input.withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0).withMillisOfDay(0); + } - public DateTime getTomorrowStart() { - return setTimeToZero(getNow().plusDays(1)); - } + public DateTime getTomorrowStart() { + return setTimeToZero(getNow().plusDays(1)); + } - public DateRange getTrailingWeekDateRange() { - final DateTime todayEnd = getTomorrowStart().minusMillis(1); - final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); - return new DateRange( - String.valueOf(aWeekAgoStart.getMillis()), - String.valueOf(todayEnd.getMillis()) - ); - } + public DateRange getTrailingWeekDateRange() { + final DateTime todayEnd = getTomorrowStart().minusMillis(1); + final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); + return new DateRange( + String.valueOf(aWeekAgoStart.getMillis()), String.valueOf(todayEnd.getMillis())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java index 7f90071c6770c..904db311d34d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java @@ -6,7 +6,6 @@ import java.util.List; import javax.annotation.Nullable; - public class SearchInsightsUtil { public static List getInsightsFromFeatures(@Nullable final DoubleMap features) { @@ -18,5 +17,5 @@ public static List getInsightsFromFeatures(@Nullable final Double return Collections.emptyList(); } - private SearchInsightsUtil() { } + private SearchInsightsUtil() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index 606123cac926d..69cd73ecd7d68 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -14,17 +14,17 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; - import java.util.List; - +import org.mockito.Mockito; public class TestUtils { public static EntityService getMockEntityService() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); + EntityRegistry registry = + new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); EntityService mockEntityService = Mockito.mock(EntityService.class); Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry); return mockEntityService; @@ -44,9 +44,10 @@ public static QueryContext getMockAllowContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -60,9 +61,10 @@ public static QueryContext getMockAllowContext(String actorUrn, AuthorizationReq Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -80,9 +82,10 @@ public static QueryContext getMockDenyContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -96,55 +99,54 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + public static void verifyIngestProposal( + 
EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal)); } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, List proposals) { - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(proposals, mockService.getEntityRegistry()) - .build(); - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(batch), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifyIngestProposal( + EntityService mockService, int numberOfInvocations, List proposals) { + AspectsBatchImpl batch = + AspectsBatchImpl.builder().mcps(proposals, mockService.getEntityRegistry()).build(); + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(Mockito.eq(batch), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(AuditStamp.class), Mockito.eq(false)); } public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( Mockito.any(MetadataChangeProposal.class), Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.eq(false)); } public static void verifyNoIngestProposal(EntityService mockService) { - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } - private TestUtils() { } + private TestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java index 7cd548a4790ba..57d85e5b204c2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static 
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.common.urn.UrnUtils;
@@ -15,16 +18,11 @@ import com.linkedin.metadata.query.filter.CriterionArray;
 import com.linkedin.metadata.query.filter.Filter;
 import graphql.schema.DataFetchingEnvironment;
-import org.testng.annotations.Test;
-import org.mockito.Mockito;
-
 import java.util.HashSet;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-import static org.testng.AssertJUnit.assertEquals;
-
+import org.mockito.Mockito;
+import org.testng.annotations.Test;
 
 public class ResolverUtilsTest {
 
@@ -35,46 +33,48 @@ public void testCriterionFromFilter() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockAllowContext);
 
     // this is the expected path
-    Criterion valuesCriterion = criterionFromFilter(
-        new FacetFilterInput(
-            "tags",
-            null,
-            ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"),
-            false,
-            FilterOperator.EQUAL
-        )
-    );
-    assertEquals(valuesCriterion, new Criterion().setValue("urn:li:tag:abc").setValues(
-        new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))
-    ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"));
+    Criterion valuesCriterion =
+        criterionFromFilter(
+            new FacetFilterInput(
+                "tags",
+                null,
+                ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"),
+                false,
+                FilterOperator.EQUAL));
+    assertEquals(
+        valuesCriterion,
+        new Criterion()
+            .setValue("urn:li:tag:abc")
+            .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")))
+            .setNegated(false)
+            .setCondition(Condition.EQUAL)
+            .setField("tags.keyword"));
 
     // this is the legacy pathway
-    Criterion valueCriterion = criterionFromFilter(
-        new FacetFilterInput(
-            "tags",
-            "urn:li:tag:abc",
-            null,
-            true,
-            FilterOperator.EQUAL
-        )
-    );
-    assertEquals(valueCriterion, new Criterion().setValue("urn:li:tag:abc").setValues(
-        new StringArray(ImmutableList.of("urn:li:tag:abc"))
-    ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword"));
+    Criterion valueCriterion =
+        criterionFromFilter(
+            new FacetFilterInput("tags", "urn:li:tag:abc", null, true, FilterOperator.EQUAL));
+    assertEquals(
+        valueCriterion,
+        new Criterion()
+            .setValue("urn:li:tag:abc")
+            .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc")))
+            .setNegated(true)
+            .setCondition(Condition.EQUAL)
+            .setField("tags.keyword"));
 
-    // check that both being null doesn't cause a NPE. this should never happen except via API interaction
-    Criterion doubleNullCriterion = criterionFromFilter(
-        new FacetFilterInput(
-            "tags",
-            null,
-            null,
-            true,
-            FilterOperator.EQUAL
-        )
-    );
-    assertEquals(doubleNullCriterion, new Criterion().setValue("").setValues(
-        new StringArray(ImmutableList.of())
-    ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword"));
+    // check that both being null doesn't cause a NPE. this should never happen except via API
+    // interaction
+    Criterion doubleNullCriterion =
+        criterionFromFilter(new FacetFilterInput("tags", null, null, true, FilterOperator.EQUAL));
+    assertEquals(
+        doubleNullCriterion,
+        new Criterion()
+            .setValue("")
+            .setValues(new StringArray(ImmutableList.of()))
+            .setNegated(true)
+            .setCondition(Condition.EQUAL)
+            .setField("tags.keyword"));
   }
 
   @Test
@@ -85,21 +85,25 @@ public void testBuildFilterWithUrns() throws Exception {
     urns.add(urn1);
     urns.add(urn2);
 
-    Criterion ownersCriterion = new Criterion()
-        .setField("owners")
-        .setValues(new StringArray("urn:li:corpuser:chris"))
-        .setCondition(Condition.EQUAL);
+    Criterion ownersCriterion =
+        new Criterion()
+            .setField("owners")
+            .setValues(new StringArray("urn:li:corpuser:chris"))
+            .setCondition(Condition.EQUAL);
     CriterionArray andCriterionArray = new CriterionArray(ImmutableList.of(ownersCriterion));
     final Filter filter = new Filter();
-    filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-        new ConjunctiveCriterion().setAnd(andCriterionArray)
-    )));
+    filter.setOr(
+        new ConjunctiveCriterionArray(
+            ImmutableList.of(new ConjunctiveCriterion().setAnd(andCriterionArray))));
 
     Filter finalFilter = buildFilterWithUrns(urns, filter);
 
-    Criterion urnsCriterion = new Criterion().setField("urn")
-        .setValue("")
-        .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList())));
+    Criterion urnsCriterion =
+        new Criterion()
+            .setField("urn")
+            .setValue("")
+            .setValues(
+                new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList())));
 
     for (ConjunctiveCriterion conjunctiveCriterion : finalFilter.getOr()) {
       assertEquals(conjunctiveCriterion.getAnd().contains(ownersCriterion), true);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
index c7424174255ce..0d87ce4b2e2ad 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers;
 
+import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
+import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext;
+import static org.testng.Assert.assertThrows;
+import static org.testng.Assert.assertTrue;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
@@ -9,35 +14,35 @@ import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.service.LineageService;
 import graphql.schema.DataFetchingEnvironment;
-import org.joda.time.DateTimeUtils;
-import org.mockito.Mockito;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CompletionException;
-
-import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
-import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext;
-import static org.testng.Assert.assertThrows;
-import static org.testng.Assert.assertTrue;
+import org.joda.time.DateTimeUtils;
+import org.mockito.Mockito;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
 
 public class UpdateLineageResolverTest {
 
   private static EntityService _mockService = Mockito.mock(EntityService.class);
   private static LineageService _lineageService;
   private static DataFetchingEnvironment _mockEnv;
-  private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)";
-  private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)";
-  private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)";
-  private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)";
+  private static final String DATASET_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)";
+  private static final String DATASET_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)";
+  private static final String DATASET_URN_3 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)";
+  private static final String DATASET_URN_4 =
+      "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)";
   private static final String CHART_URN = "urn:li:chart:(looker,baz)";
   private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)";
-  private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
-  private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
+  private static final String DATAJOB_URN_1 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
+  private static final String DATAJOB_URN_2 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
 
   @BeforeMethod
   public void setupTest() {
@@ -50,8 +55,12 @@ public void setupTest() {
   // Adds upstream for dataset1 to dataset2 and removes edge to dataset3
   @Test
   public void testUpdateDatasetLineage() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4));
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DATASET_URN_1, DATASET_URN_2),
+            createLineageEdge(DATASET_URN_3, DATASET_URN_4));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -65,7 +74,8 @@ public void testUpdateDatasetLineage() throws Exception {
 
   @Test
   public void testFailUpdateWithMissingDownstream() throws Exception {
-    List<LineageEdge> edgesToAdd = Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2));
+    List<LineageEdge> edgesToAdd =
+        Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2));
     mockInputAndContext(edgesToAdd, new ArrayList<>());
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -93,8 +103,12 @@ public void testUpdateChartLineage() throws Exception {
   // Adds upstream for dashboard to dataset2 and chart1 and removes edge to dataset1
   @Test
   public void testUpdateDashboardLineage() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_2), createLineageEdge(DASHBOARD_URN, CHART_URN));
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DASHBOARD_URN, DATASET_URN_2),
+            createLineageEdge(DASHBOARD_URN, CHART_URN));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1));
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -109,12 +123,13 @@ public void testUpdateDashboardLineage() throws Exception {
   // Adds upstream datajob and dataset and one downstream dataset
   @Test
   public void testUpdateDataJobLineage() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(
-        createLineageEdge(DATAJOB_URN_1, DATASET_URN_2),
-        createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2),
-        createLineageEdge(DATASET_URN_3, DATAJOB_URN_1)
-    );
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DATAJOB_URN_1, DATASET_URN_2),
+            createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2),
+            createLineageEdge(DATASET_URN_3, DATAJOB_URN_1));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1));
     mockInputAndContext(edgesToAdd, edgesToRemove);
     UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService);
 
@@ -129,8 +144,12 @@ public void testUpdateDataJobLineage() throws Exception {
 
   @Test
   public void testFailUpdateLineageNoPermissions() throws Exception {
-    List<LineageEdge> edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4));
-    List<LineageEdge> edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
+    List<LineageEdge> edgesToAdd =
+        Arrays.asList(
+            createLineageEdge(DATASET_URN_1, DATASET_URN_2),
+            createLineageEdge(DATASET_URN_3, DATASET_URN_4));
+    List<LineageEdge> edgesToRemove =
+        Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3));
 
     QueryContext mockContext = getMockDenyContext();
     UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove);
@@ -147,7 +166,6 @@ public void testFailUpdateLineageNoPermissions() throws Exception {
     assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join());
   }
 
-
   private void mockInputAndContext(List<LineageEdge> edgesToAdd, List<LineageEdge> edgesToRemove) {
     QueryContext mockContext = getMockAllowContext();
     UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java
index 6fdb1f2b70ce4..f590e71146eb4 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.assertion;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.assertion.AssertionResult;
@@ -19,9 +21,6 @@ import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class AssertionRunEventResolverTest {
   @Test
   public void testGetSuccess() throws Exception {
@@ -29,35 +28,36 @@ public void testGetSuccess() throws Exception {
     final Urn assertionUrn = Urn.createFromString("urn:li:assertion:guid-1");
     final Urn asserteeUrn = Urn.createFromString("urn:li:dataset:(test,test,test)");
 
-    final AssertionRunEvent gmsRunEvent = new AssertionRunEvent()
-        .setTimestampMillis(12L)
-        .setAssertionUrn(assertionUrn)
-        .setRunId("test-id")
-        .setAsserteeUrn(asserteeUrn)
-        .setStatus(AssertionRunStatus.COMPLETE)
-        .setResult(new AssertionResult()
-            .setActualAggValue(10)
-            .setMissingCount(0L)
-            .setRowCount(1L)
-            .setType(AssertionResultType.SUCCESS)
-            .setUnexpectedCount(2L)
-        );
+    final AssertionRunEvent gmsRunEvent =
+        new AssertionRunEvent()
+            .setTimestampMillis(12L)
+            .setAssertionUrn(assertionUrn)
+            .setRunId("test-id")
+            .setAsserteeUrn(asserteeUrn)
+            .setStatus(AssertionRunStatus.COMPLETE)
+            .setResult(
+                new AssertionResult()
+                    .setActualAggValue(10)
+                    .setMissingCount(0L)
+                    .setRowCount(1L)
+                    .setType(AssertionResultType.SUCCESS)
+                    .setUnexpectedCount(2L));
 
-    Mockito.when(mockClient.getTimeseriesAspectValues(
-        Mockito.eq(assertionUrn.toString()),
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
-        Mockito.eq(0L),
-        Mockito.eq(10L),
-        Mockito.eq(5),
-        Mockito.eq(AssertionRunEventResolver.buildFilter(null, AssertionRunStatus.COMPLETE.toString())),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        ImmutableList.of(
-            new EnvelopedAspect()
-                .setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent))
-        )
-    );
+    Mockito.when(
+            mockClient.getTimeseriesAspectValues(
+                Mockito.eq(assertionUrn.toString()),
+                Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+                Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
+                Mockito.eq(0L),
+                Mockito.eq(10L),
+                Mockito.eq(5),
+                Mockito.eq(
+                    AssertionRunEventResolver.buildFilter(
+                        null, AssertionRunStatus.COMPLETE.toString())),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableList.of(
+                new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent))));
 
     AssertionRunEventResolver resolver = new AssertionRunEventResolver(mockClient);
 
@@ -66,9 +66,12 @@ public void testGetSuccess() throws Exception {
     Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
 
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))).thenReturn("COMPLETE");
-    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))).thenReturn(0L);
-    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))).thenReturn(10L);
+    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null)))
+        .thenReturn("COMPLETE");
+    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null)))
+        .thenReturn(0L);
+    Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null)))
+        .thenReturn(10L);
     Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("limit"), Mockito.eq(null))).thenReturn(5);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -78,32 +81,37 @@ public void testGetSuccess() throws Exception {
     AssertionRunEventsResult result = resolver.get(mockEnv).get();
 
-    Mockito.verify(mockClient, Mockito.times(1)).getTimeseriesAspectValues(
-        Mockito.eq(assertionUrn.toString()),
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
-        Mockito.eq(0L),
-        Mockito.eq(10L),
-        Mockito.eq(5),
-        Mockito.any(Filter.class),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .getTimeseriesAspectValues(
+            Mockito.eq(assertionUrn.toString()),
+            Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+            Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME),
+            Mockito.eq(0L),
+            Mockito.eq(10L),
+            Mockito.eq(5),
+            Mockito.any(Filter.class),
+            Mockito.any(Authentication.class));
 
     // Assert that GraphQL assertion run event matches expectations
     assertEquals(result.getTotal(), 1);
     assertEquals(result.getFailed(), 0);
     assertEquals(result.getSucceeded(), 1);
 
-    com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = resolver.get(mockEnv).get().getRunEvents().get(0);
+    com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent =
+        resolver.get(mockEnv).get().getRunEvents().get(0);
     assertEquals(graphqlRunEvent.getAssertionUrn(), assertionUrn.toString());
     assertEquals(graphqlRunEvent.getAsserteeUrn(), asserteeUrn.toString());
     assertEquals(graphqlRunEvent.getRunId(), "test-id");
-    assertEquals(graphqlRunEvent.getStatus(), com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE);
+    assertEquals(
+        graphqlRunEvent.getStatus(),
+        com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE);
     assertEquals((float) graphqlRunEvent.getTimestampMillis(), 12L);
     assertEquals((float) graphqlRunEvent.getResult().getActualAggValue(), 10);
     assertEquals((long) graphqlRunEvent.getResult().getMissingCount(), 0L);
     assertEquals((long) graphqlRunEvent.getResult().getRowCount(), 1L);
     assertEquals((long) graphqlRunEvent.getResult().getUnexpectedCount(), 2L);
-    assertEquals(graphqlRunEvent.getResult().getType(), com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS);
+    assertEquals(
+        graphqlRunEvent.getResult().getType(),
+        com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS);
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
index 8afec0a889577..019d254ffdaac 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.assertion;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.assertion.AssertionInfo;
@@ -18,10 +21,6 @@ import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class DeleteAssertionResolverTest {
 
   private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid";
@@ -33,20 +32,17 @@ public void testGetSuccess() throws Exception {
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(TEST_ASSERTION_URN),
-        Constants.ASSERTION_INFO_ASPECT_NAME,
-        0L
-    )).thenReturn(
-        new AssertionInfo()
-            .setType(AssertionType.DATASET)
-            .setDatasetAssertion(
-                new DatasetAssertionInfo()
-                    .setDataset(Urn.createFromString(TEST_DATASET_URN))
-                    .setScope(DatasetAssertionScope.DATASET_COLUMN)
-                    .setOperator(AssertionStdOperator.BETWEEN)
-            )
-    );
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
+        .thenReturn(
+            new AssertionInfo()
+                .setType(AssertionType.DATASET)
+                .setDatasetAssertion(
+                    new DatasetAssertionInfo()
+                        .setDataset(Urn.createFromString(TEST_DATASET_URN))
+                        .setScope(DatasetAssertionScope.DATASET_COLUMN)
+                        .setOperator(AssertionStdOperator.BETWEEN)));
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
@@ -58,20 +54,19 @@ public void testGetSuccess() throws Exception {
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.any(Authentication.class));
 
-    Mockito.verify(mockService, Mockito.times(1)).getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
-        Mockito.eq(0L)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .getAspect(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
+            Mockito.eq(0L));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
   }
 
   @Test
@@ -80,11 +75,10 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception {
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(TEST_ASSERTION_URN),
-        Constants.ASSERTION_INFO_ASPECT_NAME,
-        0L
-    )).thenReturn(null);
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
+        .thenReturn(null);
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
@@ -96,20 +90,19 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception {
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.any(Authentication.class));
 
-    Mockito.verify(mockService, Mockito.times(1)).getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
-        Mockito.eq(0L)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .getAspect(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME),
+            Mockito.eq(0L));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
   }
 
   @Test
@@ -130,21 +123,20 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception {
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
-        Mockito.any(Authentication.class)
-    );
-
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))),
-        Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))
-    );
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)),
+            Mockito.any(Authentication.class));
+
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))),
+            Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)));
   }
 
   @Test
@@ -153,20 +145,17 @@ public void testGetUnauthorized() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
-    Mockito.when(mockService.getAspect(
-        Urn.createFromString(TEST_ASSERTION_URN),
-        Constants.ASSERTION_INFO_ASPECT_NAME,
-        0L
-    )).thenReturn(
-        new AssertionInfo()
-            .setType(AssertionType.DATASET)
-            .setDatasetAssertion(
-                new DatasetAssertionInfo()
-                    .setDataset(Urn.createFromString(TEST_DATASET_URN))
-                    .setScope(DatasetAssertionScope.DATASET_COLUMN)
-                    .setOperator(AssertionStdOperator.BETWEEN)
-            )
-    );
+    Mockito.when(
+            mockService.getAspect(
+                Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L))
+        .thenReturn(
+            new AssertionInfo()
+                .setType(AssertionType.DATASET)
+                .setDatasetAssertion(
+                    new DatasetAssertionInfo()
+                        .setDataset(Urn.createFromString(TEST_DATASET_URN))
+                        .setScope(DatasetAssertionScope.DATASET_COLUMN)
+                        .setOperator(AssertionStdOperator.BETWEEN)));
 
     DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService);
 
@@ -177,17 +166,16 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true);
 
@@ -202,4 +190,4 @@ public void testGetEntityClientException() throws Exception {
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java
index c5b5725f23b7a..19152a7a11877 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.assertion;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -37,9 +39,6 @@ import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class EntityAssertionsResolverTest {
   @Test
   public void testGetSuccess() throws Exception {
@@ -49,73 +48,76 @@ public void testGetSuccess() throws Exception {
     Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)");
     Urn assertionUrn = Urn.createFromString("urn:li:assertion:test-guid");
 
-    Mockito.when(graphClient.getRelatedEntities(
-        Mockito.eq(datasetUrn.toString()),
-        Mockito.eq(ImmutableList.of("Asserts")),
-        Mockito.eq(RelationshipDirection.INCOMING),
-        Mockito.eq(0),
-        Mockito.eq(10),
-        Mockito.any())
-    ).thenReturn(
-        new EntityRelationships()
-            .setStart(0)
-            .setCount(1)
-            .setTotal(1)
-            .setRelationships(new EntityRelationshipArray(
-                ImmutableList.of(new EntityRelationship()
-                    .setEntity(assertionUrn)
-                    .setType("Asserts"))
-            ))
-    );
-
+    Mockito.when(
+            graphClient.getRelatedEntities(
+                Mockito.eq(datasetUrn.toString()),
+                Mockito.eq(ImmutableList.of("Asserts")),
+                Mockito.eq(RelationshipDirection.INCOMING),
+                Mockito.eq(0),
+                Mockito.eq(10),
+                Mockito.any()))
+        .thenReturn(
+            new EntityRelationships()
+                .setStart(0)
+                .setCount(1)
+                .setTotal(1)
+                .setRelationships(
+                    new EntityRelationshipArray(
                        ImmutableList.of(
+                            new EntityRelationship().setEntity(assertionUrn).setType("Asserts")))));
 
     Map<String, com.linkedin.entity.EnvelopedAspect> assertionAspects = new HashMap<>();
     assertionAspects.put(
         Constants.ASSERTION_KEY_ASPECT_NAME,
-        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(
-            new AssertionKey().setAssertionId("test-guid").data()
-        ))
-    );
+        new com.linkedin.entity.EnvelopedAspect()
+            .setValue(new Aspect(new AssertionKey().setAssertionId("test-guid").data())));
     assertionAspects.put(
         Constants.ASSERTION_INFO_ASPECT_NAME,
-        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(
-            new AssertionInfo()
-                .setType(AssertionType.DATASET)
-                .setDatasetAssertion(new DatasetAssertionInfo()
-                    .setDataset(datasetUrn)
-                    .setScope(DatasetAssertionScope.DATASET_COLUMN)
-                    .setAggregation(AssertionStdAggregation.MAX)
-                    .setOperator(AssertionStdOperator.EQUAL_TO)
-                    .setFields(new UrnArray(ImmutableList.of(
-                        Urn.createFromString("urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)")
-                    )))
-                    .setParameters(new AssertionStdParameters().setValue(new AssertionStdParameter()
-                        .setValue("10")
-                        .setType(
-                            AssertionStdParameterType.NUMBER)))
-                ).data()
-        ))
-    );
+        new com.linkedin.entity.EnvelopedAspect()
+            .setValue(
+                new Aspect(
+                    new AssertionInfo()
+                        .setType(AssertionType.DATASET)
+                        .setDatasetAssertion(
+                            new DatasetAssertionInfo()
+                                .setDataset(datasetUrn)
+                                .setScope(DatasetAssertionScope.DATASET_COLUMN)
+                                .setAggregation(AssertionStdAggregation.MAX)
+                                .setOperator(AssertionStdOperator.EQUAL_TO)
+                                .setFields(
+                                    new UrnArray(
+                                        ImmutableList.of(
+                                            Urn.createFromString(
+                                                "urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)"))))
+                                .setParameters(
+                                    new AssertionStdParameters()
+                                        .setValue(
+                                            new AssertionStdParameter()
+                                                .setValue("10")
+                                                .setType(AssertionStdParameterType.NUMBER))))
+                        .data())));
     assertionAspects.put(
         Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME,
-        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(
-            new DataPlatformInstance()
-                .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive"))
-                .data()
-        ))
-    );
-
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(assertionUrn)),
-        Mockito.eq(null),
-        Mockito.any(Authentication.class)
-    )).thenReturn(ImmutableMap.of(
-        assertionUrn,
-        new EntityResponse()
-            .setEntityName(Constants.ASSERTION_ENTITY_NAME)
-            .setUrn(assertionUrn)
-            .setAspects(new EnvelopedAspectMap(assertionAspects))));
+        new com.linkedin.entity.EnvelopedAspect()
+            .setValue(
+                new Aspect(
+                    new DataPlatformInstance()
+                        .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive"))
+                        .data())));
+
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.ASSERTION_ENTITY_NAME),
+                Mockito.eq(ImmutableSet.of(assertionUrn)),
+                Mockito.eq(null),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                assertionUrn,
+                new EntityResponse()
+                    .setEntityName(Constants.ASSERTION_ENTITY_NAME)
+                    .setUrn(assertionUrn)
+                    .setAspects(new EnvelopedAspectMap(assertionAspects))));
 
     EntityAssertionsResolver resolver = new EntityAssertionsResolver(mockClient, graphClient);
 
@@ -134,38 +136,45 @@ public void testGetSuccess() throws Exception {
     EntityAssertionsResult result = resolver.get(mockEnv).get();
 
-    Mockito.verify(graphClient, Mockito.times(1)).getRelatedEntities(
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any()
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).batchGetV2(
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any()
-    );
+    Mockito.verify(graphClient, Mockito.times(1))
+        .getRelatedEntities(
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any());
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchGetV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any());
 
     // Assert that GraphQL assertion run event matches expectations
     assertEquals(result.getStart(), 0);
     assertEquals(result.getCount(), 1);
     assertEquals(result.getTotal(), 1);
 
-    com.linkedin.datahub.graphql.generated.Assertion assertion = resolver.get(mockEnv).get().getAssertions().get(0);
+    com.linkedin.datahub.graphql.generated.Assertion assertion =
+        resolver.get(mockEnv).get().getAssertions().get(0);
     assertEquals(assertion.getUrn(), assertionUrn.toString());
     assertEquals(assertion.getType(), EntityType.ASSERTION);
     assertEquals(assertion.getPlatform().getUrn(), "urn:li:dataPlatform:hive");
-    assertEquals(assertion.getInfo().getType(), com.linkedin.datahub.graphql.generated.AssertionType.DATASET);
+    assertEquals(
+        assertion.getInfo().getType(),
+        com.linkedin.datahub.graphql.generated.AssertionType.DATASET);
     assertEquals(assertion.getInfo().getDatasetAssertion().getDatasetUrn(), datasetUrn.toString());
-    assertEquals(assertion.getInfo().getDatasetAssertion().getScope(), com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN);
-    assertEquals(assertion.getInfo().getDatasetAssertion().getAggregation(), com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX);
-    assertEquals(assertion.getInfo().getDatasetAssertion().getOperator(), com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO);
-    assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(),
+    assertEquals(
+        assertion.getInfo().getDatasetAssertion().getScope(),
+        com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN);
+    assertEquals(
+        assertion.getInfo().getDatasetAssertion().getAggregation(),
+        com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX);
+    assertEquals(
+        assertion.getInfo().getDatasetAssertion().getOperator(),
+        com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO);
+    assertEquals(
+        assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(),
         com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER);
-    assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10");
+    assertEquals(
+        assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10");
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java
index 52d06f73dcfab..419eb71d5e143 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.auth;
 
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -18,9 +20,6 @@ import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
 public class ListAccessTokensResolverTest {
 
   @Test
@@ -42,16 +41,22 @@ public void testGetSuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
     final Authentication testAuth = getAuthentication(mockEnv);
-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.eq(buildFilter(filters, Collections.emptyList())),
-        Mockito.any(SortCriterion.class),
-        Mockito.eq(input.getStart()),
-        Mockito.eq(input.getCount()),
-        Mockito.eq(testAuth),
-        Mockito.any(SearchFlags.class)))
-        .thenReturn(new SearchResult().setFrom(0).setNumEntities(0).setPageSize(0).setEntities(new SearchEntityArray()));
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.eq(buildFilter(filters, Collections.emptyList())),
+                Mockito.any(SortCriterion.class),
+                Mockito.eq(input.getStart()),
+                Mockito.eq(input.getCount()),
+                Mockito.eq(testAuth),
+                Mockito.any(SearchFlags.class)))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setNumEntities(0)
+                .setPageSize(0)
+                .setEntities(new SearchEntityArray()));
 
     final ListAccessTokensResolver resolver = new ListAccessTokensResolver(mockClient);
     final ListAccessTokenResult listAccessTokenResult = resolver.get(mockEnv).get();
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
index 4a948537ab4fe..bffc2b31af2b9 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.browse;
 
+import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
@@ -29,15 +31,12 @@ import com.linkedin.view.DataHubViewInfo;
 import com.linkedin.view.DataHubViewType;
 import graphql.schema.DataFetchingEnvironment;
+import java.util.ArrayList;
+import java.util.List;
 import org.mockito.Mockito;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import java.util.ArrayList;
-import java.util.List;
-
-import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext;
-
 public class BrowseV2ResolverTest {
 
   private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test");
@@ -46,23 +45,30 @@ public class BrowseV2ResolverTest {
   @Test
   public static void testBrowseV2Success() throws Exception {
     ViewService mockService = Mockito.mock(ViewService.class);
-    EntityClient mockClient = initMockEntityClient(
-        "dataset",
-        "␟test␟path",
-        "*",
-        null,
-        0,
-        10,
-        new BrowseResultV2()
-            .setNumGroups(2)
-            .setGroups(new BrowseResultGroupV2Array(
-                new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true),
-                new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false)
-            ))
-            .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100))
-            .setFrom(0)
-            .setPageSize(10)
-    );
+    EntityClient mockClient =
+        initMockEntityClient(
+            "dataset",
+            "␟test␟path",
+            "*",
+            null,
+            0,
+            10,
+            new BrowseResultV2()
+                .setNumGroups(2)
+                .setGroups(
+                    new BrowseResultGroupV2Array(
+                        new BrowseResultGroupV2()
+                            .setCount(5)
+                            .setName("first group")
+                            .setHasSubGroups(true),
+                        new BrowseResultGroupV2()
+                            .setCount(4)
+                            .setName("second group")
+                            .setHasSubGroups(false)))
+                .setMetadata(
+                    new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100))
+                .setFrom(0)
+                .setPageSize(10));
 
     final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService);
 
@@ -92,23 +98,30 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception {
     orFilters.add(andFilterInput);
     Filter filter = ResolverUtils.buildFilter(null, orFilters);
 
-    EntityClient mockClient = initMockEntityClient(
-        "dataset",
-        "␟test␟path",
-        "test",
-        filter,
-        0,
-        10,
-        new BrowseResultV2()
-            .setNumGroups(2)
-            .setGroups(new BrowseResultGroupV2Array(
-                new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true),
-                new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false)
-            ))
-            .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100))
-            .setFrom(0)
-            .setPageSize(10)
-    );
+    EntityClient mockClient =
+        initMockEntityClient(
+            "dataset",
+            "␟test␟path",
+            "test",
+            filter,
+            0,
+            10,
+            new BrowseResultV2()
+                .setNumGroups(2)
+                .setGroups(
+                    new BrowseResultGroupV2Array(
+                        new BrowseResultGroupV2()
+                            .setCount(5)
+                            .setName("first group")
+                            .setHasSubGroups(true),
+                        new BrowseResultGroupV2()
+                            .setCount(4)
+                            .setName("second group")
+                            .setHasSubGroups(false)))
+                .setMetadata(
+                    new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100))
+                .setFrom(0)
+                .setPageSize(10));
 
     final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService);
 
@@ -132,23 +145,30 @@ public static void testBrowseV2SuccessWithView() throws Exception {
     DataHubViewInfo viewInfo = createViewInfo(new StringArray());
     ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo);
 
-    EntityClient mockClient = initMockEntityClient(
-        "dataset",
-        "␟test␟path",
-        "*",
-        viewInfo.getDefinition().getFilter(),
-        0,
-        10,
-        new BrowseResultV2()
-            .setNumGroups(2)
-            .setGroups(new BrowseResultGroupV2Array(
-                new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true),
-                new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false)
-            ))
-            .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100))
-            .setFrom(0)
-            .setPageSize(10)
-    );
+    EntityClient mockClient =
+        initMockEntityClient(
+            "dataset",
+            "␟test␟path",
+            "*",
+            viewInfo.getDefinition().getFilter(),
+            0,
+            10,
+            new BrowseResultV2()
+                .setNumGroups(2)
+                .setGroups(
+                    new BrowseResultGroupV2Array(
+                        new BrowseResultGroupV2()
+                            .setCount(5)
+                            .setName("first group")
+                            .setHasSubGroups(true),
+                        new BrowseResultGroupV2()
+                            .setCount(4)
+                            .setName("second group")
+                            .setHasSubGroups(false)))
+                .setMetadata(
+                    new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100))
+                .setFrom(0)
+                .setPageSize(10));
 
     final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService);
 
@@ -166,16 +186,25 @@ public static void testBrowseV2SuccessWithView() throws Exception {
     compareResultToExpectedData(result, getExpectedResult());
   }
 
-  private static void compareResultToExpectedData(BrowseResultsV2 result, BrowseResultsV2 expected) {
+  private static void compareResultToExpectedData(
+      BrowseResultsV2 result, BrowseResultsV2 expected) {
     Assert.assertEquals(result.getCount(), expected.getCount());
     Assert.assertEquals(result.getStart(), expected.getStart());
     Assert.assertEquals(result.getTotal(), expected.getTotal());
     Assert.assertEquals(result.getGroups().size(), expected.getGroups().size());
-    result.getGroups().forEach(group -> {
-      Assert.assertTrue(expected.getGroups().stream().filter(g -> g.getName().equals(group.getName())).count() > 0);
-    });
+    result
+        .getGroups()
+        .forEach(
+            group -> {
+              Assert.assertTrue(
+                  expected.getGroups().stream()
+                          .filter(g -> g.getName().equals(group.getName()))
+                          .count()
+                      > 0);
+            });
     Assert.assertEquals(result.getMetadata().getPath(), expected.getMetadata().getPath());
-    Assert.assertEquals(result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities());
+    Assert.assertEquals(
+        result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities());
   }
 
   private static BrowseResultsV2 getExpectedResult() {
@@ -185,19 +214,22 @@ private static BrowseResultsV2 getExpectedResult() {
     results.setCount(10);
 
     List<com.linkedin.datahub.graphql.generated.BrowseResultGroupV2> groups = new ArrayList<>();
-    com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2();
+    com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 =
+        new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2();
     browseGroup1.setName("first group");
     browseGroup1.setCount(5L);
     browseGroup1.setHasSubGroups(true);
     groups.add(browseGroup1);
 
-    com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2();
+    com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 =
+        new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2();
     browseGroup2.setName("second group");
     browseGroup2.setCount(4L);
     browseGroup2.setHasSubGroups(false);
     groups.add(browseGroup2);
     results.setGroups(groups);
 
-    com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = new com.linkedin.datahub.graphql.generated.BrowseResultMetadata();
+    com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata =
+        new com.linkedin.datahub.graphql.generated.BrowseResultMetadata();
     resultMetadata.setPath(ImmutableList.of("test", "path"));
     resultMetadata.setTotalNumEntities(100L);
     results.setMetadata(resultMetadata);
@@ -212,60 +244,52 @@ private static EntityClient initMockEntityClient(
       Filter filter,
       int start,
       int limit,
-      BrowseResultV2 result
-  ) throws Exception {
+      BrowseResultV2 result)
+      throws Exception {
     EntityClient client = Mockito.mock(EntityClient.class);
-    Mockito.when(client.browseV2(
-        Mockito.eq(entityName),
-        Mockito.eq(path),
-        Mockito.eq(filter),
-        Mockito.eq(query),
-        Mockito.eq(start),
-        Mockito.eq(limit),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        result
-    );
+    Mockito.when(
+            client.browseV2(
+                Mockito.eq(entityName),
+                Mockito.eq(path),
+                Mockito.eq(filter),
+                Mockito.eq(query),
+                Mockito.eq(start),
+                Mockito.eq(limit),
+                Mockito.any(Authentication.class)))
+        .thenReturn(result);
     return client;
   }
 
-  private static ViewService initMockViewService(
-      Urn viewUrn,
-      DataHubViewInfo viewInfo
-  ) {
+  private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) {
    ViewService service = Mockito.mock(ViewService.class);
-    Mockito.when(service.getViewInfo(
-        Mockito.eq(viewUrn),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        viewInfo
-    );
+    Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)))
+        .thenReturn(viewInfo);
     return service;
   }
+
   private static DataHubViewInfo createViewInfo(StringArray entityNames) {
-    Filter viewFilter = new Filter()
-        .setOr(new ConjunctiveCriterionArray(
-            new ConjunctiveCriterion().setAnd(
-                new CriterionArray(ImmutableList.of(
-                    new Criterion()
-                        .setField("field")
-                        .setValue("test")
-                        .setValues(new StringArray(ImmutableList.of("test")))
-                ))
-            )));
+    Filter viewFilter =
+        new Filter()
+            .setOr(
+                new ConjunctiveCriterionArray(
+                    new ConjunctiveCriterion()
+                        .setAnd(
+                            new CriterionArray(
+                                ImmutableList.of(
+                                    new Criterion()
+                                        .setField("field")
+                                        .setValue("test")
+                                        .setValues(new StringArray(ImmutableList.of("test"))))))));
 
     DataHubViewInfo info = new DataHubViewInfo();
     info.setName("test");
     info.setType(DataHubViewType.GLOBAL);
     info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN));
     info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN));
-    info.setDefinition(new DataHubViewDefinition()
-        .setEntityTypes(entityNames)
-        .setFilter(viewFilter)
-    );
+    info.setDefinition(
+        new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter));
     return info;
   }
 
-  private BrowseV2ResolverTest() { }
-
+  private BrowseV2ResolverTest() {}
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java
index 659e6aea740ec..75abf1d48a15c 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.browse;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
@@ -16,12 +18,10 @@ import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class EntityBrowsePathsResolverTest {
 
-  private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -30,9 +30,7 @@ public void testGetSuccess() throws Exception {
     List<String> path = ImmutableList.of("prod", "mysql");
 
     Mockito.when(mockType.browsePaths(Mockito.eq(TEST_ENTITY_URN), Mockito.any()))
-        .thenReturn(ImmutableList.of(
-            new BrowsePath(path))
-        );
+        .thenReturn(ImmutableList.of(new BrowsePath(path)));
 
     // Execute resolver
     QueryContext mockContext = Mockito.mock(QueryContext.class);
@@ -55,9 +53,9 @@ public void testGetSuccess() throws Exception {
   @Test
   public void testGetBrowsePathsException() throws Exception {
     BrowsableEntityType mockType = Mockito.mock(BrowsableEntityType.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockType).browsePaths(
-        Mockito.any(),
-        Mockito.any());
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockType)
+        .browsePaths(Mockito.any(), Mockito.any());
 
     EntityBrowsePathsResolver resolver = new EntityBrowsePathsResolver(mockType);
 
@@ -75,4 +73,4 @@ public void testGetBrowsePathsException() throws Exception {
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java
index 39a08ca26167d..1203f4e22bdc2 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.container;
 
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
@@ -24,17 +26,10 @@ import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.*;
-
-
 public class ContainerEntitiesResolverTest {
 
-  private static final ContainerEntitiesInput TEST_INPUT = new ContainerEntitiesInput(
-      null,
-      0,
-      20,
-      Collections.emptyList()
-  );
+  private static final ContainerEntitiesInput TEST_INPUT =
+      new ContainerEntitiesInput(null, 0, 20, Collections.emptyList());
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -44,35 +39,39 @@ public void testGetSuccess() throws Exception {
     final String childUrn = "urn:li:dataset:(test,test,test)";
     final String containerUrn = "urn:li:container:test-container";
 
-    final Criterion filterCriterion = new Criterion()
-        .setField("container.keyword")
-        .setCondition(Condition.EQUAL)
-        .setValue(containerUrn);
+    final Criterion filterCriterion =
+        new Criterion()
+            .setField("container.keyword")
+            .setCondition(Condition.EQUAL)
+            .setValue(containerUrn);
 
-    Mockito.when(mockClient.searchAcrossEntities(
-        Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES),
-        Mockito.eq("*"),
-        Mockito.eq(
-            new Filter().setOr(new ConjunctiveCriterionArray(
-                new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion)))
-            ))
-        ),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.eq(null),
-        Mockito.eq(null),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(
-                new SearchEntity()
-                    .setEntity(Urn.createFromString(childUrn))
-            )))
-            .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))
-    );
+    Mockito.when(
+            mockClient.searchAcrossEntities(
+                Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES),
+                Mockito.eq("*"),
+                Mockito.eq(
+                    new Filter()
+                        .setOr(
+                            new ConjunctiveCriterionArray(
+                                new ConjunctiveCriterion()
+                                    .setAnd(
+                                        new CriterionArray(ImmutableList.of(filterCriterion)))))),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.eq(null),
+                Mockito.eq(null),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(
+                            new SearchEntity().setEntity(Urn.createFromString(childUrn)))))
+                .setMetadata(
+                    new SearchResultMetadata().setAggregations(new AggregationMetadataArray())));
 
     ContainerEntitiesResolver resolver = new ContainerEntitiesResolver(mockClient);
 
@@ -92,6 +91,7 @@ public void testGetSuccess() throws Exception {
     assertEquals((int) resolver.get(mockEnv).get().getCount(), 1);
     assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1);
     assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1);
-    assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn);
+    assertEquals(
+        resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn);
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java
index 92f8dfc4e1d67..b4c58ca182b2f 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.container;
 
+import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME;
+import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME;
+import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.container.Container;
@@ -14,18 +19,11 @@ import com.linkedin.entity.EnvelopedAspectMap;
 import com.linkedin.entity.client.EntityClient;
 import graphql.schema.DataFetchingEnvironment;
-import org.mockito.Mockito;
-import org.testng.annotations.Test;
-
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
-
-import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME;
-import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME;
-
-import static org.testng.Assert.*;
+import org.mockito.Mockito;
+import org.testng.annotations.Test;
 
 public class ParentContainersResolverTest {
   @Test
   public void testGetSuccess() throws Exception {
@@ -42,77 +40,88 @@ public void testGetSuccess() throws Exception {
     datasetEntity.setType(EntityType.DATASET);
     Mockito.when(mockEnv.getSource()).thenReturn(datasetEntity);
 
-    final Container parentContainer1 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container"));
-    final Container parentContainer2 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container2"));
+    final Container parentContainer1 =
+        new Container().setContainer(Urn.createFromString("urn:li:container:test-container"));
+    final Container parentContainer2 =
+        new Container().setContainer(Urn.createFromString("urn:li:container:test-container2"));
 
     Map<String, EnvelopedAspect> datasetAspects = new HashMap<>();
-    datasetAspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data())));
+    datasetAspects.put(
+        CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data())));
 
     Map<String, EnvelopedAspect> parentContainer1Aspects = new HashMap<>();
-    parentContainer1Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(
-        new ContainerProperties().setName("test_schema").data()
-    )));
-    parentContainer1Aspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(
-        parentContainer2.data()
-    )));
+    parentContainer1Aspects.put(
+        CONTAINER_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect()
+            .setValue(new Aspect(new ContainerProperties().setName("test_schema").data())));
+    parentContainer1Aspects.put(
+        CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer2.data())));
 
     Map<String, EnvelopedAspect> parentContainer2Aspects = new HashMap<>();
-    parentContainer2Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(
-        new ContainerProperties().setName("test_database").data()
-    )));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(datasetUrn.getEntityType()),
-        Mockito.eq(datasetUrn),
-        Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects)));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(parentContainer1.getContainer().getEntityType()),
-        Mockito.eq(parentContainer1.getContainer()),
-        Mockito.eq(null),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse()
-        .setEntityName(CONTAINER_ENTITY_NAME)
-        .setUrn(parentContainer1.getContainer())
-        .setAspects(new EnvelopedAspectMap(parentContainer1Aspects)));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(parentContainer1.getContainer().getEntityType()),
-        Mockito.eq(parentContainer1.getContainer()),
-        Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects)));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(parentContainer2.getContainer().getEntityType()),
-        Mockito.eq(parentContainer2.getContainer()),
-        Mockito.eq(null),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse()
-        .setEntityName(CONTAINER_ENTITY_NAME)
-        .setUrn(parentContainer2.getContainer())
-        .setAspects(new EnvelopedAspectMap(parentContainer2Aspects)));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(parentContainer2.getContainer().getEntityType()),
-        Mockito.eq(parentContainer2.getContainer()),
-        Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects)));
+    parentContainer2Aspects.put(
+        CONTAINER_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect()
+            .setValue(new Aspect(new ContainerProperties().setName("test_database").data())));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(datasetUrn.getEntityType()),
+                Mockito.eq(datasetUrn),
+                Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects)));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(parentContainer1.getContainer().getEntityType()),
+                Mockito.eq(parentContainer1.getContainer()),
+                Mockito.eq(null),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse()
+                .setEntityName(CONTAINER_ENTITY_NAME)
+                .setUrn(parentContainer1.getContainer())
+                .setAspects(new EnvelopedAspectMap(parentContainer1Aspects)));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(parentContainer1.getContainer().getEntityType()),
+                Mockito.eq(parentContainer1.getContainer()),
+                Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects)));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(parentContainer2.getContainer().getEntityType()),
+                Mockito.eq(parentContainer2.getContainer()),
+                Mockito.eq(null),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse()
+                .setEntityName(CONTAINER_ENTITY_NAME)
+                .setUrn(parentContainer2.getContainer())
+                .setAspects(new EnvelopedAspectMap(parentContainer2Aspects)));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(parentContainer2.getContainer().getEntityType()),
+                Mockito.eq(parentContainer2.getContainer()),
+                Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects)));
 
     ParentContainersResolver resolver = new ParentContainersResolver(mockClient);
     ParentContainersResult result = resolver.get(mockEnv).get();
 
-    Mockito.verify(mockClient, Mockito.times(5)).getV2(
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any()
-    );
+    Mockito.verify(mockClient, Mockito.times(5))
+        .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any());
     assertEquals(result.getCount(), 2);
-    assertEquals(result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString());
-    assertEquals(result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString());
+    assertEquals(
+        result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString());
+    assertEquals(
+        result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString());
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java
index 6a9617ea41b44..2abfa39b35149 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java
@@ -1,5 +1,7
@@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; @@ -28,9 +30,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - public class DashboardStatsSummaryTest { private static final Dashboard TEST_SOURCE = new Dashboard(); @@ -65,31 +64,35 @@ public void testGetSuccess() throws Exception { Assert.assertEquals((int) result.getUniqueUserCountLast30Days(), 2); // Validate the cache. -- First return a new result. - DashboardUsageStatistics newUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(40); - EnvelopedAspect newResult = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); + DashboardUsageStatistics newUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(40); + EnvelopedAspect newResult = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn(ImmutableList.of(newResult)); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(newResult)); // Then verify that the new result is _not_ returned (cache hit) DashboardStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getViewCount(), 20); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 2); } @@ -97,28 +100,27 @@ public void testGetSuccess() throws Exception { public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - 
); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DASHBOARD_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DASHBOARD_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -140,48 +142,46 @@ private TimeseriesAspectService initTestAspectService() { TimeseriesAspectService mockClient = Mockito.mock(TimeseriesAspectService.class); // Mock fetching the latest absolute (snapshot) statistics - DashboardUsageStatistics latestUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(20); - EnvelopedAspect envelopedLatestStats = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); + DashboardUsageStatistics latestUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(20); + EnvelopedAspect envelopedLatestStats = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn( - ImmutableList.of(envelopedLatestStats) - ); - - Mockito.when(mockClient.getAggregatedStats( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.any(), - Mockito.any(Filter.class), - Mockito.any() - )).thenReturn( - new GenericTable().setRows(new StringArrayArray( - new StringArray(ImmutableList.of( - TEST_USER_URN_1, "10", "20", "30", "1", "1", "1" - )), - new StringArray(ImmutableList.of( - TEST_USER_URN_2, "20", "30", "40", "1", "1", "1" - )) - )) - .setColumnNames(new StringArray()) - .setColumnTypes(new StringArray()) - ); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(envelopedLatestStats)); + + Mockito.when( + mockClient.getAggregatedStats( + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.any(), + Mockito.any(Filter.class), + Mockito.any())) + .thenReturn( + new GenericTable() + .setRows( + new StringArrayArray( + new StringArray( + ImmutableList.of(TEST_USER_URN_1, "10", "20", "30", "1", "1", "1")), + new StringArray( + 
ImmutableList.of(TEST_USER_URN_2, "20", "30", "40", "1", "1", "1")))) + .setColumnNames(new StringArray()) + .setColumnTypes(new StringArray())); return mockClient; } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java index ea9ab2a1b768b..3ff0120448e54 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataset; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; @@ -23,60 +25,54 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class DatasetHealthResolverTest { private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:test-guid-2"; - @Test public void testGetSuccessHealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts")) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new 
StringArrayArray( + ImmutableList.of( + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -103,20 +99,20 @@ public void testGetSuccessNullHealth() throws Exception { TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); // 0 associated assertions, meaning we don't report any health. - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(0) - .setRelationships(new EntityRelationshipArray(Collections.emptyList())) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -134,13 +130,9 @@ public void testGetSuccessNullHealth() throws Exception { List result = resolver.get(mockEnv).get(); assertEquals(result.size(), 0); - Mockito.verify(mockAspectService, Mockito.times(0)).getAggregatedStats( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockAspectService, Mockito.times(0)) + .getAggregatedStats( + Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); } @Test @@ -148,52 +140,47 @@ public void testGetSuccessUnhealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(2) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts"), - new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) - .setType("Asserts") - ) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )), - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN_2, "FAILURE", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + 
.setCount(0) + .setTotal(2) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"), + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray(ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")), + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN_2, "FAILURE", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java index 013e23b779c51..52516295f97ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java @@ -19,11 +19,11 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class DatasetStatsSummaryResolverTest { private static final Dataset TEST_SOURCE = new Dataset(); - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; private static final String TEST_USER_URN_1 = "urn:li:corpuser:test1"; private static final String TEST_USER_URN_2 = "urn:li:corpuser:test2"; @@ -35,28 +35,27 @@ public class DatasetStatsSummaryResolverTest { public void testGetSuccess() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(testResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(testResult); // Execute resolver DatasetStatsSummaryResolver 
resolver = new DatasetStatsSummaryResolver(mockClient); @@ -84,17 +83,19 @@ public void testGetSuccess() throws Exception { // Validate the cache. -- First return a new result. UsageQueryResult newResult = new UsageQueryResult(); newResult.setAggregations(new UsageQueryResultAggregations()); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(newResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(newResult); // Then verify that the new result is _not_ returned (cache hit) DatasetStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getQueryCountLast30Days(), 10); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 5); } @@ -102,28 +103,27 @@ public void testGetSuccess() throws Exception { public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index bae6f27a854bc..49ccc751d35f6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.delete; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static 
com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; @@ -14,39 +18,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateSoftDeletedResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -55,17 +56,21 @@ public void testGetSuccessNoExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(true); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); 
verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -76,16 +81,18 @@ public void testGetSuccessExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -96,17 +103,21 @@ public void testGetSuccessExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(false); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -115,15 +126,17 @@ public void testGetSuccessExistingStatus() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -134,7 +147,9 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,7 +166,9 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); @@ -165,20 +182,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index ce5a02bb573e1..8c3620fa978a9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; @@ -15,39 +19,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import 
org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingDeprecation() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -56,46 +57,57 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 0L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 0L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(0L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(0L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + 
Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - final Deprecation originalDeprecation = new Deprecation() - .setDeprecated(false) - .setNote("") - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setNote("") + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -106,23 +118,31 @@ public void testGetSuccessExistingDeprecation() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(1L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(1L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -131,15 +151,17 @@ public void testGetSuccessExistingDeprecation() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService 
mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -150,9 +172,14 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -168,9 +195,14 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -183,21 +215,29 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new 
ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index 5d30ae08d6dea..e4be330f5ba2a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -26,20 +30,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = new UpdateDeprecationInput( - TEST_ENTITY_URN, - true, - 0L, - "Test note" - ); + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = + new UpdateDeprecationInput(TEST_ENTITY_URN, true, 0L, "Test note"); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); @Test @@ -47,16 +43,19 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -71,41 +70,53 @@ public void 
testGetSuccessNoExistingDeprecation() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation().setDeprecated(true).setDecommissionTime(0L).setNote("Test note").setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - Deprecation originalDeprecation = new Deprecation().setDeprecated(false).setDecommissionTime(1L).setActor(TEST_ACTOR_URN).setNote(""); + Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setDecommissionTime(1L) + .setActor(TEST_ACTOR_URN) + .setNote(""); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDeprecation.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDeprecation.data()))))))); EntityService mockService = Mockito.mock(EntityService.class); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -120,24 +131,21 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setDecommissionTime(0L) - .setNote("Test note") - .setActor(TEST_ACTOR_URN); - final 
MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -145,16 +153,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DEPRECATION_ASPECT_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DEPRECATION_ASPECT_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = Mockito.mock(EntityService.class); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -169,9 +180,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -188,18 +198,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = Mockito.mock(EntityService.class); - 
Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); // Execute resolver @@ -210,4 +219,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index 8cd3c71a21555..d5ba88066e846 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.UrnArray; @@ -18,21 +22,17 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchSetDomainResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_DOMAIN_1_URN = "urn:li:domain:test-id-1"; private static final String TEST_DOMAIN_2_URN = "urn:li:domain:test-id-2"; @@ -40,19 +40,20 @@ public class BatchSetDomainResolverTest { public void testGetSuccessNoExistingDomains() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); 
@@ -64,46 +65,53 @@ public void testGetSuccessNoExistingDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -117,51 +125,58 @@ public void testGetSuccessExistingDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - 
new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); proposal1.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_1)); proposal1.setEntityType(Constants.DATASET_ENTITY_NAME); proposal1.setAspectName(Constants.DOMAINS_ASPECT_NAME); proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); } @Test public void testGetSuccessUnsetDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -175,19 +190,24 @@ public void testGetSuccessUnsetDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - 
new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -196,10 +216,11 @@ public void testGetSuccessUnsetDomains() throws Exception { public void testGetFailureDomainDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -210,9 +231,12 @@ public void testGetFailureDomainDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -224,15 +248,17 @@ public void testGetFailureDomainDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); 
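    // The failure-path tests above all share one shape: stub the lookup to return
    // null or false, then assert on join() rather than get(), because join() wraps
    // the cause in an unchecked CompletionException. A self-contained sketch of
    // that mechanism (names illustrative, not from this patch):
    //
    //   java.util.concurrent.CompletableFuture<Boolean> future =
    //       java.util.concurrent.CompletableFuture.supplyAsync(
    //           () -> {
    //             throw new RuntimeException("resource does not exist");
    //           });
    //   assertThrows(java.util.concurrent.CompletionException.class, future::join);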
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -244,9 +270,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -262,9 +291,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -277,21 +309,27 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java index 1ea84b99cfec3..8f86e33158ad5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java @@ -6,13 +6,12 @@ import 
com.linkedin.mxe.MetadataChangeProposal;
 import org.mockito.ArgumentMatcher;
 
-
 public class CreateDomainProposalMatcher implements ArgumentMatcher<MetadataChangeProposal> {
 
   private MetadataChangeProposal left;
 
   public CreateDomainProposalMatcher(MetadataChangeProposal left) {
-    this.left = left;
+    this.left = left;
   }
 
   @Override
@@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) {
   }
 
   private boolean domainPropertiesMatch(GenericAspect left, GenericAspect right) {
-    DomainProperties leftProps = GenericRecordUtils.deserializeAspect(
-        left.getValue(),
-        "application/json",
-        DomainProperties.class
-    );
-
-    DomainProperties rightProps = GenericRecordUtils.deserializeAspect(
-        right.getValue(),
-        "application/json",
-        DomainProperties.class
-    );
+    DomainProperties leftProps =
+        GenericRecordUtils.deserializeAspect(
+            left.getValue(), "application/json", DomainProperties.class);
+
+    DomainProperties rightProps =
+        GenericRecordUtils.deserializeAspect(
+            right.getValue(), "application/json", DomainProperties.class);
 
     // Omit timestamp comparison.
     return leftProps.getName().equals(rightProps.getName())
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java
index 560a3865ce9e1..6184760abfabd 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.domain;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -15,49 +19,35 @@
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.Constants;
+import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.metadata.key.DomainKey;
 import com.linkedin.metadata.search.SearchEntity;
 import com.linkedin.metadata.search.SearchEntityArray;
 import com.linkedin.metadata.search.SearchResult;
 import com.linkedin.metadata.utils.GenericRecordUtils;
-import com.linkedin.metadata.entity.EntityService;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.r2.RemoteInvocationException;
 import graphql.schema.DataFetchingEnvironment;
-
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME;
-import static org.testng.Assert.*;
-
-
 public class CreateDomainResolverTest {
 
   private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id");
   private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id");
 
-  private static final CreateDomainInput TEST_INPUT = new CreateDomainInput(
-      "test-id",
-      "test-name",
-      "test-description",
-      TEST_PARENT_DOMAIN_URN.toString()
-  );
+  private static final CreateDomainInput TEST_INPUT =
+      new CreateDomainInput(
+          "test-id", "test-name", "test-description", TEST_PARENT_DOMAIN_URN.toString());
 
-  private static
final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = + new CreateDomainInput("test-id", "test-name", "test-description", null); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public void testGetSuccess() throws Exception { // Create resolver @@ -65,15 +55,13 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(true); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -81,14 +69,17 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -107,11 +98,11 @@ public void testGetSuccess() throws Exception { proposal.setChangeType(ChangeType.UPSERT); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat(new CreateDomainProposalMatcher(proposal)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -120,24 +111,23 @@ public void testGetSuccessNoParentDomain() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_NO_PARENT_DOMAIN); 
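    // The verify above goes through CreateDomainProposalMatcher because the
    // proposal's DomainProperties aspect embeds a creation timestamp (see the
    // matcher's "Omit timestamp comparison" note), so Mockito.eq() on the whole
    // proposal would never match. A lambda ArgumentMatcher is the lighter-weight
    // variant when only one field matters -- a sketch, not code from this patch:
    //
    //   Mockito.verify(mockClient, Mockito.times(1))
    //       .ingestProposal(
    //           Mockito.argThat(
    //               (MetadataChangeProposal mcp) ->
    //                   DOMAIN_PROPERTIES_ASPECT_NAME.equals(mcp.getAspectName())),
    //           Mockito.any(Authentication.class),
    //           Mockito.eq(false));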
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -154,11 +144,11 @@ public void testGetSuccessNoParentDomain() throws Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(props)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat(new CreateDomainProposalMatcher(proposal)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -167,15 +157,13 @@ public void testGetInvalidParent() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -191,31 +179,32 @@ public void testGetNameConflict() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(true); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new 
SearchResult().setEntities(
-            new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN))
-    ));
+    Mockito.when(
+            mockClient.filter(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.eq(
+                    DomainUtils.buildNameAndParentDomainFilter(
+                        TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)),
+                Mockito.eq(null),
+                Mockito.any(Integer.class),
+                Mockito.any(Integer.class),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new SearchResult()
+                .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN))));
 
     DomainProperties domainProperties = new DomainProperties();
     domainProperties.setDescription(TEST_INPUT.getDescription());
@@ -225,18 +214,21 @@ public void testGetNameConflict() throws Exception {
 
     EntityResponse entityResponse = new EntityResponse();
     EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap();
-    envelopedAspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(domainProperties.data())));
+    envelopedAspectMap.put(
+        DOMAIN_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(domainProperties.data())));
     entityResponse.setAspects(envelopedAspectMap);
 
     Map<Urn, EntityResponse> entityResponseMap = new HashMap<>();
     entityResponseMap.put(TEST_DOMAIN_URN, entityResponse);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(Authentication.class)
-    )).thenReturn(entityResponseMap);
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DOMAIN_ENTITY_NAME),
+                Mockito.any(),
+                Mockito.any(),
+                Mockito.any(Authentication.class)))
+        .thenReturn(entityResponseMap);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
@@ -255,9 +247,8 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -265,9 +256,9 @@ public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class), Mockito.eq(false));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false));
     CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService);
 
     // Execute resolver
@@ -278,4 +269,4 @@
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java
index 9bcdbe6d2a0e0..5632654a26ad9 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java
@@ -1,5 +1,8 @@
 package
com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -10,10 +13,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteDomainResolverTest { private static final String TEST_URN = "urn:li:domain:test-id"; @@ -30,15 +29,21 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has 0 child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1), + Mockito.any())) .thenReturn(new SearchResult().setNumEntities(0)); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -53,14 +58,20 @@ public void testDeleteWithChildDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1), + Mockito.any())) .thenReturn(new SearchResult().setNumEntities(1)); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -76,8 +87,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 93fe3d0017160..9596abf55d04f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -26,18 +29,10 @@ 
import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.testng.Assert.*; - - public class DomainEntitiesResolverTest { - private static final DomainEntitiesInput TEST_INPUT = new DomainEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final DomainEntitiesInput TEST_INPUT = + new DomainEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -47,35 +42,42 @@ public void testGetSuccess() throws Exception { final String childUrn = "urn:li:dataset:(test,test,test)"; final String domainUrn = "urn:li:domain:test-domain"; - final Criterion filterCriterion = new Criterion() - .setField("domains.keyword") - .setCondition(Condition.EQUAL) - .setValue(domainUrn); + final Criterion filterCriterion = + new Criterion() + .setField("domains.keyword") + .setCondition(Condition.EQUAL) + .setValue(domainUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList())), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.eq( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList())), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); DomainEntitiesResolver resolver = new DomainEntitiesResolver(mockClient); @@ -95,6 +97,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index bd8a8f98de497..ffc3e823d8351 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -20,46 +25,43 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class ListDomainsResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id"); - private static final ListDomainsInput TEST_INPUT = new ListDomainsInput( - 0, 20, null, TEST_PARENT_DOMAIN_URN.toString() - ); + private static final ListDomainsInput TEST_INPUT = + new ListDomainsInput(0, 20, null, TEST_PARENT_DOMAIN_URN.toString()); - private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = new ListDomainsInput( - 0, 20, null, null - ); + private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = + new ListDomainsInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), + Mockito.eq( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -74,7 +76,8 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -82,22 +85,27 @@ public void testGetSuccessNoParentDomain() throws Exception { // Create resolver EntityClient mockClient = 
Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(null)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(null)), + Mockito.eq( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -112,7 +120,8 @@ public void testGetSuccessNoParentDomain() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -124,33 +133,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); // Execute resolver @@ 
-161,4 +172,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java index 4059c180b0eb0..a0eff5d0574db 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.CorpuserUrn; @@ -17,52 +22,51 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class MoveDomainResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; private static final String PARENT_DOMAIN_URN = "urn:li:domain:00005397daf94708a8822b8106cfd451"; private static final String DOMAIN_URN = "urn:li:domain:11115397daf94708a8822b8106cfd451"; private static final MoveDomainInput INPUT = new MoveDomainInput(PARENT_DOMAIN_URN, DOMAIN_URN); - private static final MoveDomainInput INVALID_INPUT = new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); + private static final MoveDomainInput INVALID_INPUT = + new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(name, Urn.createFromString(PARENT_DOMAIN_URN))), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - 
)).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + name, Urn.createFromString(PARENT_DOMAIN_URN))), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(name); properties.setParentDomain(Urn.createFromString(PARENT_DOMAIN_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); } @Test @@ -77,11 +81,11 @@ public void testGetSuccess() throws Exception { setupTests(mockEnv, mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -97,10 +101,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(null); MoveDomainResolver resolver = new MoveDomainResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java index 7bd7c3afac001..4c8ceff9c4f80 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,15 +16,11 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentDomainsResolverTest { @Test @@ -38,58 +37,68 @@ public void testGetSuccessForDomain() throws Exception { domainEntity.setType(EntityType.DOMAIN); Mockito.when(mockEnv.getSource()).thenReturn(domainEntity); - final DomainProperties parentDomain1 = new 
DomainProperties().setParentDomain(Urn.createFromString(
-        "urn:li:domain:11115397daf94708a8822b8106cfd451")
-    ).setName("test def");
-    final DomainProperties parentDomain2 = new DomainProperties().setParentDomain(Urn.createFromString(
-        "urn:li:domain:22225397daf94708a8822b8106cfd451")
-    ).setName("test def 2");
+    final DomainProperties parentDomain1 =
+        new DomainProperties()
+            .setParentDomain(Urn.createFromString("urn:li:domain:11115397daf94708a8822b8106cfd451"))
+            .setName("test def");
+    final DomainProperties parentDomain2 =
+        new DomainProperties()
+            .setParentDomain(Urn.createFromString("urn:li:domain:22225397daf94708a8822b8106cfd451"))
+            .setName("test def 2");
 
     Map<String, EnvelopedAspect> domainAspects = new HashMap<>();
-    domainAspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomain1.data())));
+    domainAspects.put(
+        DOMAIN_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect().setValue(new Aspect(parentDomain1.data())));
 
     Map<String, EnvelopedAspect> parentDomain1Aspects = new HashMap<>();
-    parentDomain1Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(
-        new DomainProperties().setName("domain parent 1").setParentDomain(parentDomain2.getParentDomain()).data()
-    )));
+    parentDomain1Aspects.put(
+        DOMAIN_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect()
+            .setValue(
+                new Aspect(
+                    new DomainProperties()
+                        .setName("domain parent 1")
+                        .setParentDomain(parentDomain2.getParentDomain())
+                        .data())));
 
     Map<String, EnvelopedAspect> parentDomain2Aspects = new HashMap<>();
-    parentDomain2Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(
-        new DomainProperties().setName("domain parent 2").data()
-    )));
+    parentDomain2Aspects.put(
+        DOMAIN_PROPERTIES_ASPECT_NAME,
+        new EnvelopedAspect()
+            .setValue(new Aspect(new DomainProperties().setName("domain parent 2").data())));
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(domainUrn.getEntityType()),
-        Mockito.eq(domainUrn),
-        Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects)));
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(domainUrn.getEntityType()),
+                Mockito.eq(domainUrn),
+                Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects)));
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(parentDomain1.getParentDomain().getEntityType()),
-        Mockito.eq(parentDomain1.getParentDomain()),
-        Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects)));
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(parentDomain1.getParentDomain().getEntityType()),
+                Mockito.eq(parentDomain1.getParentDomain()),
+                Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects)));
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(parentDomain2.getParentDomain().getEntityType()),
-        Mockito.eq(parentDomain2.getParentDomain()),
-        Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects)));
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(parentDomain2.getParentDomain().getEntityType()),
Mockito.eq(parentDomain2.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); ParentDomainsResolver resolver = new ParentDomainsResolver(mockClient); ParentDomainsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(3)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(3)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getDomains().get(0).getUrn(), parentDomain1.getParentDomain().toString()); assertEquals(result.getDomains().get(1).getUrn(), parentDomain2.getParentDomain().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index 92fb26288aa1d..ad5ad2315ce43 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; private static final String TEST_NEW_DOMAIN_URN = "urn:li:domain:test-id-2"; @@ -43,16 +43,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + 
.setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -68,47 +71,52 @@ public void testGetSuccessNoExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); 
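    // The EnvelopedAspectMap plumbing above is how these tests fake a GMS read:
    // the record's underlying DataMap is wrapped in an Aspect, which the resolver
    // re-hydrates on the other side. A sketch of the round trip (assuming the
    // usual Pegasus-generated DataMap constructor; not code from this patch):
    //
    //   Domains stored =
    //       new Domains().setDomains(new UrnArray(ImmutableList.of(someDomainUrn)));
    //   EnvelopedAspect envelope = new EnvelopedAspect().setValue(new Aspect(stored.data()));
    //   Domains readBack = new Domains(envelope.getValue().data());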
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -124,23 +132,21 @@ public void testGetSuccessExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test @@ -149,16 +155,19 @@ public void testGetFailureDomainDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -174,9 +183,8 @@ public void testGetFailureDomainDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -185,16 +193,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient 
= Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -210,9 +221,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -230,18 +240,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetDomainResolver resolver = new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetDomainResolver resolver = + new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -252,4 +262,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index decda39943dde..7e6e258168898 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UnsetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; @Test @@ -42,16 +42,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -66,43 +69,46 @@ public void testGetSuccessNoExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains 
originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -117,18 +123,15 @@ public void testGetSuccessExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -137,16 +140,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + 
mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -160,9 +166,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -179,18 +184,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + UnsetDomainResolver resolver = + new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -200,4 +205,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index f1d44fcb47255..45a17744a2697 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -25,29 +29,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static 
org.testng.Assert.*;
-
-
 public class UpdateEmbedResolverTest {
 
   private static final String TEST_ENTITY_URN = "urn:li:dashboard:(looker,1)";
   private static final String TEST_RENDER_URL = "https://www.google.com";
-  private static final UpdateEmbedInput TEST_EMBED_INPUT = new UpdateEmbedInput(
-      TEST_ENTITY_URN,
-      TEST_RENDER_URL
-  );
+  private static final UpdateEmbedInput TEST_EMBED_INPUT =
+      new UpdateEmbedInput(TEST_ENTITY_URN, TEST_RENDER_URL);
 
   private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test");
 
   @Test
   public void testGetSuccessNoExistingEmbed() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
-        Mockito.eq(EMBED_ASPECT_NAME),
-        Mockito.eq(0L))).thenReturn(null);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
+                Mockito.eq(EMBED_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
 
@@ -62,14 +61,14 @@ public void testGetSuccessNoExistingEmbed() throws Exception {
     resolver.get(mockEnv).get();
 
     final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN),
-        EMBED_ASPECT_NAME, newEmbed);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed);
 
-    verifySingleIngestProposal(mockService, 1, proposal);;
+    verifySingleIngestProposal(mockService, 1, proposal);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
@@ -79,10 +78,12 @@ public void testGetSuccessExistingEmbed() throws Exception {
     // Create resolver
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
-        Mockito.eq(EMBED_ASPECT_NAME),
-        Mockito.eq(0L))).thenReturn(originalEmbed);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
+                Mockito.eq(EMBED_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(originalEmbed);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
 
@@ -97,14 +98,14 @@ public void testGetSuccessExistingEmbed() throws Exception {
     resolver.get(mockEnv).get();
 
     final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN),
-        EMBED_ASPECT_NAME, newEmbed);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed);
 
     verifySingleIngestProposal(mockService, 1, proposal);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
@@ -112,16 +113,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-        Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN),
-            new EntityResponse()
-                .setEntityName(Constants.DASHBOARD_ENTITY_NAME)
-                .setUrn(Urn.createFromString(TEST_ENTITY_URN))
-                .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                Urn.createFromString(TEST_ENTITY_URN),
+                new EntityResponse()
+                    .setEntityName(Constants.DASHBOARD_ENTITY_NAME)
+                    .setUrn(Urn.createFromString(TEST_ENTITY_URN))
+                    .setAspects(new EnvelopedAspectMap(Collections.emptyMap()))));
 
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
 
@@ -136,11 +140,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );;
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
   @Test
@@ -156,20 +158,18 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
    assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
     EntityService mockService = getMockEntityService();
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
     UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService);
 
     // Execute resolver
@@ -180,4 +180,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java
index cde2739b2bcc6..fa8b1d6a747ca 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java
@@ -1,14 +1,13 @@
 package com.linkedin.datahub.graphql.resolvers.entity;
 
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*; + import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EntityExistsResolverTest { private static final String ENTITY_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java index 913ea4602faf0..d9d5e643057ce 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Chart; @@ -14,14 +17,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class EntityPrivilegesResolverTest { final String glossaryTermUrn = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; @@ -29,7 +28,8 @@ public class EntityPrivilegesResolverTest { final String datasetUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"; final String chartUrn = "urn:li:chart:(looker,baz1)"; final String dashboardUrn = "urn:li:dashboard:(looker,dashboards.1)"; - final String dataJobUrn = "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; + final String dataJobUrn = + "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; private DataFetchingEnvironment setUpTestWithPermissions(Entity entity) { QueryContext mockContext = getMockAllowContext(); @@ -115,11 +115,13 @@ public void testGetFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); DataFetchingEnvironment mockEnv = setUpTestWithoutPermissions(glossaryNode); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class)); EntityPrivilegesResolver resolver = new EntityPrivilegesResolver(mockClient); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 26c13186c4a81..287d270ab569c 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,15 +14,9 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.ExecutionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.*; - - public class AddRelatedTermsResolverTest { private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; @@ -28,10 +26,11 @@ public class AddRelatedTermsResolverTest { private EntityService setUpService() { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); return mockService; } @@ -48,24 +47,22 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test @@ -80,24 +77,22 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + 
ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test @@ -110,9 +105,9 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_ENTITY_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -130,9 +125,9 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - DATASET_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,9 +146,9 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -172,9 +167,9 @@ public void testGetFailAddToNonExistentUrn() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -193,9 +188,9 @@ public void testGetFailAddToNonTerm() throws Exception { QueryContext 
mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(DATASET_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -215,15 +210,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 3b47514d87181..2a36d77716ab7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,39 +11,27 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; -import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static com.linkedin.metadata.Constants.*; - - public class CreateGlossaryNodeResolverTest { - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - 
"test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -47,8 +39,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_NODE_ENTITY_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, props); } @Test @@ -72,16 +64,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -89,16 +79,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,15 +94,13 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService 
mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 2dbe637d16057..6653b19d6ef2b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -1,61 +1,53 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static com.linkedin.metadata.Constants.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class CreateGlossaryTermResolverTest { private static final String EXISTING_TERM_URN = "urn:li:glossaryTerm:testing12345"; - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - 
"urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -80,8 +72,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_TERM_ENTITY_NAME, GLOSSARY_TERM_INFO_ASPECT_NAME, props); } @Test @@ -89,16 +81,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,16 +96,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - 
Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -123,16 +111,14 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -140,73 +126,71 @@ public void testGetFailureExistingTermSameName() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))) - )); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))))); Map result = new HashMap<>(); EnvelopedAspectMap map = new EnvelopedAspectMap(); GlossaryTermInfo termInfo = new GlossaryTermInfo().setName("Duplicated Name"); - map.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); + map.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); result.put(UrnUtils.getUrn(EXISTING_TERM_URN), new EntityResponse().setAspects(map)); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(result); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(result); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - CreateGlossaryEntityInput input = new CreateGlossaryEntityInput( - "test-id", - "Duplicated Name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); + CreateGlossaryEntityInput input = + new CreateGlossaryEntityInput( + "test-id", + "Duplicated Name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); setupTest(mockEnv, input, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, 
Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private EntityClient initMockClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(new HashMap<>()); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(new HashMap<>()); return mockClient; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index 94f0d0b7a1143..7229d2acf763d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,19 +12,14 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class DeleteGlossaryEntityResolverTest { - private static final String TEST_TERM_URN = "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; + private static final String TEST_TERM_URN = + "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; @Test public void testGetSuccess() throws Exception { @@ -33,26 +33,27 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_TERM_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); 
assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_TERM_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_TERM_URN)), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index 677516e9404e8..b879baf1e65dc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,16 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryNodesResolverTest { - final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput( - 0, 100 - ); + final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryNodeUrn1 = "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451"; final String glossaryNodeUrn2 = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -42,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + 
Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryNodesResolver resolver = new GetRootGlossaryNodesResolver(mockClient); GetRootGlossaryNodesResult result = resolver.get(mockEnv).get(); @@ -64,24 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); - assertEquals(result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); + assertEquals( + result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); + assertEquals( + result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 5aba32108b7db..201bea752d53f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,14 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryTermsResolverTest { final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryTermUrn1 = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; final String glossaryTermUrn2 = "urn:li:glossaryTerm:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -40,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new 
SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryTermsResolver resolver = new GetRootGlossaryTermsResolver(mockClient); GetRootGlossaryTermsResult result = resolver.get(mockEnv).get(); @@ -62,23 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); - assertEquals(result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); + assertEquals( + result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); + assertEquals( + result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java index 8bfc32e1999ae..969fda541d6a6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java @@ -1,33 +1,32 @@ package com.linkedin.datahub.graphql.resolvers.glossary; -import com.google.common.collect.ImmutableSet; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import 
com.linkedin.entity.Aspect; -import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.Constants; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.Optional; -import java.util.Map; -import java.util.HashMap; - -import static org.testng.Assert.*; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; - public class GlossaryUtilsTest { private final String userUrn = "urn:li:corpuser:authorized"; @@ -44,67 +43,87 @@ private void setUpTests() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(userUrn); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node2") - ); - GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node3") - ); - + GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node2")); + GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node3")); + GlossaryNodeInfo parentNode3 = new GlossaryNodeInfo(); - + Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode1.getParentNode()).data() - ))); - + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode1.getParentNode()) + .data()))); + Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 2") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode3Aspects = new HashMap<>(); - parentNode3Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 3").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn1), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn2), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - 
Mockito.eq(parentNodeUrn3), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); - - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + parentNode3Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 3").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn1), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn2), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn3), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); + + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); } - private void mockAuthRequest(String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { - final AuthorizationRequest authorizationRequest = new AuthorizationRequest( - userUrn, - privilege, - resourceSpec != null ? Optional.of(resourceSpec) : Optional.empty() - ); + private void mockAuthRequest( + String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { + final AuthorizationRequest authorizationRequest = + new AuthorizationRequest( + userUrn, + privilege, + resourceSpec != null ? 
Optional.of(resourceSpec) : Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(allowOrDeny); Mockito.when(mockAuthorizer.authorize(Mockito.eq(authorizationRequest))).thenReturn(result); @@ -150,7 +169,8 @@ public void testCanManageChildrenEntitiesAuthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn, mockClient)); @@ -162,7 +182,8 @@ public void testCanManageChildrenEntitiesUnauthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); @@ -175,13 +196,16 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorized() throws Exceptio // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -193,13 +217,16 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorized() throws Except // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), 
parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -211,10 +238,12 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorizedLevel2() throws Ex // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -226,10 +255,12 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorizedLevel2() throws // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn2, mockClient)); @@ -241,7 +272,8 @@ public void testCanManageChildrenRecursivelyEntitiesNoLevel2() throws Exception // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn3, mockClient)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 06dff7611fac8..446f58bec73aa 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -16,17 +21,11 @@ import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentNodesResolverTest { @Test @@ -43,76 +42,94 @@ public void testGetSuccessForTerm() throws Exception { termEntity.setType(EntityType.GLOSSARY_TERM); Mockito.when(mockEnv.getSource()).thenReturn(termEntity); - final GlossaryTermInfo parentNode1 = new GlossaryTermInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryTermInfo parentNode1 = + new GlossaryTermInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map glossaryTermAspects = new HashMap<>(); - glossaryTermAspects.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryTermAspects.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(termUrn.getEntityType()), - Mockito.eq(termUrn), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new 
EnvelopedAspectMap(glossaryTermAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(termUrn.getEntityType()), + Mockito.eq(termUrn), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - 
Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); @@ -132,78 +149,96 @@ public void testGetSuccessForNode() throws Exception { nodeEntity.setType(EntityType.GLOSSARY_NODE); Mockito.when(mockEnv.getSource()).thenReturn(nodeEntity); - final GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map glossaryNodeAspects = new HashMap<>(); - glossaryNodeAspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryNodeAspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(nodeUrn.getEntityType()), - Mockito.eq(nodeUrn), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - 
Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(nodeUrn.getEntityType()), + Mockito.eq(nodeUrn), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 3906d1188cb17..47de668b2c9dc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; @@ -12,15 +16,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Arrays; import java.util.concurrent.ExecutionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RemoveRelatedTermsResolverTest { @@ -35,10 +34,11 @@ public void testGetSuccessIsA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -47,17 +47,16 @@ public void testGetSuccessIsA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -67,10 +66,11 @@ public void testGetSuccessHasA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -79,26 +79,26 @@ public void testGetSuccessHasA() throws Exception { QueryContext mockContext = 
getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testFailAspectDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -107,9 +107,9 @@ public void testFailAspectDoesNotExist() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -124,10 +124,11 @@ public void testFailNoPermissions() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -136,16 +137,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); - Mockito.verify(mockService, Mockito.times(0)).exists( - 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(0)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index eee9cfbae8fcb..3972715fcefb1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; @@ -19,16 +23,10 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateNameResolverTest { private static final String NEW_NAME = "New Name"; @@ -40,23 +38,23 @@ public class UpdateNameResolverTest { private static final UpdateNameInput INPUT_FOR_DOMAIN = new UpdateNameInput(NEW_NAME, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(NEW_NAME); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -88,16 +86,16 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new 
GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); @@ -118,25 +116,27 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index a78c28890fecf..74a59b10a40b0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -15,45 +20,43 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static 
com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateParentNodeResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; - private static final String PARENT_NODE_URN = "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; + private static final String PARENT_NODE_URN = + "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; private static final String TERM_URN = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; private static final String NODE_URN = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - private static final UpdateParentNodeInput INPUT = new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); - private static final UpdateParentNodeInput INPUT_WITH_NODE = new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); - private static final UpdateParentNodeInput INVALID_INPUT = new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT = + new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT_WITH_NODE = + new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); + private static final UpdateParentNodeInput INVALID_INPUT = + new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -61,7 +64,8 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -77,7 +81,8 @@ public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); 
Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -87,17 +92,17 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService, mockClient); @@ -110,7 +115,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -126,7 +132,8 @@ public void testGetFailureNodeDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(false); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -142,7 +149,8 @@ public void testGetFailureParentIsNotNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java index a20c84d11ba9f..19d9dd20d3f80 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AddGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java index 876de633bd656..a29680a6de52d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateGroupResolverTest { private static final String GROUP_ID = "id"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java index 73b0be96fce17..601d5e08a4233 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class RemoveGroupMembersResolverTest { private static final String GROUP_URN_STRING = 
"urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index dae0758f6a2f6..e5cb43c4dab61 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -21,15 +23,14 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.Mockito; -import static org.testng.Assert.*; - - public class IngestTestUtils { - public static final Urn TEST_INGESTION_SOURCE_URN = Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); - public static final Urn TEST_SECRET_URN = Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); - public static final Urn TEST_EXECUTION_REQUEST_URN = Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); - + public static final Urn TEST_INGESTION_SOURCE_URN = + Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); + public static final Urn TEST_SECRET_URN = + Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); + public static final Urn TEST_EXECUTION_REQUEST_URN = + Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); public static QueryContext getMockAllowContext() { QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -63,8 +64,13 @@ public static DataHubIngestionSourceInfo getTestIngestionSourceInfo() { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setName("My Test Source"); info.setType("mysql"); - info.setSchedule(new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); - info.setConfig(new DataHubIngestionSourceConfig().setVersion("0.8.18").setRecipe("{}").setExecutorId("executor id")); + info.setSchedule( + new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); + info.setConfig( + new DataHubIngestionSourceConfig() + .setVersion("0.8.18") + .setRecipe("{}") + .setExecutorId("executor id")); return info; } @@ -78,15 +84,18 @@ public static DataHubSecretValue getTestSecretValue() { public static ExecutionRequestInput getTestExecutionRequestInput() { ExecutionRequestInput input = new ExecutionRequestInput(); - input.setArgs(new StringMap( - ImmutableMap.of( - "recipe", "my-custom-recipe", - "version", "0.8.18") - )); + input.setArgs( + new StringMap( + ImmutableMap.of( + "recipe", "my-custom-recipe", + "version", "0.8.18"))); input.setTask("RUN_INGEST"); input.setExecutorId("default"); input.setRequestedAt(0L); - input.setSource(new ExecutionRequestSource().setIngestionSource(TEST_INGESTION_SOURCE_URN).setType("SCHEDULED_INGESTION")); + input.setSource( + new ExecutionRequestSource() + .setIngestionSource(TEST_INGESTION_SOURCE_URN) + .setType("SCHEDULED_INGESTION")); return input; } @@ -99,7 +108,8 @@ public static ExecutionRequestResult getTestExecutionRequestResult() { return result; } - public static void verifyTestIngestionSourceGraphQL(IngestionSource ingestionSource, DataHubIngestionSourceInfo 
info) { + public static void verifyTestIngestionSourceGraphQL( + IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { assertEquals(ingestionSource.getUrn(), TEST_INGESTION_SOURCE_URN.toString()); assertEquals(ingestionSource.getName(), info.getName()); assertEquals(ingestionSource.getType(), info.getType()); @@ -134,5 +144,5 @@ public static void verifyTestExecutionRequest( assertEquals(executionRequest.getResult().getStartTimeMs(), result.getStartTimeMs()); } - private IngestTestUtils() { } + private IngestTestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 12045b9361469..3de88333b959d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -7,7 +9,6 @@ import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; public class IngestionAuthUtilsTest { @@ -16,11 +17,9 @@ public void testCanManageIngestionAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:authorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -37,11 +36,9 @@ public void testCanManageIngestionUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -58,11 +55,8 @@ public void testCanManageSecretsAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest("urn:li:corpuser:authorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -79,11 +73,9 @@ public void testCanManageSecretsUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = 
Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java index e7226c6e4db08..3d0c24b9aa022 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -18,35 +21,36 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CancelIngestionExecutionRequestResolverTest { - private static final CancelIngestionExecutionRequestInput TEST_INPUT = new CancelIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString(), - TEST_EXECUTION_REQUEST_URN.toString() - ); + private static final CancelIngestionExecutionRequestInput TEST_INPUT = + new CancelIngestionExecutionRequestInput( + TEST_INGESTION_SOURCE_URN.toString(), TEST_EXECUTION_REQUEST_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new 
Aspect(getTestIngestionSourceInfo().data()))))))); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -57,18 +61,19 @@ Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( resolver.get(mockEnv).get(); // Verify ingest proposal has been called to create a Signal request. - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -77,19 +82,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java index 7973e49c6efdf..18ce1d8c27955 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -12,6 +14,7 @@ import 
com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -19,35 +22,37 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateIngestionExecutionRequestResolverTest { - private static final CreateIngestionExecutionRequestInput TEST_INPUT = new CreateIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString() - ); + private static final CreateIngestionExecutionRequestInput TEST_INPUT = + new CreateIngestionExecutionRequestInput(TEST_INGESTION_SOURCE_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_INGESTION_SOURCE_URN, - new EntityResponse().setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -58,11 +63,11 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -71,7 +76,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); 
ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -80,21 +86,21 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -105,4 +111,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java index 75df240441965..eaf3186524721 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java @@ -1,25 +1,22 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestConnectionRequestResolverTest { - private static final CreateTestConnectionRequestInput TEST_INPUT = new 
CreateTestConnectionRequestInput( - "{}", - "0.8.44" - ); + private static final CreateTestConnectionRequestInput TEST_INPUT = + new CreateTestConnectionRequestInput("{}", "0.8.44"); @Test public void testGetSuccess() throws Exception { @@ -27,7 +24,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -37,11 +35,11 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -50,7 +48,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -59,9 +58,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java index 532b9b89f3a99..268f8b8927b67 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -20,9 +23,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetIngestionExecutionRequestResolverTest { @Test @@ 
-33,32 +33,48 @@ public void testGetSuccess() throws Exception { ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -69,7 +85,8 @@ public void testGetSuccess() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -78,7 +95,9 @@ public void testGetUnauthorized() throws Exception { 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test @@ -87,13 +106,16 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) - .batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index 25f3ccbd47cd6..fdb150e692441 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,10 +31,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class IngestionSourceExecutionRequestsResolverTest { @Test @@ -40,49 +39,65 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Mock filter response - Mockito.when(mockClient.filter( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.any(Filter.class), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) - .thenReturn(new SearchResult() - .setFrom(0) - .setPageSize(10) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableList.of( - new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN)))) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.any(Filter.class), + Mockito.any(SortCriterion.class), + 
Mockito.eq(0), + Mockito.eq(10), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(10) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN))))); // Mock batch get response ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -99,14 +114,16 @@ public void testGetSuccess() throws Exception { assertEquals((int) executionRequests.getStart(), 0); assertEquals((int) executionRequests.getCount(), 10); assertEquals((int) executionRequests.getTotal(), 1); - verifyTestExecutionRequest(executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); + verifyTestExecutionRequest( + executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + 
IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -119,29 +136,28 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getSource()).thenReturn(parentSource); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).list( - Mockito.any(), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .list( + Mockito.any(), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index c7a72e475f7ab..bec141bddf260 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -8,10 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class RollbackIngestionResolverTest { private static final String RUN_ID = "testRunId"; @@ -46,9 +45,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .rollbackIngestion(Mockito.eq(RUN_ID), 
Mockito.any(Authentication.class)); } @Test @@ -59,24 +57,22 @@ public void testRollbackIngestionMethod() throws Exception { QueryContext mockContext = getMockAllowContext(); resolver.rollbackIngestion(RUN_ID, mockContext).get(); - Mockito.verify(mockClient, Mockito.times(1)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).rollbackIngestion( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class)); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); QueryContext mockContext = getMockAllowContext(); - assertThrows(RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); + assertThrows( + RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java index 2d64d4ec56ba1..85ef304d28533 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java @@ -6,7 +6,6 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.ArgumentMatcher; - public class CreateSecretResolverMatcherTest implements ArgumentMatcher { private MetadataChangeProposal left; @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } private boolean secretPropertiesMatch(GenericAspect left, GenericAspect right) { - DataHubSecretValue leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DataHubSecretValue.class - ); - - DataHubSecretValue rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - DataHubSecretValue.class - ); + DataHubSecretValue leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DataHubSecretValue.class); + + DataHubSecretValue rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DataHubSecretValue.class); // Omit timestamp comparison. 
return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java index 18ae71661318e..eafdfde364947 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; @@ -20,24 +22,18 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateSecretResolverTest { - private static final CreateSecretInput TEST_INPUT = new CreateSecretInput( - "MY_SECRET", - "mysecretvalue", - "none" - ); + private static final CreateSecretInput TEST_INPUT = + new CreateSecretInput("MY_SECRET", "mysecretvalue", "none"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))).thenReturn("encryptedvalue"); + Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))) + .thenReturn("encryptedvalue"); CreateSecretResolver resolver = new CreateSecretResolver(mockClient, mockSecretService); // Execute resolver @@ -57,18 +53,21 @@ public void testGetSuccess() throws Exception { value.setValue("encryptedvalue"); value.setName(TEST_INPUT.getName()); value.setDescription(TEST_INPUT.getDescription()); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateSecretResolverMatcherTest(new MetadataChangeProposal() - .setChangeType(ChangeType.UPSERT) - .setEntityType(Constants.SECRETS_ENTITY_NAME) - .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) - .setAspect(GenericRecordUtils.serializeAspect(value)) - .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + value.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat( + new CreateSecretResolverMatcherTest( + new MetadataChangeProposal() + .setChangeType(ChangeType.UPSERT) + .setEntityType(Constants.SECRETS_ENTITY_NAME) + .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) + .setAspect(GenericRecordUtils.serializeAspect(value)) + .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -80,23 +79,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver @@ -108,4 +105,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java index 679425afbf2e7..7cfe33feb58fc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -8,9 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteSecretResolverTest { @Test @@ -26,7 +26,8 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_SECRET_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); } @Test @@ -42,14 +43,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_SECRET_URN), 
Mockito.any(Authentication.class)); DeleteSecretResolver resolver = new DeleteSecretResolver(mockClient); // Execute Resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java index 0042d34e602cc..495adb27dbd5d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -22,14 +25,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetSecretValuesResolverTest { - private static final GetSecretValuesInput TEST_INPUT = new GetSecretValuesInput( - ImmutableList.of(getTestSecretValue().getName()) - ); + private static final GetSecretValuesInput TEST_INPUT = + new GetSecretValuesInput(ImmutableList.of(getTestSecretValue().getName())); @Test public void testGetSuccess() throws Exception { @@ -39,27 +38,29 @@ public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))).thenReturn(decryptedSecretValue); + Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))) + .thenReturn(decryptedSecretValue); DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); @@ -90,22 +91,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> 
resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); SecretService mockSecretService = Mockito.mock(SecretService.class); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index ad91c214db28f..7d89f4aafa01a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,15 +26,9 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListSecretsResolverTest { - private static final ListSecretsInput TEST_INPUT = new ListSecretsInput( - 0, 20, null - ); + private static final ListSecretsInput TEST_INPUT = new ListSecretsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { @@ -40,40 +37,43 @@ public void testGetSuccess() throws Exception { DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.search( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + 
mockClient.search( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(SortCriterion.class), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver @@ -99,36 +99,33 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(SortCriterion.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java index c898ae7280710..5172ef01c25eb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -8,9 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteIngestionSourceResolverTest { @Test @@ -22,11 +22,13 @@ public void testGetSuccess() throws Exception { // execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_INGESTION_SOURCE_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); } @Test @@ -38,24 +40,29 @@ public void testGetUnauthorized() throws Exception { // Execute resolver QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); // Execute Resolver QueryContext mockContext = getMockAllowContext(); DeleteIngestionSourceResolver resolver = new DeleteIngestionSourceResolver(mockClient); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java index ebafd1782e000..bda18961d3890 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -14,13 +17,9 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; import java.util.HashSet; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.assertThrows; - public class GetIngestionSourceResolverTest { @Test @@ -30,29 +29,31 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo returnedInfo = getTestIngestionSourceInfo(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInfo.data()))))))); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -72,28 +73,26 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), 
Mockito.anySet(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java index 8e2453ce06a39..a86d67fcd15c1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,13 +26,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListIngestionSourceResolverTest { - private static final ListIngestionSourcesInput TEST_INPUT = new ListIngestionSourcesInput(0, 20, null, null); + private static final ListIngestionSourcesInput TEST_INPUT = + new ListIngestionSourcesInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { @@ -40,41 +40,47 @@ public void testGetSuccess() throws Exception { final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(""), - Mockito.any(), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - 
TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), - Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(key.data())) - ))) - ) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(key.data()))))))); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver @@ -88,7 +94,8 @@ public void testGetSuccess() throws Exception { assertEquals(resolver.get(mockEnv).get().getCount(), 1); assertEquals(resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getIngestionSources().size(), 1); - verifyTestIngestionSourceGraphQL(resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); + verifyTestIngestionSourceGraphQL( + resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); } @Test @@ -100,35 +107,32 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new 
SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java index 16d8da9169a8f..8213a5fb61a55 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceConfigInput; @@ -15,19 +19,16 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpsertIngestionSourceResolverTest { - private static final UpdateIngestionSourceInput TEST_INPUT = new UpdateIngestionSourceInput( - "Test source", - "mysql", "Test source description", - new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), - new UpdateIngestionSourceConfigInput("my test recipe", "0.8.18", "executor id", false, null) - ); + private static final UpdateIngestionSourceInput TEST_INPUT = + new UpdateIngestionSourceInput( + "Test source", + "mysql", + "Test source description", + new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), + new UpdateIngestionSourceConfigInput( + "my test recipe", "0.8.18", "executor id", false, null)); @Test public void testGetSuccess() throws Exception { @@ -38,7 +39,8 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -48,24 +50,24 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setType(TEST_INPUT.getType()); info.setName(TEST_INPUT.getName()); - info.setSchedule(new DataHubIngestionSourceSchedule() - .setInterval(TEST_INPUT.getSchedule().getInterval()) - 
.setTimezone(TEST_INPUT.getSchedule().getTimezone()) - ); - info.setConfig(new DataHubIngestionSourceConfig() - .setRecipe(TEST_INPUT.getConfig().getRecipe()) - .setVersion(TEST_INPUT.getConfig().getVersion()) - .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) - .setDebugMode(TEST_INPUT.getConfig().getDebugMode()) - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(MutationUtils.buildMetadataChangeProposalWithUrn(TEST_INGESTION_SOURCE_URN, - INGESTION_INFO_ASPECT_NAME, info) - ), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + info.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval(TEST_INPUT.getSchedule().getInterval()) + .setTimezone(TEST_INPUT.getSchedule().getTimezone())); + info.setConfig( + new DataHubIngestionSourceConfig() + .setRecipe(TEST_INPUT.getConfig().getRecipe()) + .setVersion(TEST_INPUT.getConfig().getVersion()) + .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) + .setDebugMode(TEST_INPUT.getConfig().getDebugMode())); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + MutationUtils.buildMetadataChangeProposalWithUrn( + TEST_INGESTION_SOURCE_URN, INGESTION_INFO_ASPECT_NAME, info)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -77,24 +79,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 61dd6c678e6e0..8fc5ab6ebb828 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -14,6 +17,7 @@ import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import 
com.linkedin.datahub.graphql.types.BatchMutableType;
 import com.linkedin.datahub.graphql.types.dataset.DatasetType;
+import com.linkedin.entity.Aspect;
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
@@ -22,153 +26,158 @@
 import com.linkedin.metadata.Constants;
 import com.linkedin.mxe.MetadataChangeProposal;
 import graphql.schema.DataFetchingEnvironment;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-import org.testng.annotations.Test;
-import com.linkedin.entity.Aspect;
-
 import java.net.URISyntaxException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
 import java.util.concurrent.CompletionException;
-
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mockito;
+import org.testng.annotations.Test;

 public class MutableTypeBatchResolverTest {

-    private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1";
-    private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2";
-    private static final boolean TEST_DATASET_1_IS_DEPRECATED = true;
-    private static final boolean TEST_DATASET_2_IS_DEPRECATED = false;
-    private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note";
-    private static final String TEST_DATASET_2_DEPRECATION_NOTE = "";
-    private static final Deprecation TEST_DATASET_1_DEPRECATION;
-
-    static {
-        try {
-            TEST_DATASET_1_DEPRECATION = new Deprecation()
-                .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
-                .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
-                .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(e);
-        }
+  private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1";
+  private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2";
+  private static final boolean TEST_DATASET_1_IS_DEPRECATED = true;
+  private static final boolean TEST_DATASET_2_IS_DEPRECATED = false;
+  private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note";
+  private static final String TEST_DATASET_2_DEPRECATION_NOTE = "";
+  private static final Deprecation TEST_DATASET_1_DEPRECATION;
+
+  static {
+    try {
+      TEST_DATASET_1_DEPRECATION =
+          new Deprecation()
+              .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
+              .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
+              .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
     }
-
-    private static final Deprecation TEST_DATASET_2_DEPRECATION;
-
-    static {
-        try {
-            TEST_DATASET_2_DEPRECATION = new Deprecation()
-                .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
-                .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
-                .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(e);
-        }
+  }
+
+  private static final Deprecation TEST_DATASET_2_DEPRECATION;
+
+  static {
+    try {
+      TEST_DATASET_2_DEPRECATION =
+          new Deprecation()
+              .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
+              .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
+              .setActor(Urn.createFromString("urn:li:corpuser:datahub"));
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
     }
+  }

-    @Test
-    public void testGetSuccess() throws Exception {
-        EntityClient mockClient = Mockito.mock(RestliEntityClient.class);
-        BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType = new DatasetType(mockClient);
+  @Test
+  public void testGetSuccess() throws Exception {
+    EntityClient mockClient = Mockito.mock(RestliEntityClient.class);
+    BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType =
+        new DatasetType(mockClient);

-        MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = new MutableTypeBatchResolver<>(batchMutableType);
+    MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver =
+        new MutableTypeBatchResolver<>(batchMutableType);

-        List<BatchDatasetUpdateInput> mockInputs = Arrays.asList(
+    List<BatchDatasetUpdateInput> mockInputs =
+        Arrays.asList(
             new BatchDatasetUpdateInput.Builder()
-                .setUrn(TEST_DATASET_1_URN)
-                .setUpdate(
-                    new DatasetUpdateInput.Builder()
-                        .setDeprecation(
-                            new DatasetDeprecationUpdate.Builder()
-                                .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
-                                .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
-                                .build()
-                        )
-                        .build()
-                )
-                .build(),
+                .setUrn(TEST_DATASET_1_URN)
+                .setUpdate(
+                    new DatasetUpdateInput.Builder()
+                        .setDeprecation(
+                            new DatasetDeprecationUpdate.Builder()
+                                .setDeprecated(TEST_DATASET_1_IS_DEPRECATED)
+                                .setNote(TEST_DATASET_1_DEPRECATION_NOTE)
+                                .build())
+                        .build())
+                .build(),
             new BatchDatasetUpdateInput.Builder()
-                .setUrn(TEST_DATASET_2_URN)
-                .setUpdate(
-                    new DatasetUpdateInput.Builder()
-                        .setDeprecation(
-                            new DatasetDeprecationUpdate.Builder()
-                                .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
-                                .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
-                                .build()
-                        )
-                        .build()
-                )
-                .build()
-        );
-
-        DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-        Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs);
-        QueryContext mockContext = getMockAllowContext();
-        Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
-        Authentication mockAuth = Mockito.mock(Authentication.class);
-        Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth);
-        Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub"));
-
-        Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN);
-        Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN);
-
-        Mockito.when(mockClient.batchGetV2(Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                .setUrn(TEST_DATASET_2_URN)
+                .setUpdate(
+                    new DatasetUpdateInput.Builder()
+                        .setDeprecation(
+                            new DatasetDeprecationUpdate.Builder()
+                                .setDeprecated(TEST_DATASET_2_IS_DEPRECATED)
+                                .setNote(TEST_DATASET_2_DEPRECATION_NOTE)
+                                .build())
+                        .build())
+                .build());
+
+    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
+    Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs);
+    QueryContext mockContext = getMockAllowContext();
+    Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
+    Authentication mockAuth = Mockito.mock(Authentication.class);
+    Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth);
+    Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub"));
+
+    Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN);
+    Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN);
+
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
                 Mockito.eq(new HashSet<>(ImmutableSet.of(datasetUrn1, datasetUrn2))),
                 Mockito.any(),
                 Mockito.any(Authentication.class)))
-            .thenReturn(ImmutableMap.of(
-                datasetUrn1,
-                new EntityResponse()
-                    .setEntityName(Constants.DATASET_ENTITY_NAME)
-                    .setUrn(datasetUrn1)
-                    .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                        Constants.DATASET_DEPRECATION_ASPECT_NAME,
-                        new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data()))
-                    ))),
-                datasetUrn2,
-                new EntityResponse()
-                    .setEntityName(Constants.DATASET_ENTITY_NAME)
-                    .setUrn(datasetUrn2)
-                    .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                        Constants.DATASET_DEPRECATION_ASPECT_NAME,
-                        new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data()))
-                    )))
-        ));
-
-        List<Dataset> result = resolver.get(mockEnv).join();
-
-        ArgumentCaptor<Collection<MetadataChangeProposal>> changeProposalCaptor = ArgumentCaptor.forClass((Class) Collection.class);
-        Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false));
-        Mockito.verify(mockClient, Mockito.times(1)).batchGetV2(
-            Mockito.eq(Constants.DATASET_ENTITY_NAME),
-            Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)),
-            // Dataset aspects to fetch are private, but aren't important for this test
-            Mockito.any(),
-            Mockito.any(Authentication.class)
-        );
-        Collection<MetadataChangeProposal> changeProposals = changeProposalCaptor.getValue();
-
-        assertEquals(changeProposals.size(), 2);
-        assertEquals(result.size(), 2);
-    }
-
-    @Test
-    public void testGetFailureUnauthorized() throws Exception {
-        EntityClient mockClient = Mockito.mock(RestliEntityClient.class);
-        BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType = new DatasetType(mockClient);
-
-        MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver = new MutableTypeBatchResolver<>(batchMutableType);
-
-        DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-        QueryContext mockContext = getMockDenyContext();
-        Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
-
-        assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    }
+        .thenReturn(
+            ImmutableMap.of(
+                datasetUrn1,
+                new EntityResponse()
+                    .setEntityName(Constants.DATASET_ENTITY_NAME)
+                    .setUrn(datasetUrn1)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.DATASET_DEPRECATION_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data()))))),
+                datasetUrn2,
+                new EntityResponse()
+                    .setEntityName(Constants.DATASET_ENTITY_NAME)
+                    .setUrn(datasetUrn2)
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.DATASET_DEPRECATION_ASPECT_NAME,
+                                new EnvelopedAspect()
+                                    .setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data()))))))));
+
+    List<Dataset> result = resolver.get(mockEnv).join();
+
+    ArgumentCaptor<Collection<MetadataChangeProposal>> changeProposalCaptor =
+        ArgumentCaptor.forClass((Class) Collection.class);
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false));
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.DATASET_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)),
+            // Dataset aspects to fetch are private, but aren't important for this test
+            Mockito.any(),
+            Mockito.any(Authentication.class));
+    Collection<MetadataChangeProposal> changeProposals = changeProposalCaptor.getValue();
+
+    assertEquals(changeProposals.size(), 2);
+    assertEquals(result.size(), 2);
+  }
+
+  @Test
+  public void testGetFailureUnauthorized() throws Exception {
+    EntityClient mockClient = Mockito.mock(RestliEntityClient.class);
+    BatchMutableType<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> batchMutableType =
+        new DatasetType(mockClient);
+
+    MutableTypeBatchResolver<DatasetUpdateInput, BatchDatasetUpdateInput, Dataset> resolver =
+        new MutableTypeBatchResolver<>(batchMutableType);
+
+    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
+    QueryContext mockContext = getMockDenyContext();
+    Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
+
+    assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
+  }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java index 1adf7b1200574..bdadfc98f6d85 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java @@ -1,60 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.Siblings; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.SiblingsUtils; import com.linkedin.metadata.entity.EntityService; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashSet; import java.util.Optional; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; -import static org.testng.AssertJUnit.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class SiblingsUtilsTest { - private static final String TEST_DATASET_URN1 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; - private static final String TEST_DATASET_URN2 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; - private static final String TEST_DATASET_URN3 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; + private static final String TEST_DATASET_URN1 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; + private static final String TEST_DATASET_URN2 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; + private static final String TEST_DATASET_URN3 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; @Test public void testGetSiblingUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings().setSiblings(siblingUrns) - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings().setSiblings(siblingUrns)); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); } @Test public void testGetSiblingUrnsWithoutSiblings() { EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings() - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings()); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetSiblingUrnsWithSiblingsAspect() { EntityService mockService = 
Mockito.mock(EntityService.class);
-    Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn(
-        null
-    );
+    Mockito.when(
+            mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME))
+        .thenReturn(null);

-    assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray());
+    assertEquals(
+        SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService),
+        new UrnArray());
   }

   @Test
   public void testGetNextSiblingUrn() {
-    UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3));
+    UrnArray siblingUrns =
+        new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3));
     Optional<Urn> nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, new HashSet<>());

     assertEquals(nextUrn, Optional.of(UrnUtils.getUrn(TEST_DATASET_URN2)));
@@ -62,7 +71,8 @@ public void testGetNextSiblingUrn() {

   @Test
   public void testGetNextSiblingUrnWithUsedUrns() {
-    UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3));
+    UrnArray siblingUrns =
+        new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3));
     HashSet<Urn> usedUrns = new HashSet<>();
     usedUrns.add(UrnUtils.getUrn(TEST_DATASET_URN2));
     Optional<Urn> nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, usedUrns);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java
index 9bd44e9ab0906..3fee28bc31725 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.mutate;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput;
@@ -12,13 +15,10 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-
-
 public class UpdateUserSettingResolverTest {
   private static final String TEST_USER_URN = "urn:li:corpuser:test";
+
   @Test
   public void testWriteCorpUserSettings() throws Exception {
     EntityService mockService = getMockEntityService();
@@ -36,9 +36,12 @@ public void testWriteCorpUserSettings() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     resolver.get(mockEnv).get();

-    CorpUserSettings newSettings = new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_USER_URN),
-        CORP_USER_SETTINGS_ASPECT_NAME, newSettings);
+    CorpUserSettings newSettings =
+        new CorpUserSettings()
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            Urn.createFromString(TEST_USER_URN), CORP_USER_SETTINGS_ASPECT_NAME, newSettings);
verifySingleIngestProposal(mockService, 1, proposal); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java index e2661841fe8f7..abc1a5786f363 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -16,37 +20,35 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class ReportOperationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Operation expectedOperation = new Operation() - .setTimestampMillis(0L) - .setLastUpdatedTimestamp(0L) - .setOperationType(OperationType.INSERT) - .setSourceType(OperationSourceType.DATA_PLATFORM) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) - .setCustomOperationType(null, SetMode.IGNORE_NULL) - .setNumAffectedRows(1L); + Operation expectedOperation = + new Operation() + .setTimestampMillis(0L) + .setLastUpdatedTimestamp(0L) + .setOperationType(OperationType.INSERT) + .setSourceType(OperationSourceType.DATA_PLATFORM) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setCustomOperationType(null, SetMode.IGNORE_NULL) + .setNumAffectedRows(1L); - MetadataChangeProposal expectedProposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - OPERATION_ASPECT_NAME, expectedOperation); + MetadataChangeProposal expectedProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), OPERATION_ASPECT_NAME, expectedOperation); // Test setting the domain - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class))) - .thenReturn(TEST_ENTITY_URN); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class))) + .thenReturn(TEST_ENTITY_URN); ReportOperationResolver resolver = new ReportOperationResolver(mockClient); @@ -57,11 +59,9 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -77,9 +77,8 @@ public void testGetUnauthorized() throws Exception { 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private ReportOperationInput getTestInput() { @@ -91,4 +90,4 @@ private ReportOperationInput getTestInput() { input.setSourceType(com.linkedin.datahub.graphql.generated.OperationSourceType.DATA_PLATFORM); return input; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 329d71ec125db..74f88f95fc171 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -24,13 +27,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddOwnersResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_OWNER_1_URN = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_2_URN = "urn:li:corpuser:test-id-2"; private static final String TEST_OWNER_3_URN = "urn:li:corpGroup:test-id-3"; @@ -39,18 +39,23 @@ public class AddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -58,12 +63,20 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new 
AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), - new OwnerInput(TEST_OWNER_2_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) - ), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), + new OwnerInput( + TEST_OWNER_2_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -71,38 +84,45 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); } @Test public void testGetSuccessExistingOwnerNewType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -110,13 +130,16 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { QueryContext mockContext = getMockAllowContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -124,34 +147,42 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -159,12 +190,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + 
                     OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))
                 .setOwnerEntityType(OwnerEntityType.CORP_USER)
-                .build()
-        ), TEST_ENTITY_URN);
+                .build()),
+            TEST_ENTITY_URN);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
@@ -172,39 +207,51 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception {
     // Unable to easily validate exact payload due to the injected timestamp
     verifyIngestProposal(mockService, 1);

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)));
   }

   @Test
   public void testGetSuccessMultipleOwnerTypes() throws Exception {
     EntityService mockService = getMockEntityService();

-    com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray(
-        ImmutableList.of(new Owner()
-            .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN))
-            .setType(com.linkedin.common.OwnershipType.NONE)
-            .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
-        )));
-
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
-        .thenReturn(oldOwnership);
+    com.linkedin.common.Ownership oldOwnership =
+        new Ownership()
+            .setOwners(
+                new OwnerArray(
+                    ImmutableList.of(
+                        new Owner()
+                            .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN))
+                            .setType(com.linkedin.common.OwnershipType.NONE)
+                            .setSource(
+                                new OwnershipSource().setType(OwnershipSourceType.MANUAL))))));
+
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(oldOwnership);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name()))))
-        .thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))))
-        .thenReturn(true);
+    Mockito.when(
+            mockService.exists(
+                Urn.createFromString(
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                            .name()))))
+        .thenReturn(true);
+    Mockito.when(
+            mockService.exists(
+                Urn.createFromString(
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))))
+        .thenReturn(true);

     AddOwnersResolver resolver = new AddOwnersResolver(mockService);

@@ -212,22 +259,28 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception {
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);

-    AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder()
+    AddOwnersInput input =
+        new AddOwnersInput(
+            ImmutableList.of(
+                OwnerInput.builder()
                     .setOwnerUrn(TEST_OWNER_1_URN)
-        .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))
+                    .setOwnershipTypeUrn(
+                        OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))
                     .setOwnerEntityType(OwnerEntityType.CORP_USER)
                     .build(),
-        OwnerInput.builder()
+                OwnerInput.builder()
                     .setOwnerUrn(TEST_OWNER_2_URN)
-        .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name()))
+                    .setOwnershipTypeUrn(
+                        OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name()))
                     .setOwnerEntityType(OwnerEntityType.CORP_USER)
                     .build(),
-        OwnerInput.builder()
+                OwnerInput.builder()
                     .setOwnerUrn(TEST_OWNER_3_URN)
-        .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))
+                    .setOwnershipTypeUrn(
+                        OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))
                     .setOwnerEntityType(OwnerEntityType.CORP_GROUP)
-                .build()
-        ), TEST_ENTITY_URN);
+                    .build()),
+            TEST_ENTITY_URN);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
@@ -235,27 +288,25 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception {
     // Unable to easily validate exact payload due to the injected timestamp
     verifyIngestProposal(mockService, 1);

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)));

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)));

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)));
   }

   @Test
   public void testGetFailureOwnerDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -266,9 +317,15 @@ public void testGetFailureOwnerDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddOwnersInput input = new AddOwnersInput(ImmutableList.of(
-        new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN);
+    AddOwnersInput input =
+        new AddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_1_URN,
+                    OwnerEntityType.CORP_USER,
+                    OwnershipType.TECHNICAL_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))),
+            TEST_ENTITY_URN);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

@@ -280,10 +337,11 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
@@ -294,9 +352,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddOwnersInput input = new AddOwnersInput(ImmutableList.of(
-        new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN);
+    AddOwnersInput input =
+        new AddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_1_URN,
+                    OwnerEntityType.CORP_USER,
+                    OwnershipType.TECHNICAL_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))),
+            TEST_ENTITY_URN);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

@@ -312,9 +376,15 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddOwnersInput input = new AddOwnersInput(ImmutableList.of(
-        new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN);
+    AddOwnersInput input =
+        new AddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_1_URN,
+                    OwnerEntityType.CORP_USER,
+                    OwnershipType.TECHNICAL_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))),
+            TEST_ENTITY_URN);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
@@ -327,21 +397,30 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());

     AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class));

     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    AddOwnersInput input = new AddOwnersInput(ImmutableList.of(
-        new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN);
+    AddOwnersInput input =
+        new AddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_1_URN,
+                    OwnerEntityType.CORP_USER,
+                    OwnershipType.TECHNICAL_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))),
+            TEST_ENTITY_URN);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java
index 79fc62742f444..92a789530d6e4 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.owner;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.Owner;
@@ -23,14 +26,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class BatchAddOwnersResolverTest {

-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1";
   private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2";
@@ -38,16 +39,18 @@ public class BatchAddOwnersResolverTest {
   public void testGetSuccessNoExistingOwners() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -56,8 +59,12 @@ public void testGetSuccessNoExistingOwners() throws Exception {
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))))
+    Mockito.when(
+            mockService.exists(
+                Urn.createFromString(
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))))
         .thenReturn(true);

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);

@@ -65,52 +72,64 @@ public void testGetSuccessNoExistingOwners() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput(
-        TEST_OWNER_URN_1,
-        OwnerEntityType.CORP_USER,
-        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())),
-        new OwnerInput(
-            TEST_OWNER_URN_2,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))),
-        null,
-        ImmutableList.of(
-            new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-            new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddOwnersInput input =
+        new BatchAddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_URN_1,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name())),
+                new OwnerInput(
+                    TEST_OWNER_URN_2,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
     verifyIngestProposal(mockService, 1);

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)));

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)));
   }

   @Test
   public void testGetSuccessExistingOwners() throws Exception {
-    final Ownership originalOwnership = new Ownership().setOwners(new OwnerArray(ImmutableList.of(
-        new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER)
-    )));
+    final Ownership originalOwnership =
+        new Ownership()
+            .setOwners(
+                new OwnerArray(
+                    ImmutableList.of(
+                        new Owner()
+                            .setOwner(Urn.createFromString(TEST_OWNER_URN_1))
+                            .setType(OwnershipType.TECHNICAL_OWNER))));
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(originalOwnership);

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(originalOwnership);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -119,12 +138,20 @@ public void testGetSuccessExistingOwners() throws Exception {
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name()))))
+    Mockito.when(
+            mockService.exists(
+                Urn.createFromString(
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER
+                            .name()))))
         .thenReturn(true);

-    Mockito.when(mockService.exists(Urn.createFromString(
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))))
+    Mockito.when(
+            mockService.exists(
+                Urn.createFromString(
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))))
         .thenReturn(true);

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);

@@ -132,44 +159,49 @@ public void testGetSuccessExistingOwners() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(
-        new OwnerInput(
-            TEST_OWNER_URN_1,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())),
-        new OwnerInput(
-            TEST_OWNER_URN_2,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))),
-        null,
-        ImmutableList.of(
-            new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-            new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddOwnersInput input =
+        new BatchAddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_URN_1,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name())),
+                new OwnerInput(
+                    TEST_OWNER_URN_2,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
     verifyIngestProposal(mockService, 1);

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)));

-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)));
   }

   @Test
   public void testGetFailureOwnerDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -180,20 +212,27 @@ public void testGetFailureOwnerDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput(
-        TEST_OWNER_URN_1,
-        OwnerEntityType.CORP_USER,
-        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())),
-        new OwnerInput(
-            TEST_OWNER_URN_2,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))),
-        null,
-        ImmutableList.of(
-            new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-            new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddOwnersInput input =
+        new BatchAddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_URN_1,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name())),
+                new OwnerInput(
+                    TEST_OWNER_URN_2,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

@@ -205,15 +244,17 @@ public void testGetFailureOwnerDoesNotExist() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
@@ -225,20 +266,27 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput(
-        TEST_OWNER_URN_1,
-        OwnerEntityType.CORP_USER,
-        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())),
-        new OwnerInput(
-            TEST_OWNER_URN_2,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))),
-        null,
-        ImmutableList.of(
-            new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-            new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddOwnersInput input =
+        new BatchAddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_URN_1,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name())),
+                new OwnerInput(
+                    TEST_OWNER_URN_2,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

@@ -254,20 +302,27 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput(
-        TEST_OWNER_URN_1,
-        OwnerEntityType.CORP_USER,
-        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())),
-        new OwnerInput(
-            TEST_OWNER_URN_2,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))),
-        null,
-        ImmutableList.of(
-            new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-            new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddOwnersInput input =
+        new BatchAddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_URN_1,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name())),
+                new OwnerInput(
+                    TEST_OWNER_URN_2,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
@@ -280,32 +335,42 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());

     BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService);

     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput(
-        TEST_OWNER_URN_1,
-        OwnerEntityType.CORP_USER,
-        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-        OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())),
-        new OwnerInput(
-            TEST_OWNER_URN_2,
-            OwnerEntityType.CORP_USER,
-            com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
-            OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))),
-        null,
-        ImmutableList.of(
-            new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-            new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddOwnersInput input =
+        new BatchAddOwnersInput(
+            ImmutableList.of(
+                new OwnerInput(
+                    TEST_OWNER_URN_1,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name())),
+                new OwnerInput(
+                    TEST_OWNER_URN_2,
+                    OwnerEntityType.CORP_USER,
+                    com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER,
+                    OwnerUtils.mapOwnershipTypeToEntity(
+                        com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER
+                            .name()))),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java
index 9dc2ec8127806..7cef90ffee512 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.owner;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.Owner;
@@ -20,14 +23,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class BatchRemoveOwnersResolverTest {

-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1";
   private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2";
@@ -35,15 +36,17 @@ public class BatchRemoveOwnersResolverTest {
   public void testGetSuccessNoExistingOwners() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -57,12 +60,13 @@ public void testGetSuccessNoExistingOwners() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(
-        TEST_OWNER_URN_1,
-        TEST_OWNER_URN_2
-    ), null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveOwnersInput input =
+        new BatchRemoveOwnersInput(
+            ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
@@ -74,24 +78,36 @@ public void testGetSuccessNoExistingOwners() throws Exception {
   public void testGetSuccessExistingOwners() throws Exception {
     EntityService mockService = getMockEntityService();

-    final Ownership oldOwners1 = new Ownership().setOwners(new OwnerArray(ImmutableList.of(
-        new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER)
-    )));
-
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    final Ownership oldOwners1 =
+        new Ownership()
+            .setOwners(
+                new OwnerArray(
+                    ImmutableList.of(
+                        new Owner()
+                            .setOwner(Urn.createFromString(TEST_OWNER_URN_1))
+                            .setType(OwnershipType.TECHNICAL_OWNER))));
+
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(oldOwners1);

-    final Ownership oldOwners2 = new Ownership().setOwners(new OwnerArray(ImmutableList.of(
-        new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_2)).setType(OwnershipType.TECHNICAL_OWNER)
-    )));
-
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    final Ownership oldOwners2 =
+        new Ownership()
+            .setOwners(
+                new OwnerArray(
+                    ImmutableList.of(
+                        new Owner()
+                            .setOwner(Urn.createFromString(TEST_OWNER_URN_2))
+                            .setType(OwnershipType.TECHNICAL_OWNER))));
+
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(oldOwners2);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -105,10 +121,13 @@ public void testGetSuccessExistingOwners() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2
-    ), null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveOwnersInput input =
+        new BatchRemoveOwnersInput(
+            ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
@@ -120,15 +139,17 @@ public void testGetSuccessExistingOwners() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);

     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
@@ -140,10 +161,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2
-    ), null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveOwnersInput input =
+        new BatchRemoveOwnersInput(
+            ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

@@ -159,10 +183,13 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2
-    ), null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveOwnersInput input =
+        new BatchRemoveOwnersInput(
+            ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
@@ -175,22 +202,28 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();

-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());

     BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService);

     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2
-    ), null, ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveOwnersInput input =
+        new BatchRemoveOwnersInput(
+            ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2),
+            null,
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java
index 0643ead444c94..ff11d971b52e8 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java
@@ -1,12 +1,15 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
 import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput;
-import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.datahub.graphql.generated.EntityType;
+import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.service.OwnershipTypeService;
@@ -15,15 +18,11 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class CreateOwnershipTypeResolverTest {

-  private static final CreateOwnershipTypeInput TEST_INPUT = new CreateOwnershipTypeInput(
-      "Custom ownership",
-      "A custom ownership description for testing purposes");
+  private static final CreateOwnershipTypeInput TEST_INPUT =
+      new CreateOwnershipTypeInput(
+          "Custom ownership", "A custom ownership description for testing purposes");

   private static final Urn TEST_OWNERSHIP_TYPE_URN =
       Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test");
@@ -45,10 +44,12 @@ public void testCreateSuccess() throws Exception {
     assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription());
     assertEquals(ownershipType.getType(), EntityType.CUSTOM_OWNERSHIP_TYPE);

-    Mockito.verify(mockService, Mockito.times(1)).createOwnershipType(
-        Mockito.eq(TEST_INPUT.getName()),
-        Mockito.eq(TEST_INPUT.getDescription()),
-        Mockito.any(Authentication.class), Mockito.anyLong());
+    Mockito.verify(mockService, Mockito.times(1))
+        .createOwnershipType(
+            Mockito.eq(TEST_INPUT.getName()),
+            Mockito.eq(TEST_INPUT.getDescription()),
+            Mockito.any(Authentication.class),
+            Mockito.anyLong());
   }

   @Test
@@ -65,20 +66,18 @@ public void testCreateUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }

   @Test
   public void testCreateOwnershipTypeServiceException() throws Exception {
     // Create resolver
     OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class);
-    Mockito.doThrow(RuntimeException.class).when(mockService).createOwnershipType(
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(Authentication.class),
-        Mockito.anyLong());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .createOwnershipType(
+            Mockito.any(), Mockito.any(), Mockito.any(Authentication.class), Mockito.anyLong());

     CreateOwnershipTypeResolver resolver = new CreateOwnershipTypeResolver(mockService);

@@ -93,12 +92,13 @@ public void testCreateOwnershipTypeServiceException() throws Exception {
   private OwnershipTypeService initMockService() {
     OwnershipTypeService service = Mockito.mock(OwnershipTypeService.class);

-    Mockito.when(service.createOwnershipType(
-        Mockito.eq(TEST_INPUT.getName()),
-        Mockito.eq(TEST_INPUT.getDescription()),
-        Mockito.any(Authentication.class),
-        Mockito.anyLong()
-    )).thenReturn(TEST_OWNERSHIP_TYPE_URN);
+    Mockito.when(
+            service.createOwnershipType(
+                Mockito.eq(TEST_INPUT.getName()),
+                Mockito.eq(TEST_INPUT.getDescription()),
+                Mockito.any(Authentication.class),
+                Mockito.anyLong()))
+        .thenReturn(TEST_OWNERSHIP_TYPE_URN);

     return service;
   }
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java
index 9f526e4008236..ae97164a2787e 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.ArgumentMatchers.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -14,11 +18,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.mockito.ArgumentMatchers.*;
-import static org.testng.Assert.*;
-
-
 public class DeleteOwnershipTypeResolverTest {

   private static final Urn TEST_URN =
@@ -41,11 +40,8 @@ public void testGetSuccessOwnershipTypeCanManage() throws Exception {

     assertTrue(resolver.get(mockEnv).get());

-    Mockito.verify(mockService, Mockito.times(1)).deleteOwnershipType(
-        Mockito.eq(TEST_URN),
-        anyBoolean(),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class));
   }

   @Test
@@ -62,21 +58,17 @@ public void testGetFailureOwnershipTypeCanNotManager() throws Exception {

     assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).get());

-    Mockito.verify(mockService, Mockito.times(0)).deleteOwnershipType(
-        Mockito.eq(TEST_URN),
-        anyBoolean(),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockService, Mockito.times(0))
+        .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class));
   }

   @Test
   public void testGetOwnershipTypeServiceException() throws Exception {
     // Create resolver
     OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class);
-    Mockito.doThrow(RuntimeException.class).when(mockService).deleteOwnershipType(
-        Mockito.any(),
-        anyBoolean(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .deleteOwnershipType(Mockito.any(), anyBoolean(), Mockito.any(Authentication.class));

     DeleteOwnershipTypeResolver resolver = new DeleteOwnershipTypeResolver(mockService);

@@ -93,17 +85,18 @@ public void testGetOwnershipTypeServiceException() throws Exception {
   private static OwnershipTypeService initOwnershipTypeService() {
     OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class);

-    OwnershipTypeInfo testInfo = new OwnershipTypeInfo()
-        .setName("test-name")
-        .setDescription("test-description")
-        .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
-        .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L));
+    OwnershipTypeInfo testInfo =
+        new OwnershipTypeInfo()
+            .setName("test-name")
+            .setDescription("test-description")
+            .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
+            .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L));

-    Mockito.when(mockService.getOwnershipTypeInfo(
-        Mockito.eq(TEST_URN),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockService.getOwnershipTypeInfo(
+                Mockito.eq(TEST_URN), Mockito.any(Authentication.class)))
         .thenReturn(testInfo);

     return mockService;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java
index ceab13167246c..fd7baf6af7469 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
@@ -18,16 +21,13 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class ListOwnershipTypesResolverTest {

   private static final Urn TEST_OWNERSHIP_TYPE_URN =
       Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test");

-  private static final ListOwnershipTypesInput TEST_INPUT = new ListOwnershipTypesInput(0, 20, "", null);
+  private static final ListOwnershipTypesInput TEST_INPUT =
+      new ListOwnershipTypesInput(0, 20, "", null);

   @Test
   public void testGetSuccess() throws Exception {
@@ -38,21 +38,24 @@ public void testGetSuccess() throws Exception {
     final OwnershipTypeKey key = new OwnershipTypeKey();
     key.setId("test");

-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.eq(null),
-        Mockito.any(),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN))))
-    );
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.eq(null),
+                Mockito.any(),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.any(Authentication.class),
+                Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN)))));

     ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient);

@@ -78,35 +81,32 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
-    Mockito.verify(mockClient, Mockito.times(0)).search(
-        Mockito.any(),
-        Mockito.eq(""),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .search(
+            Mockito.any(),
+            Mockito.eq(""),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
   }

   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
-        Mockito.any(),
-        Mockito.anySet(),
-        Mockito.anySet(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .batchGetV2(
+            Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class));

     ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient);

     // Execute resolver
@@ -124,4 +124,4 @@ public static OwnershipTypeInfo getOwnershipTypeInfo() {
     info.setDescription("some description");
     return info;
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java
index f35b8f98cc1ac..6e428842201d5 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.ownership;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.common.AuditStamp;
@@ -7,8 +10,8 @@
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.exception.AuthorizationException;
-import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.datahub.graphql.generated.EntityType;
+import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
 import com.linkedin.datahub.graphql.generated.UpdateOwnershipTypeInput;
 import com.linkedin.entity.Aspect;
 import com.linkedin.entity.AspectType;
@@ -24,19 +27,15 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class UpdateOwnershipTypeResolverTest {

   private static final Urn TEST_URN =
       Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test");
   private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth");
   private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth");
-  private static final UpdateOwnershipTypeInput TEST_INPUT = new UpdateOwnershipTypeInput(
-      "Custom ownership",
-      "A custom ownership description for testing purposes");
+  private static final UpdateOwnershipTypeInput TEST_INPUT =
+      new UpdateOwnershipTypeInput(
+          "Custom ownership", "A custom ownership description for testing purposes");

   @Test
   public void testUpdateSuccessOwnershipTypeCanManage() throws Exception {
@@ -55,23 +54,27 @@ public void testUpdateSuccessOwnershipTypeCanManage() throws Exception {
     assertEquals(ownershipType.getInfo().getName(), TEST_INPUT.getName());
     assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription());

-    Mockito.verify(mockService, Mockito.times(1)).updateOwnershipType(
-        Mockito.eq(TEST_URN),
-        Mockito.eq(TEST_INPUT.getName()),
-        Mockito.eq(TEST_INPUT.getDescription()),
-        Mockito.any(Authentication.class), Mockito.anyLong());
+    Mockito.verify(mockService, Mockito.times(1))
+        .updateOwnershipType(
+            Mockito.eq(TEST_URN),
+            Mockito.eq(TEST_INPUT.getName()),
+            Mockito.eq(TEST_INPUT.getDescription()),
+            Mockito.any(Authentication.class),
+            Mockito.anyLong());
   }

   @Test
   public void testUpdateOwnershipTypeServiceException() throws Exception {
     // Update resolver
     OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class);
-    Mockito.doThrow(RuntimeException.class).when(mockService).updateOwnershipType(
-        Mockito.any(Urn.class),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(Authentication.class),
-        Mockito.anyLong());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .updateOwnershipType(
+            Mockito.any(Urn.class),
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(Authentication.class),
+            Mockito.anyLong());

     UpdateOwnershipTypeResolver resolver = new UpdateOwnershipTypeResolver(mockService);

@@ -100,39 +103,41 @@ public void testUpdateUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);

     assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }

   private static OwnershipTypeService initOwnershipTypeService() {
     OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class);

-    OwnershipTypeInfo testInfo = new OwnershipTypeInfo()
-        .setName(TEST_INPUT.getName())
-        .setDescription(TEST_INPUT.getDescription())
-        .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
-        .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L));
-
-    EntityResponse testEntityResponse = new EntityResponse()
-        .setUrn(TEST_URN)
-        .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME)
-        .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-            Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME,
-            new EnvelopedAspect()
-                .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)
-                .setType(AspectType.VERSIONED)
-                .setValue(new Aspect(testInfo.data()))
-        )));
-
-    Mockito.when(mockService.getOwnershipTypeInfo(
-        Mockito.eq(TEST_URN),
-        Mockito.any(Authentication.class)))
+    OwnershipTypeInfo testInfo =
+        new OwnershipTypeInfo()
+            .setName(TEST_INPUT.getName())
+            .setDescription(TEST_INPUT.getDescription())
+            .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L))
+            .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L));
+
+    EntityResponse testEntityResponse =
+        new EntityResponse()
+            .setUrn(TEST_URN)
+            .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME)
+            .setAspects(
+                new EnvelopedAspectMap(
+                    ImmutableMap.of(
+                        Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME,
+                        new EnvelopedAspect()
+                            .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)
+                            .setType(AspectType.VERSIONED)
+                            .setValue(new Aspect(testInfo.data()))))));
+
+    Mockito.when(
+            mockService.getOwnershipTypeInfo(
+                Mockito.eq(TEST_URN), Mockito.any(Authentication.class)))
         .thenReturn(testInfo);

-    Mockito.when(mockService.getOwnershipTypeEntityResponse(
-        Mockito.eq(TEST_URN),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockService.getOwnershipTypeEntityResponse(
+                Mockito.eq(TEST_URN), Mockito.any(Authentication.class)))
         .thenReturn(testEntityResponse);

     return mockService;
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java
index b56d897a468ba..2827e3602e379 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.post;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.post.PostService;
 import com.linkedin.common.Media;
@@ -15,11 +19,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class CreatePostResolverTest {
   private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE;
   private static final String POST_MEDIA_LOCATION =
@@ -59,9 +58,12 @@ public void testCreatePost() throws Exception {
     UpdateMediaInput media = new UpdateMediaInput();
     media.setType(POST_MEDIA_TYPE);
     media.setLocation(POST_MEDIA_LOCATION);
-    Media mediaObj = new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString()))
-        .setLocation(new Url(POST_MEDIA_LOCATION));
-    when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))).thenReturn(mediaObj);
+    Media mediaObj =
+        new Media()
+            .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString()))
+            .setLocation(new Url(POST_MEDIA_LOCATION));
+    when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION)))
+        .thenReturn(mediaObj);

     UpdatePostContentInput content = new UpdatePostContentInput();
     content.setTitle(POST_TITLE);
@@ -69,22 +71,33 @@ public void testCreatePost() throws Exception {
     content.setLink(POST_LINK);
     content.setContentType(POST_CONTENT_TYPE);
     content.setMedia(media);
-    com.linkedin.post.PostContent postContentObj = new com.linkedin.post.PostContent().setType(
-        com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString()))
-        .setTitle(POST_TITLE)
-        .setDescription(POST_DESCRIPTION)
-        .setLink(new Url(POST_LINK))
-        .setMedia(new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString()))
-            .setLocation(new Url(POST_MEDIA_LOCATION)));
-    when(_postService.mapPostContent(eq(POST_CONTENT_TYPE.toString()), eq(POST_TITLE), eq(POST_DESCRIPTION),
-        eq(POST_LINK), any(Media.class))).thenReturn(postContentObj);
+    com.linkedin.post.PostContent postContentObj =
+        new com.linkedin.post.PostContent()
+            .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString()))
+            .setTitle(POST_TITLE)
+            .setDescription(POST_DESCRIPTION)
+            .setLink(new Url(POST_LINK))
+            .setMedia(
+                new Media()
+                    .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString()))
+                    .setLocation(new Url(POST_MEDIA_LOCATION)));
+    when(_postService.mapPostContent(
+            eq(POST_CONTENT_TYPE.toString()),
+            eq(POST_TITLE),
+            eq(POST_DESCRIPTION),
+            eq(POST_LINK),
+            any(Media.class)))
+        .thenReturn(postContentObj);

     CreatePostInput input = new CreatePostInput();
     input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT);
     input.setContent(content);
     when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input);
-    when(_postService.createPost(eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj),
-        eq(_authentication))).thenReturn(true);
+    when(_postService.createPost(
+            eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()),
+            eq(postContentObj),
+            eq(_authentication)))
+        .thenReturn(true);

     assertTrue(_resolver.get(_dataFetchingEnvironment).join());
   }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java
index b8a7488a824fd..085cfd0569781 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.post;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.post.PostService;
 import com.linkedin.common.urn.Urn;
@@ -9,11 +13,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class DeletePostResolverTest {
   private static final String POST_URN_STRING = "urn:li:post:123";
   private PostService _postService;
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java
index c22d6bf39640d..6c475cdc7f5a8 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.datahub.graphql.resolvers.post;

+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -28,17 +33,10 @@
 import graphql.schema.DataFetchingEnvironment;
 import java.net.URISyntaxException;
 import java.util.Map;
-
 import org.mockito.Mockito;
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;

-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class ListPostsResolverTest {
   private static Map _entityResponseMap;
   private static final String POST_URN_STRING = "urn:li:post:examplePost";
@@ -49,12 +47,15 @@ public class ListPostsResolverTest {
   private static final String POST_TITLE = "title";
   private static final String POST_DESCRIPTION = "description";
   private static final String POST_LINK = "https://datahubproject.io";
-  private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION));
-  private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE)
-      .setTitle(POST_TITLE)
-      .setDescription(POST_DESCRIPTION)
-      .setLink(new Url(POST_LINK))
-      .setMedia(MEDIA);
+  private static final Media MEDIA =
+      new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION));
+  private static final PostContent POST_CONTENT =
+      new PostContent()
+          .setType(POST_CONTENT_TYPE)
+          .setTitle(POST_TITLE)
+          .setDescription(POST_DESCRIPTION)
+          .setLink(new Url(POST_LINK))
+          .setMedia(MEDIA);
   private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT;

   private EntityClient _entityClient;
@@ -72,8 +73,11 @@ private Map getMockPostsEntityResponse() throws URISyntaxEx
     DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo();
     dataHubRoleInfo.setDescription(postUrn.toString());
     dataHubRoleInfo.setName(postUrn.toString());
-    entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data())))));
+    entityResponse.setAspects(
+        new EnvelopedAspectMap(
+            ImmutableMap.of(
+                DATAHUB_ROLE_INFO_ASPECT_NAME,
+                new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data())))));

     return ImmutableMap.of(postUrn, entityResponse);
   }
@@ -106,13 +110,27 @@ public void testListPosts() throws Exception {
     ListPostsInput input = new ListPostsInput();
     when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input);

     final SearchResult roleSearchResult =
-        new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(1);
+        new SearchResult()
+            .setMetadata(new SearchResultMetadata())
+            .setFrom(0)
+            .setPageSize(10)
+            .setNumEntities(1);
     roleSearchResult.setEntities(
-        new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING)))));
-
-    when(_entityClient.search(eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt(),
-        eq(_authentication), Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult);
-    when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap);
+        new SearchEntityArray(
+            ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING)))));
+
+    when(_entityClient.search(
+            eq(POST_ENTITY_NAME),
+            any(),
+            eq(null),
+            any(),
+            anyInt(),
+            anyInt(),
+            eq(_authentication),
+            Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(roleSearchResult);
+    when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any()))
+        .thenReturn(_entityResponseMap);

     ListPostsResult result = _resolver.get(_dataFetchingEnvironment).join();
     assertEquals(result.getStart(), 0);
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java
index 9c04c67dd3a3b..eebe0034fce61 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.datahub.graphql.resolvers.query;

+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -24,10 +26,10 @@
 import com.linkedin.entity.EntityResponse;
 import com.linkedin.entity.EnvelopedAspect;
 import com.linkedin.entity.EnvelopedAspectMap;
+import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.service.QueryService;
-import com.linkedin.entity.client.EntityClient;
 import com.linkedin.query.QueryProperties;
 import com.linkedin.query.QuerySource;
 import com.linkedin.query.QueryStatement;
@@ -40,21 +42,19 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;

-import static org.testng.Assert.*;
-
-
 public class CreateQueryResolverTest {

-  private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)");
+  private static final Urn TEST_DATASET_URN =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)");
   private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query");
   private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test");

-  private static final CreateQueryInput TEST_INPUT = new CreateQueryInput(
-      new CreateQueryPropertiesInput(
-          "test-id",
-          "test-description",
-          new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)),
-      ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString()))
-  );
+  private static final CreateQueryInput TEST_INPUT =
+      new CreateQueryInput(
+          new CreateQueryPropertiesInput(
+              "test-id",
+              "test-description",
+              new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)),
+          ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString())));

   @Test
   public void testGetSuccess() throws Exception {
@@ -70,25 +70,35 @@ public void testGetSuccess() throws Exception {
     QueryEntity query = resolver.get(mockEnv).get();

     assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName());
-    assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription());
+    assertEquals(
+        query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription());
     assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString());
-    assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue());
-    assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage());
-    assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn());
+    assertEquals(
+        query.getProperties().getStatement().getValue(),
+        TEST_INPUT.getProperties().getStatement().getValue());
+    assertEquals(
+        query.getProperties().getStatement().getLanguage(),
+        TEST_INPUT.getProperties().getStatement().getLanguage());
+    assertEquals(
+        query.getSubjects().get(0).getDataset().getUrn(),
+        TEST_INPUT.getSubjects().get(0).getDatasetUrn());
     assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString());
     assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString());

-    Mockito.verify(mockService, Mockito.times(1)).createQuery(
-        Mockito.eq(TEST_INPUT.getProperties().getName()),
-        Mockito.eq(TEST_INPUT.getProperties().getDescription()),
-        Mockito.eq(QuerySource.MANUAL),
-        Mockito.eq(new QueryStatement()
.setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -105,23 +115,24 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateQueryResolver resolver = new CreateQueryResolver(mockService); @@ -136,58 +147,68 @@ public void testGetQueryServiceException() throws Exception { private QueryService initMockService() { QueryService service = Mockito.mock(QueryService.class); - Mockito.when(service.createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_QUERY_URN); - - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects querySubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - 
))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(querySubjects.data())) - ))) - ); + Mockito.when( + service.createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT + .getProperties() + .getStatement() + .getLanguage() + .toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_QUERY_URN); + + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects querySubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(querySubjects.data())))))); return service; } @@ -197,36 +218,40 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new 
AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java index 78c894f27cbc3..96ddc632562ee 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -22,14 +25,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteQueryResolverTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @Test @@ -45,10 +45,8 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - 
Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -62,10 +60,8 @@ public void testGetSuccessCanEditQueries() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -79,19 +75,17 @@ public void testGetFailureActorUnauthorized() { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteQuery( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteQuery(Mockito.any(), Mockito.any(Authentication.class)); DeleteQueryResolver resolver = new DeleteQueryResolver(mockService); @@ -108,14 +102,13 @@ private static QueryService initMockService() { QueryService mockService = Mockito.mock(QueryService.class); QuerySubjects existingQuerySubjects = new QuerySubjects(); - existingQuerySubjects.setSubjects(new QuerySubjectArray( - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)) - )); + existingQuerySubjects.setSubjects( + new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(mockService.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) - .thenReturn(existingQuerySubjects); + Mockito.when( + mockService.getQuerySubjects( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn(existingQuerySubjects); return mockService; } @@ -126,40 +119,47 @@ private QueryContext getMockAllowEditQueriesOnQueryContext() { private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getActorUrn()) + .thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), - DeleteQueryResolverTest.TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + 
DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec( + DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), + DeleteQueryResolverTest.TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 877a4d2b27f6a..8a56b142e5b5e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -32,28 +35,24 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListQueriesResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = Urn.createFromTuple("query", "test-id"); - private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = new ListQueriesInput( - 0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString() - ); - private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = new 
ListQueriesInput( - 0, 30, null, QuerySource.MANUAL, null - ); - private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = new ListQueriesInput( - 0, 40, null, null, TEST_DATASET_URN.toString() - ); + private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = + new ListQueriesInput(0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString()); + private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = + new ListQueriesInput(0, 30, null, QuerySource.MANUAL, null); + private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = + new ListQueriesInput(0, 40, null, null, TEST_DATASET_URN.toString()); @DataProvider(name = "inputs") public static Object[][] inputs() { - return new Object[][] {{ TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER}}; + return new Object[][] { + {TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER} + }; } @Test(dataProvider = "inputs") @@ -61,22 +60,30 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(input.getQuery() == null ? ListQueriesResolver.DEFAULT_QUERY : input.getQuery()), - Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), - Mockito.eq(new SortCriterion().setField(ListQueriesResolver.CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq( + input.getQuery() == null + ? 
ListQueriesResolver.DEFAULT_QUERY + : input.getQuery()), + Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), + Mockito.eq( + new SortCriterion() + .setField(ListQueriesResolver.CREATED_AT_FIELD) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN))))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); @@ -90,7 +97,8 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getQueries().size(), 1); - assertEquals(resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); } @Test @@ -102,33 +110,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT_FULL_FILTERS); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_FULL_FILTERS); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver @@ -146,7 +156,8 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity if (source != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_SOURCE_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_SOURCE_FIELD, null, ImmutableList.of(source.toString()), false, @@ -154,14 +165,14 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity } if (entityUrn != null) { 
andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_ENTITIES_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_ENTITIES_FIELD, null, ImmutableList.of(entityUrn), false, FilterOperator.EQUAL)); - } criteria.setAnd(andConditions); return ResolverUtils.buildFilter(Collections.emptyList(), ImmutableList.of(criteria)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java index 9b500b5fb3936..766d8a2ccb136 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -13,12 +15,12 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateQueryInput; -import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; -import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.QueryLanguage; import com.linkedin.datahub.graphql.generated.QueryStatementInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; +import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; import com.linkedin.entity.EntityResponse; @@ -40,22 +42,21 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class UpdateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateQueryInput TEST_INPUT = new UpdateQueryInput( - new UpdateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString())) - ); + private static final UpdateQueryInput TEST_INPUT = + new UpdateQueryInput( + new UpdateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString()))); @Test public void testGetSuccess() throws Exception { @@ -72,25 +73,35 @@ public void testGetSuccess() throws Exception { 
QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).updateQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN_2) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateQuery( + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -108,23 +119,24 @@ public void testGetUnauthorizedNoEditQueriesRights() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Update resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + 
Mockito.anyLong()); UpdateQueryResolver resolver = new UpdateQueryResolver(mockService); @@ -143,56 +155,59 @@ private QueryService initMockService() { // Pre-Update QueryService service = Mockito.mock(QueryService.class); - final QuerySubjects existingSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); + final QuerySubjects existingSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(service.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) + Mockito.when( + service.getQuerySubjects(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) .thenReturn(existingSubjects); // Post-Update - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects newSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN_2) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(newSubjects.data())) - ))) - ); + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects newSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new 
Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(newSubjects.data())))))); return service; } @@ -202,62 +217,71 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editQueriesRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); - - AuthorizationRequest editAllRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); + AuthorizationRequest editQueriesRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editQueriesRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); + + AuthorizationRequest editAllRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); AuthorizationResult editQueriesResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))).thenReturn(editQueriesResult1); + Mockito.when(editQueriesResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))) + .thenReturn(editQueriesResult1); AuthorizationResult editAllResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest1))).thenReturn(editAllResult1); AuthorizationResult editQueriesResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))).thenReturn(editQueriesResult2); + Mockito.when(editQueriesResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))) + .thenReturn(editQueriesResult2); AuthorizationResult editAllResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest2))).thenReturn(editAllResult2); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java index 3cde81d7a7f31..fe032d0bf4859 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AcceptRoleResolverTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -54,7 +53,8 @@ public void testInvalidInviteToken() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(false); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(false); AcceptRoleInput input = new AcceptRoleInput(); input.setInviteToken(INVITE_TOKEN_STRING); @@ -69,8 +69,10 @@ public void testNoRoleUrn() 
throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(null); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(null); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); @@ -89,8 +91,10 @@ public void testAssignRolePasses() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(roleUrn); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(roleUrn); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java index 85891dbd96fb0..6411728552a1e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.google.common.collect.ImmutableList; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchAssignRoleResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 8d8faf5c3f12e..9197d1b18c0c9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package 
com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java index ef426979953d0..8e761454cb06c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class GetInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); 
when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index 4a0b062c67ffd..d956295faa180 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.anyInt; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -24,14 +31,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListRolesResolverTest { private static final String ADMIN_ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String EDITOR_ROLE_URN_STRING = "urn:li:dataHubRole:Editor"; @@ -47,8 +46,11 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(roleUrn.toString()); dataHubRoleInfo.setName(roleUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return entityResponse; } @@ -57,8 +59,12 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { public void setupTest() throws Exception { Urn adminRoleUrn = Urn.createFromString(ADMIN_ROLE_URN_STRING); Urn editorRoleUrn = Urn.createFromString(EDITOR_ROLE_URN_STRING); - _entityResponseMap = ImmutableMap.of(adminRoleUrn, getMockRoleEntityResponse(adminRoleUrn), editorRoleUrn, - getMockRoleEntityResponse(editorRoleUrn)); + _entityResponseMap = + ImmutableMap.of( + adminRoleUrn, + getMockRoleEntityResponse(adminRoleUrn), + editorRoleUrn, + getMockRoleEntityResponse(editorRoleUrn)); _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); @@ -84,14 +90,28 @@ public void testListRoles() throws Exception { ListRolesInput input = new ListRolesInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(2); - roleSearchResult.setEntities(new SearchEntityArray( - ImmutableList.of(new 
SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), - new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); - - when(_entityClient.search(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt(), any(), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(2); + roleSearchResult.setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), + new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); + + when(_entityClient.search( + eq(DATAHUB_ROLE_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + any(), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())) + .thenReturn(_entityResponseMap); ListRolesResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c161a66d3ee93..c7d397c5a4a73 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -30,61 +33,49 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class AggregateAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); List facets = ImmutableList.of("platform", "domains"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - 
facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -94,18 +85,16 @@ public static void testApplyViewNullBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", viewFilter, // Verify that view filter was used. 0, 0, facets // Verify called with facets we provide - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -113,42 +102,44 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput 
= + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -158,18 +149,15 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 0, 0, - null - ); + null); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -178,36 +166,28 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of("platform"); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - null, - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -218,18 +198,17 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 
0, 0, facets // Verify facets passed in were used - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -238,36 +217,29 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of(); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -278,55 +250,50 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, null // Verify that an empty list for facets in input sends null - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -334,45 +301,31 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 0, - null - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 0, null); } @Test public static void testErrorFetchingResults() throws Exception { - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -383,17 +336,18 @@ public static void testErrorFetchingResults() throws Exception { private static Filter createFilter(String field, String value) { return new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField(field) - .setValue(value) - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of(value))) - )) - ))); + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(field) + .setValue(value) + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues(new StringArray(ImmutableList.of(value)))))))); } private static DataHubViewInfo getViewInfo(Filter viewFilter) { @@ -402,24 +356,20 @@ private static DataHubViewInfo getViewInfo(Filter viewFilter) { info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); return info; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -430,22 +380,21 @@ private static EntityClient initMockEntityClient( int start, int limit, List facets, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), - Mockito.eq(facets) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class), + Mockito.eq(facets))) + .thenReturn(result); return client; } @@ -456,8 +405,8 @@ private static void verifyMockEntityClient( Filter filter, int start, int limit, - List facets - ) throws Exception { + List facets) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -468,21 +417,13 @@ private static void verifyMockEntityClient( Mockito.eq(null), Mockito.eq(null), Mockito.any(Authentication.class), - Mockito.eq(facets) - ); + Mockito.eq(facets)); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void verifyMockViewService(ViewService mockService, Urn 
viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private AggregateAcrossEntitiesResolverTest() { } - + private AggregateAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index 7397ea8fa21cf..3b69337acfbd0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -32,14 +34,12 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class AutoCompleteForMultipleResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private AutoCompleteForMultipleResolverTest() { } + private AutoCompleteForMultipleResolverTest() {} public static void testAutoCompleteResolverSuccess( EntityClient mockClient, @@ -48,9 +48,10 @@ public static void testAutoCompleteResolverSuccess( EntityType entityType, SearchableEntityType entity, Urn viewUrn, - Filter filter - ) throws Exception { - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); + Filter filter) + throws Exception { + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -65,13 +66,7 @@ public static void testAutoCompleteResolverSuccess( Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - entityName, - "test", - filter, - 10 - ); + verifyMockEntityClient(mockClient, entityName, "test", filter, 10); } // test our main entity types @@ -79,43 +74,64 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception { ViewService viewService = initMockViewService(null, null); // Datasets - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATASET_ENTITY_NAME, EntityType.DATASET, new DatasetType(mockClient), null, null); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); +
testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATASET_ENTITY_NAME, + EntityType.DATASET, + new DatasetType(mockClient), + null, + null); // Dashboards - mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DASHBOARD_ENTITY_NAME, EntityType.DASHBOARD, new DashboardType(mockClient), null, null); + mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DASHBOARD_ENTITY_NAME, + EntityType.DASHBOARD, + new DashboardType(mockClient), + null, + null); - //DataFlows - mockClient = initMockEntityClient( - Constants.DATA_FLOW_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATA_FLOW_ENTITY_NAME, EntityType.DATA_FLOW, new DataFlowType(mockClient), null, null); + // DataFlows + mockClient = + initMockEntityClient( + Constants.DATA_FLOW_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATA_FLOW_ENTITY_NAME, + EntityType.DATA_FLOW, + new DataFlowType(mockClient), + null, + null); } // test filters with a given view @@ -123,16 +139,16 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); testAutoCompleteResolverSuccess( mockClient, viewService, @@ -140,8 +156,7 @@ public static void testAutoCompleteResolverWithViewFilter() throws Exception { EntityType.DATASET, new DatasetType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); } // test entity type filters with a given view @@ -152,16 +167,16 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti entityNames.add(Constants.DASHBOARD_ENTITY_NAME); DataHubViewInfo viewInfo = createViewInfo(entityNames); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( 
+ Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); // ensure we do hit the entity client for dashboards since dashboards are in our view testAutoCompleteResolverSuccess( @@ -171,25 +186,26 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti EntityType.DASHBOARD, new DashboardType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); - // if the view has only dashboards, we should not make an auto-complete request on other entity types + // if the view has only dashboards, we should not make an auto-complete request on other entity + // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), Mockito.eq(10), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } @Test public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(new DatasetType(mockClient)), viewService); + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver( + ImmutableList.of(new DatasetType(mockClient)), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -204,75 +220,60 @@ public static void testAutoCompleteResolverFailNoQuery() throws Exception { } private static EntityClient initMockEntityClient( - String entityName, - String query, - Filter filters, - int limit, - AutoCompleteResult result - ) throws Exception { + String entityName, String query, Filter filters, int limit, AutoCompleteResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.autoComplete( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn(result); + Mockito.when( + client.autoComplete( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filters), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } - + private static void verifyMockEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filters, - int limit - ) throws Exception { + EntityClient mockClient, String entityName, String query, Filter filters, int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), Mockito.eq(limit), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } private static DataHubViewInfo 
createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index a599117c3e165..29a2b3081aefe 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,20 +23,16 @@ import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class GetQuickFiltersResolverTest { @@ -41,19 +40,21 @@ public class GetQuickFiltersResolverTest { public static void testGetQuickFiltersHappyPathSuccess() throws Exception { SearchResultMetadata mockData = getHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + 
.collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -72,19 +73,21 @@ public static void testGetQuickFiltersHappyPathSuccess() throws Exception { public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { SearchResultMetadata mockData = getUnHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -103,16 +106,17 @@ public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { public static void testGetQuickFiltersFailure() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -124,26 +128,36 @@ public static void testGetQuickFiltersFailure() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static void compareResultToExpectedData(GetQuickFiltersResult result, GetQuickFiltersResult expected) { - IntStream.range(0, result.getQuickFilters().size()).forEach(index -> { - QuickFilter resultFilter = result.getQuickFilters().get(index); - QuickFilter expectedFilter = expected.getQuickFilters().get(index); - Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); - Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); - if (resultFilter.getEntity() != null) { - Assert.assertEquals(resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); - } - }); + private static void compareResultToExpectedData( + GetQuickFiltersResult result, GetQuickFiltersResult expected) { + IntStream.range(0, result.getQuickFilters().size()) + .forEach( + index -> { + QuickFilter resultFilter = result.getQuickFilters().get(index); + QuickFilter expectedFilter = expected.getQuickFilters().get(index); + Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); + Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); + if (resultFilter.getEntity() 
!= null) { + Assert.assertEquals( + resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); + } + }); } private static SearchResultMetadata getHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); FilterValueArray entityTypeFilters = new FilterValueArray(); entityTypeFilters.add(createFilterValue("dataset", 100, null)); @@ -168,11 +182,18 @@ private static GetQuickFiltersResult getHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List quickFilters = new ArrayList<>(); // platforms should be in alphabetical order - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DASHBOARD", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_FLOW", null)); @@ -186,9 +207,12 @@ private static GetQuickFiltersResult getHappyPathResultData() { private static SearchResultMetadata getUnHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); // only 3 platforms available - 
platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); FilterValueArray entityTypeFilters = new FilterValueArray(); // no dashboard, data flows, or glossary terms @@ -210,10 +234,15 @@ private static SearchResultMetadata getUnHappyPathTestData() { private static GetQuickFiltersResult getUnHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List quickFilters = new ArrayList<>(); - // in correct order by count for platforms (alphabetical). In correct order by priority for entity types - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + // in correct order by count for platforms (alphabetical). In correct order by priority for + // entity types + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_JOB", null)); quickFilters.add(createQuickFilter("_entityType", "CHART", null)); @@ -224,7 +253,8 @@ private static GetQuickFiltersResult getUnHappyPathResultData() { return result; } - private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { + private static QuickFilter createQuickFilter( + @Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); quickFilter.setValue(value); @@ -234,7 +264,8 @@ private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnu return quickFilter; } - private static FilterValue createFilterValue(@Nonnull final String value, final int count, @Nullable final String entity) { + private static FilterValue createFilterValue( + @Nonnull final String value, final int count, @Nullable final String entity) { FilterValue filterValue = new FilterValue(); filterValue.setValue(value); filterValue.setFacetCount(count); @@ -244,7 +275,8 @@ private static FilterValue createFilterValue(@Nonnull final String value, final return filterValue; } - private static AggregationMetadata createAggregationMetadata(@Nonnull final String name, @Nonnull final FilterValueArray filterValues) { + private static AggregationMetadata createAggregationMetadata( + @Nonnull final String name, @Nonnull final FilterValueArray filterValues) { AggregationMetadata 
aggregationMetadata = new AggregationMetadata(); aggregationMetadata.setName(name); aggregationMetadata.setFilterValues(filterValues); @@ -257,24 +289,22 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private GetQuickFiltersResolverTest() { } - + private GetQuickFiltersResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index b0a681c9b2342..d0bbfd126b9b9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -38,167 +41,172 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - public class SearchAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = 
initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockEntityClient( + mockClient, + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", - viewFilter, + viewFilter, // Verify that view filter was used. 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockEntityClient( - mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. - "", - viewFilter, // Verify that view filter was used. 
- 0, - 10 - ); - - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + 10); + + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("baseField.keyword") - .setValue("baseTest") - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("baseTest"))) - )) - ))); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("baseField.keyword") + .setValue("baseTest") + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues( + new StringArray(ImmutableList.of("baseTest")))))))); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); 
+ + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -208,74 +216,66 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewNullBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - null, - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver 
resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + null, "", 0, 10, null, null, TEST_VIEW_URN.toString(), null, null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -285,74 +285,75 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewEmptyBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); 
+ + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -362,56 +363,55 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -419,49 +419,41 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 10 - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 10); } @Test public static void testApplyViewErrorFetchingView() throws Exception { // When a view cannot be successfully resolved, the endpoint should THROW.
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -470,17 +462,10 @@ public static void testApplyViewErrorFetchingView() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -490,21 +475,20 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } @@ -514,8 +498,8 @@ private static void verifyMockEntityClient( String query, Filter filter, int start, - int limit - ) throws Exception { + int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -525,21 +509,13 @@ private static void verifyMockEntityClient( Mockito.eq(limit), Mockito.eq(null), Mockito.eq(null), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } - 
private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private SearchAcrossEntitiesResolverTest() { } - + private SearchAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index c68b621e6921f..273f7156c12a8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.UrnUtils; @@ -23,15 +27,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - // Initialize this class in the style of SearchAcrossEntitiesResolverTest.java public class SearchAcrossLineageResolverTest { - private static final String SOURCE_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; - private static final String TARGET_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; + private static final String SOURCE_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; + private static final String TARGET_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; private static final String QUERY = ""; private static final int START = 0; private static final int COUNT = 10; @@ -87,19 +88,20 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); when(_entityClient.searchAcrossLineage( - eq(UrnUtils.getUrn(SOURCE_URN_STRING)), - eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), - anyList(), - eq(QUERY), - eq(null), - any(), - eq(null), - eq(START), - eq(COUNT), - eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))).thenReturn(lineageSearchResult); + eq(UrnUtils.getUrn(SOURCE_URN_STRING)), + eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), + anyList(), + eq(QUERY), + eq(null), + any(), + eq(null), + eq(START), + eq(COUNT), + eq(START_TIMESTAMP_MILLIS), + eq(END_TIMESTAMP_MILLIS), + eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), + eq(_authentication))) + .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(results.getCount(), 10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 
6ba8b3cefe504..24724cb8e23ad 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,134 +18,107 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - - public class SearchResolverTest { - @Test - public void testDefaultSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "", - null, - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - @Test - public void testOverrideSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchFlags inputSearchFlags = new SearchFlags(); - inputSearchFlags.setFulltext(false); - inputSearchFlags.setSkipAggregates(true); - inputSearchFlags.setSkipHighlighting(true); - inputSearchFlags.setMaxAggValues(10); - inputSearchFlags.setSkipCache(true); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 1, - 11, - null, - null, - inputSearchFlags - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
- "", - null, - null, - 1, - 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true) - ); - } - - @Test - public void testNonWildCardSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "not a wildcard", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "not a wildcard", - null, // Verify that view filter was used. - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - private EntityClient initMockSearchEntityClient() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.search( + @Test + public void testDefaultSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = new SearchInput(EntityType.DATASET, "", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "", + null, + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + @Test + public void testOverrideSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchFlags inputSearchFlags = new SearchFlags(); + inputSearchFlags.setFulltext(false); + inputSearchFlags.setSkipAggregates(true); + inputSearchFlags.setSkipHighlighting(true); + inputSearchFlags.setMaxAggValues(10); + inputSearchFlags.setSkipCache(true); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "", 1, 11, null, null, inputSearchFlags); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + null, + 1, + 11, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true)); + } + + @Test + public void testNonWildCardSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "not a wildcard", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "not a wildcard", + null, // Verify that view filter was used. + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + private EntityClient initMockSearchEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + Mockito.when( + client.search( Mockito.anyString(), Mockito.anyString(), Mockito.any(), @@ -151,40 +126,38 @@ private EntityClient initMockSearchEntityClient() throws Exception { Mockito.anyInt(), Mockito.anyInt(), Mockito.any(Authentication.class), - Mockito.any() - )).thenReturn( - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - return client; - } - - private void verifyMockSearchEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filter, - SortCriterion sortCriterion, - int start, - int limit, - com.linkedin.metadata.query.SearchFlags searchFlags - ) throws Exception { - Mockito.verify(mockClient, Mockito.times(1)).search( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(sortCriterion), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags) - ); - } - - private SearchResolverTest() { - } + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + return client; + } + + private void verifyMockSearchEntityClient( + EntityClient mockClient, + String entityName, + String query, + Filter filter, + SortCriterion sortCriterion, + int start, + int limit, + com.linkedin.metadata.query.SearchFlags searchFlags) + throws Exception { + Mockito.verify(mockClient, Mockito.times(1)) + .search( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(sortCriterion), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class), + Mockito.eq(searchFlags)); + } + + private SearchResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java index b35f7a77b209c..8f23f0a624576 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java 
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java @@ -17,16 +17,18 @@ public class SearchUtilsTest { @Test public static void testApplyViewToFilterNullBaseFilter() { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(null, viewFilter); Assert.assertEquals(viewFilter, result); @@ -34,275 +36,272 @@ public static void testApplyViewToFilterNullBaseFilter() { @Test public static void testApplyViewToFilterComplexBaseFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new 
StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterComplexViewFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - 
.setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - 
.setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray(ImmutableList.of("viewTest4")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterV1Filter() { - Filter baseFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ); - - Filter viewFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ); - - Filter result 
= SearchUtils.combineFilters(baseFilter, viewFilter); - - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( + Filter baseFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("field1") .setValue("test1") @@ -310,7 +309,13 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("field2") .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), + .setValues(new StringArray(ImmutableList.of("test2")))))); + + Filter viewFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("viewField1") .setValue("viewTest1") @@ -318,10 +323,38 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("viewField2") .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ) - ))); + .setValues(new StringArray(ImmutableList.of("viewTest2")))))); + + Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); + + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray( + ImmutableList.of("viewTest2"))))))))); Assert.assertEquals(expectedResult, result); } @@ -329,24 +362,17 @@ public static void testApplyViewToFilterV1Filter() { @Test public static void testApplyViewToEntityTypes() { - List baseEntityTypes = ImmutableList.of( - Constants.CHART_ENTITY_NAME, - Constants.DATASET_ENTITY_NAME - ); + List baseEntityTypes = + ImmutableList.of(Constants.CHART_ENTITY_NAME, Constants.DATASET_ENTITY_NAME); - List viewEntityTypes = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME - ); + List viewEntityTypes = + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME); final List result = SearchUtils.intersectEntityTypes(baseEntityTypes, viewEntityTypes); - final List expectedResult = ImmutableList.of( - Constants.DATASET_ENTITY_NAME - ); + final List expectedResult = ImmutableList.of(Constants.DATASET_ENTITY_NAME); Assert.assertEquals(expectedResult, result); } - private SearchUtilsTest() { } - + private SearchUtilsTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java index 905e913fba909..553a2c85a7ae2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,29 +18,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateCorpUserViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = new UpdateCorpUserViewsSettingsInput( - TEST_URN.toString() - ); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = new UpdateCorpUserViewsSettingsInput( - null - ); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = + new UpdateCorpUserViewsSettingsInput(TEST_URN.toString()); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = + new UpdateCorpUserViewsSettingsInput(null); @Test public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -46,25 +44,28 @@ public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessViewSettingsExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + 
UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -73,26 +74,28 @@ public void testGetSuccessViewSettingsExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } - @Test public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -101,22 +104,26 @@ public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), + Mockito.any(Authentication.class)); } @Test public void testGetCorpUserSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getCorpUserSettings(Mockito.eq(TEST_USER_URN), Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new 
UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -126,19 +133,18 @@ public void testGetCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateCorpUserSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - null - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(CorpUserSettings.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(TEST_USER_URN, null); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.any(CorpUserSettings.class), + Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -148,17 +154,13 @@ public void testUpdateCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - Urn user, - CorpUserSettings existingSettings - ) { + private static SettingsService initSettingsService(Urn user, CorpUserSettings existingSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getCorpUserSettings( - Mockito.eq(user), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getCorpUserSettings(Mockito.eq(user), Mockito.any(Authentication.class))) .thenReturn(existingSettings); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java index 4e2283735b8c9..8f96eae9480f8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,10 +17,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class GlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -25,9 +24,7 @@ public class GlobalViewsSettingsResolverTest { @Test public void testGetSuccessNullSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); GlobalViewsSettingsResolver resolver = new 
GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -42,9 +39,7 @@ public void testGetSuccessNullSettings() throws Exception { @Test public void testGetSuccessEmptySettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -53,16 +48,13 @@ public void testGetSuccessEmptySettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertNull( - result.getDefaultView() - ); + Assert.assertNull(result.getDefaultView()); } @Test public void testGetSuccessExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(TEST_URN) - ); + SettingsService mockService = + initSettingsService(new GlobalViewsSettings().setDefaultView(TEST_URN)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -71,17 +63,15 @@ public void testGetSuccessExistingSettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertEquals( - result.getDefaultView(), - TEST_URN.toString() - ); + Assert.assertEquals(result.getDefaultView(), TEST_URN.toString()); } @Test public void testGetException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); @@ -94,9 +84,7 @@ public void testGetException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -107,15 +95,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - Mockito.any(Authentication.class))) + Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class))) .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java index 9ea3c223559cd..c0cc09052176d 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,22 +17,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateGlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); - private static final UpdateGlobalViewsSettingsInput TEST_INPUT = new UpdateGlobalViewsSettingsInput( - TEST_URN.toString() - ); + private static final UpdateGlobalViewsSettingsInput TEST_INPUT = + new UpdateGlobalViewsSettingsInput(TEST_URN.toString()); @Test public void testGetSuccessNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -39,16 +35,17 @@ public void testGetSuccessNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessNoDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -58,18 +55,20 @@ public void testGetSuccessNoDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessExistingDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - )) - ); + SettingsService mockService = + initSettingsService( + new GlobalViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView"))); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -79,16 +78,20 @@ public void 
testGetSuccessExistingDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetGlobalViewsSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -100,15 +103,13 @@ public void testGetGlobalViewsSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateGlobalViewsSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateGlobalSettings( - Mockito.any(GlobalSettingsInfo.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateGlobalSettings( + Mockito.any(GlobalSettingsInfo.class), Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -122,11 +123,13 @@ public void testUpdateGlobalViewsSettingsException() throws Exception { @Test public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - null // Should never be null. - ); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + SettingsService mockService = + initSettingsService( + null // Should never be null. 
+ );
+ Mockito.doThrow(RuntimeException.class)
+ .when(mockService)
+ .getGlobalSettings(Mockito.any(Authentication.class));

 UpdateGlobalViewsSettingsResolver resolver =
 new UpdateGlobalViewsSettingsResolver(mockService);

@@ -140,9 +143,7 @@ public void testGetGlobalViewsSettingsNoSettingsException() throws Exception {

 @Test
 public void testGetUnauthorized() throws Exception {
- SettingsService mockService = initSettingsService(
- new GlobalViewsSettings()
- );
+ SettingsService mockService = initSettingsService(new GlobalViewsSettings());
 UpdateGlobalViewsSettingsResolver resolver =
 new UpdateGlobalViewsSettingsResolver(mockService);

 // Execute resolver
@@ -154,15 +155,12 @@ public void testGetUnauthorized() throws Exception {
 assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
 }

- private static SettingsService initSettingsService(
- GlobalViewsSettings existingViewSettings
- ) {
+ private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) {
 SettingsService mockService = Mockito.mock(SettingsService.class);

- Mockito.when(mockService.getGlobalSettings(
- Mockito.any(Authentication.class)))
- .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL));
+ Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class)))
+ .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL));

 return mockService;
 }
-} \ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java
index 8c4445452c564..db3e9afab7249 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java
@@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.step;
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
@@ -21,12 +26,6 @@ import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class BatchGetStepStatesResolverTest {
 private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test");
 private static final long TIME = 123L;
@@ -35,7 +34,8 @@ public class BatchGetStepStatesResolverTest {
 private static final String SECOND_STEP_STATE_ID = "2";
 private static final Urn FIRST_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:1");
 private static final Urn SECOND_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:2");
- private static final Set<String> ASPECTS = ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME);
+ private static final Set<String> ASPECTS =
+ ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME);
 private EntityClient _entityClient;
 private BatchGetStepStatesResolver _resolver;
 private DataFetchingEnvironment _dataFetchingEnvironment;
@@ -68,15 +68,17 @@ public void testBatchGetStepStatesFirstStepCompleted() throws Exception {
 new DataHubStepStateProperties().setLastModified(AUDIT_STAMP);

 final Set<Urn> urns = ImmutableSet.of(FIRST_STEP_STATE_URN);
- final Map<String, RecordTemplate> firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
- firstStepStateProperties);
- final Map<Urn, EntityResponse> entityResponseMap = ImmutableMap.of(FIRST_STEP_STATE_URN,
- TestUtils.buildEntityResponse(firstAspectMap));
+ final Map<String, RecordTemplate> firstAspectMap =
+ ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties);
+ final Map<Urn, EntityResponse> entityResponseMap =
+ ImmutableMap.of(FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap));

- when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
+ when(_entityClient.batchGetV2(
+ eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
 .thenReturn(entityResponseMap);

- final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join();
+ final BatchGetStepStatesResult actualBatchResult =
+ _resolver.get(_dataFetchingEnvironment).join();
 assertNotNull(actualBatchResult);
 assertEquals(1, actualBatchResult.getResults().size());
 }
@@ -100,18 +102,21 @@ public void testBatchGetStepStatesBothStepsCompleted() throws Exception {
 new DataHubStepStateProperties().setLastModified(AUDIT_STAMP);

 final Set<Urn> urns = ImmutableSet.of(FIRST_STEP_STATE_URN, SECOND_STEP_STATE_URN);
- final Map<String, RecordTemplate> firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
- firstStepStateProperties);
- final Map<String, RecordTemplate> secondAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME,
- secondStepStateProperties);
- final Map<Urn, EntityResponse> entityResponseMap = ImmutableMap.of(
- FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap),
- SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap));
-
- when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
+ final Map<String, RecordTemplate> firstAspectMap =
+ ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties);
+ final Map<String, RecordTemplate> secondAspectMap =
+ ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, secondStepStateProperties);
+ final Map<Urn, EntityResponse> entityResponseMap =
+ ImmutableMap.of(
+ FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap),
+ SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap));
+
+ when(_entityClient.batchGetV2(
+ eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication)))
 .thenReturn(entityResponseMap);

- final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join();
+ final BatchGetStepStatesResult actualBatchResult =
+ _resolver.get(_dataFetchingEnvironment).join();
 assertNotNull(actualBatchResult);
 assertEquals(2, actualBatchResult.getResults().size());
 }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java
index 5f20a11f15ac6..b457498cc547a 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java
@@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step;
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static
org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -16,11 +20,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchUpdateStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final String FIRST_STEP_STATE_ID = "1"; @@ -52,7 +51,8 @@ public void testBatchUpdateStepStatesFirstStepCompleted() throws Exception { input.setStates(ImmutableList.of(firstInput)); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); - final BatchUpdateStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchUpdateStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, actualBatchResult.getResults().size()); verify(_entityClient, times(1)).ingestProposal(any(), eq(_authentication), eq(false)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 268d6a6bc4268..340802cde467b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -20,14 +24,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class AddTagsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -35,11 +35,12 @@ public class AddTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); @@ -50,46 +51,51 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(originalTags); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalTags); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); @@ -100,41 +106,43 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new 
TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -145,9 +153,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -159,10 +166,11 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -173,9 +181,8 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -191,9 +198,8 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -206,21 +212,21 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index 651b89359c83f..71354627b1145 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -18,21 +22,17 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchAddTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private 
static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -40,19 +40,20 @@ public class BatchAddTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -64,55 +65,63 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, 
Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -126,45 +135,49 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + 
Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -175,33 +188,36 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -213,19 +229,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new 
ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -236,42 +254,47 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index f302540eba904..8cd10afee293e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -26,15 +30,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchRemoveTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -42,15 +43,17 @@ public class BatchRemoveTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -64,22 +67,25 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + 
new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); proposal2.setEntityType(Constants.DATASET_ENTITY_NAME); proposal2.setAspectName(Constants.GLOBAL_TAGS_ASPECT_NAME); @@ -93,25 +99,33 @@ public void testGetSuccessNoExistingTags() throws Exception { public void testGetSuccessExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - final GlobalTags oldTags1 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags1 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags1); - final GlobalTags oldTags2 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags2 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -125,22 +139,25 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -149,15 +166,17 @@ public void testGetSuccessExistingTags() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -169,19 +188,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -192,44 +213,49 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java index f801daf4f2a3f..dac7104ca2930 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java @@ -1,39 +1,36 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTagInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.metadata.entity.EntityService; -import com.linkedin.tag.TagProperties; import com.linkedin.metadata.key.TagKey; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.tag.TagProperties; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class CreateTagResolverTest { - private static final CreateTagInput TEST_INPUT = new CreateTagInput( - "test-id", - "test-name", - "test-description" - ); + private static final CreateTagInput TEST_INPUT = + new CreateTagInput("test-id", "test-name", "test-description"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal(Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) .thenReturn(String.format("urn:li:tag:%s", TEST_INPUT.getId())); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -50,15 +47,13 @@ public void testGetSuccess() throws Exception { TagProperties props = new TagProperties(); props.setDescription("test-description"); props.setName("test-name"); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, props); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, props); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -75,9 +70,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -85,10 +79,9 @@ public void testGetEntityClientException() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); // Execute resolver @@ -99,4 +92,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java
index b01ac1a9b14ae..11dfad43d5731 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -9,10 +12,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class DeleteTagResolverTest {
 
   private static final String TEST_URN = "urn:li:tag:test-id";
@@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java
index b5bbf0775a8ba..6ae72fcbb7268 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.tag;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -23,11 +27,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.*;
-
-
 public class SetTagColorResolverTest {
 
   private static final String TEST_ENTITY_URN = "urn:li:tag:test-tag";
@@ -41,10 +40,11 @@ public void testGetSuccessExistingProperties() throws Exception {
 
     // Test setting the domain
     final TagProperties oldTagProperties = new TagProperties().setName("Test Tag");
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(oldTagProperties);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -59,19 +59,17 @@ public void testGetSuccessExistingProperties() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     resolver.get(mockEnv).get();
 
-    final TagProperties newTagProperties = new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX);
-    final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN),
-        TAG_PROPERTIES_ASPECT_NAME, newTagProperties);
+    final TagProperties newTagProperties =
+        new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX);
+    final MetadataChangeProposal proposal =
+        MutationUtils.buildMetadataChangeProposalWithUrn(
+            UrnUtils.getUrn(TEST_ENTITY_URN), TAG_PROPERTIES_ASPECT_NAME, newTagProperties);
 
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)));
   }
 
   @Test
@@ -81,10 +79,11 @@ public void testGetFailureNoExistingProperties() throws Exception {
     EntityService mockService = getMockEntityService();
 
     // Test setting the domain
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME),
-        Mockito.eq(0)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME),
+                Mockito.eq(0)))
         .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -99,9 +98,8 @@ public void testGetFailureNoExistingProperties() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -111,21 +109,26 @@ public void testGetFailureTagDoesNotExist() throws Exception {
 
     // Test setting the domain
     final TagProperties oldTagProperties = new TagProperties().setName("Test Tag");
-    final EnvelopedAspect oldTagPropertiesAspect = new EnvelopedAspect()
-        .setName(Constants.TAG_PROPERTIES_ASPECT_NAME)
-        .setValue(new Aspect(oldTagProperties.data()));
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.TAG_ENTITY_NAME),
-        Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
-        Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN),
-            new EntityResponse()
-                .setEntityName(Constants.TAG_ENTITY_NAME)
-                .setUrn(Urn.createFromString(TEST_ENTITY_URN))
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.TAG_PROPERTIES_ASPECT_NAME,
-                    oldTagPropertiesAspect)))));
+    final EnvelopedAspect oldTagPropertiesAspect =
+        new EnvelopedAspect()
+            .setName(Constants.TAG_PROPERTIES_ASPECT_NAME)
+            .setValue(new Aspect(oldTagProperties.data()));
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.TAG_ENTITY_NAME),
+                Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))),
+                Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            ImmutableMap.of(
+                Urn.createFromString(TEST_ENTITY_URN),
+                new EntityResponse()
+                    .setEntityName(Constants.TAG_ENTITY_NAME)
+                    .setUrn(Urn.createFromString(TEST_ENTITY_URN))
+                    .setAspects(
+                        new EnvelopedAspectMap(
+                            ImmutableMap.of(
+                                Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect)))));
 
     EntityService mockService = getMockEntityService();
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
@@ -139,9 +142,8 @@ public void testGetFailureTagDoesNotExist() throws Exception {
     Mockito.when(mockEnv.getArgument(Mockito.eq("colorHex"))).thenReturn(TEST_COLOR_HEX);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -159,18 +161,18 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
-    SetTagColorResolver resolver = new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
+    SetTagColorResolver resolver =
+        new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class));
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
@@ -181,4 +183,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java
index 213d21fd35dc1..cb827a42333b2 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.term;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlossaryTermAssociation;
@@ -19,13 +22,10 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class AddTermsResolverTest {
 
-  private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
   private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1";
   private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2";
 
@@ -33,11 +33,12 @@ public class AddTermsResolverTest {
   public void testGetSuccessNoExistingTerms() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
-        .thenReturn(null);
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
+        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true);
@@ -48,41 +49,42 @@ public void testGetSuccessNoExistingTerms() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTermsInput input = new AddTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTermsInput input =
+        new AddTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
     // Unable to easily validate exact payload due to the injected timestamp
-    Mockito.verify(mockService, Mockito.times(1)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.eq(false)
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)));
   }
 
   @Test
   public void testGetSuccessExistingTerms() throws Exception {
-    GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of(
-        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))
-    );
+    GlossaryTerms originalTerms =
+        new GlossaryTerms()
+            .setTerms(
+                new GlossaryTermAssociationArray(
+                    ImmutableList.of(
+                        new GlossaryTermAssociation()
+                            .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))));
 
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(originalTerms);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -94,37 +96,34 @@ public void testGetSuccessExistingTerms() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTermsInput input = new AddTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTermsInput input =
+        new AddTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
 
     // Unable to easily validate exact payload due to the injected timestamp
-    Mockito.verify(mockService, Mockito.times(1)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.eq(false)
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))
-    );
-
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)));
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)));
   }
 
   @Test
   public void testGetFailureTermDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true);
@@ -135,26 +134,28 @@ public void testGetFailureTermDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTermsInput input = new AddTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTermsInput input =
+        new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
   @Test
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false);
@@ -165,16 +166,17 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTermsInput input = new AddTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTermsInput input =
+        new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
   @Test
@@ -185,38 +187,41 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    AddTermsInput input = new AddTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTermsInput input =
+        new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockService, Mockito.times(0)).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.verify(mockService, Mockito.times(0))
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
 
     AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class));
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    AddTermsInput input = new AddTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN
-    ), TEST_ENTITY_URN, null, null);
+    AddTermsInput input =
+        new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null);
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
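The three-argument getAspect stub above recurs, with only the URN and return value changing, throughout the term resolver tests in this patch. A small helper along these lines (hypothetical, not part of the change) would remove the repetition; version 0L matches the latest-aspect lookup these resolvers make:

    // Hypothetical helper: stubs the latest (version 0) glossary-terms aspect for one entity URN.
    private static void stubGlossaryTerms(
        EntityService mockService, String entityUrn, GlossaryTerms terms) {
      Mockito.when(
              mockService.getAspect(
                  Mockito.eq(UrnUtils.getUrn(entityUrn)),
                  Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
                  Mockito.eq(0L)))
          .thenReturn(terms);
    }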
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java
index 8887bb452b478..7df19fad52689 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.term;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlossaryTermAssociation;
@@ -20,14 +23,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class BatchAddTermsResolverTest {
 
-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_GLOSSARY_TERM_1_URN = "urn:li:glossaryTerm:test-id-1";
   private static final String TEST_GLOSSARY_TERM_2_URN = "urn:li:glossaryTerm:test-id-2";
 
@@ -35,123 +36,134 @@ public class BatchAddTermsResolverTest {
   public void testGetSuccessNoExistingTerms() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)))
+        .thenReturn(true);
 
     BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN,
-        TEST_GLOSSARY_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTermsInput input =
+        new BatchAddTermsInput(
+            ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
     verifyIngestProposal(mockService, 1);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)));
   }
 
   @Test
   public void testGetSuccessExistingTerms() throws Exception {
-    GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of(
-        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN))))
-    );
+    GlossaryTerms originalTerms =
+        new GlossaryTerms()
+            .setTerms(
+                new GlossaryTermAssociationArray(
+                    ImmutableList.of(
+                        new GlossaryTermAssociation()
+                            .setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN)))));
 
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(originalTerms);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(originalTerms);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)))
+        .thenReturn(true);
+    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)))
+        .thenReturn(true);
 
     BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(
-        TEST_GLOSSARY_TERM_1_URN,
-        TEST_GLOSSARY_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTermsInput input =
+        new BatchAddTermsInput(
+            ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertTrue(resolver.get(mockEnv).get());
     verifyIngestProposal(mockService, 1);
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)));
 
-    Mockito.verify(mockService, Mockito.times(1)).exists(
-        Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))
-    );
+    Mockito.verify(mockService, Mockito.times(1))
+        .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)));
   }
 
   @Test
   public void testGetFailureTagDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(false);
+    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)))
+        .thenReturn(false);
 
     BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN,
-        TEST_GLOSSARY_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
+    BatchAddTermsInput input =
+        new BatchAddTermsInput(
+            ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN),
+            ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -163,31 +175,35 @@ public void testGetFailureTagDoesNotExist() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true);
-    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true);
+    Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)))
+        .thenReturn(true);
 
     BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService);
 
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN,
-        TEST_GLOSSARY_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTermsInput input =
+        new BatchAddTermsInput(
+            ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -203,11 +219,12 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN,
-        TEST_GLOSSARY_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchAddTermsInput input =
+        new BatchAddTermsInput(
+            ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
@@ -220,21 +237,25 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
 
     BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
+    BatchAddTermsInput input =
+        new BatchAddTermsInput(
+            ImmutableList.of(TEST_GLOSSARY_TERM_1_URN),
+            ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java
index 995a4acb8a467..659ce40542a9c 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.term;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.GlossaryTermAssociation;
@@ -20,14 +23,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class BatchRemoveTermsResolverTest {
 
-  private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
-  private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
+  private static final String TEST_ENTITY_URN_1 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)";
+  private static final String TEST_ENTITY_URN_2 =
+      "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)";
   private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1";
   private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2";
 
@@ -35,15 +36,17 @@ public class BatchRemoveTermsResolverTest {
   public void testGetSuccessNoExistingTerms() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -57,12 +60,12 @@ public void testGetSuccessNoExistingTerms() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveTermsInput input =
+        new BatchRemoveTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
@@ -74,25 +77,36 @@ public void testGetSuccessNoExistingTerms() throws Exception {
   public void testGetSuccessExistingTerms() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    final GlossaryTerms oldTerms1 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of(
-        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)),
-        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN))
-    )));
-
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    final GlossaryTerms oldTerms1 =
+        new GlossaryTerms()
+            .setTerms(
+                new GlossaryTermAssociationArray(
+                    ImmutableList.of(
+                        new GlossaryTermAssociation()
+                            .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)),
+                        new GlossaryTermAssociation()
+                            .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN))))));
+
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
         .thenReturn(oldTerms1);
 
-    final GlossaryTerms oldTerms2 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of(
-        new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))
-    )));
-
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    final GlossaryTerms oldTerms2 =
+        new GlossaryTerms()
+            .setTerms(
+                new GlossaryTermAssociationArray(
+                    ImmutableList.of(
+                        new GlossaryTermAssociation()
+                            .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))));
+
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(oldTerms2);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true);
@@ -106,12 +120,12 @@ public void testGetSuccessExistingTerms() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveTermsInput input =
+        new BatchRemoveTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
     assertTrue(resolver.get(mockEnv).get());
@@ -123,15 +137,17 @@ public void testGetSuccessExistingTerms() throws Exception {
   public void testGetFailureResourceDoesNotExist() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
-    Mockito.when(mockService.getAspect(
-        Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
-        Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
-        Mockito.eq(0L)))
+    Mockito.when(
+            mockService.getAspect(
+                Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)),
+                Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME),
+                Mockito.eq(0L)))
        .thenReturn(null);
 
     Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false);
@@ -143,12 +159,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception {
     // Execute resolver
     QueryContext mockContext = getMockAllowContext();
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveTermsInput input =
+        new BatchRemoveTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
@@ -164,12 +180,12 @@ public void testGetUnauthorized() throws Exception {
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
-    BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveTermsInput input =
+        new BatchRemoveTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     QueryContext mockContext = getMockDenyContext();
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
@@ -182,24 +198,27 @@ public void testGetUnauthorized() throws Exception {
   public void testGetEntityClientException() throws Exception {
     EntityService mockService = getMockEntityService();
 
-    Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal(
-        Mockito.any(AspectsBatchImpl.class),
-        Mockito.any(AuditStamp.class), Mockito.anyBoolean());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .ingestProposal(
+            Mockito.any(AspectsBatchImpl.class),
+            Mockito.any(AuditStamp.class),
+            Mockito.anyBoolean());
 
     BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService);
 
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockAllowContext();
-    BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of(
-        TEST_TERM_1_URN,
-        TEST_TERM_2_URN
-    ), ImmutableList.of(
-        new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
-        new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
+    BatchRemoveTermsInput input =
+        new BatchRemoveTermsInput(
+            ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN),
+            ImmutableList.of(
+                new ResourceRefInput(TEST_ENTITY_URN_1, null, null),
+                new ResourceRefInput(TEST_ENTITY_URN_2, null, null)));
     Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
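Every unauthorized-path test in these files follows the same shape: a deny context from the patch's own TestUtils, then an assertion that join() fails. Condensed for reference, as a sketch reusing names from the tests above rather than new code in the patch:

    // resolver.get(...) returns a CompletableFuture; join() wraps the resolver's
    // authorization failure in a CompletionException, which is what the tests assert on.
    DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input);
    Mockito.when(mockEnv.getContext()).thenReturn(getMockDenyContext());
    assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());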
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java
index 911152d8c97c1..adf4b1c29ad0d 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.datahub.graphql.QueryContext;
 import com.linkedin.datahub.graphql.generated.CreateTestInput;
@@ -19,19 +22,15 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class CreateTestResolverTest {
 
-  private static final CreateTestInput TEST_INPUT = new CreateTestInput(
-      "test-id",
-      "test-name",
-      "test-category",
-      "test-description",
-      new TestDefinitionInput("{}")
-  );
+  private static final CreateTestInput TEST_INPUT =
+      new CreateTestInput(
+          "test-id",
+          "test-name",
+          "test-category",
+          "test-description",
+          new TestDefinitionInput("{}"));
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -50,16 +49,21 @@ public void testGetSuccess() throws Exception {
     final TestKey key = new TestKey();
     key.setId("test-id");
 
-    ArgumentCaptor<MetadataChangeProposal> proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class);
+    ArgumentCaptor<MetadataChangeProposal> proposalCaptor =
+        ArgumentCaptor.forClass(MetadataChangeProposal.class);
     Mockito.verify(mockClient, Mockito.times(1))
-        .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false));
+        .ingestProposal(
+            proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false));
     MetadataChangeProposal resultProposal = proposalCaptor.getValue();
     assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME);
     assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME);
     assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT);
     assertEquals(resultProposal.getEntityKeyAspect(), GenericRecordUtils.serializeAspect(key));
-    TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(),
-        resultProposal.getAspect().getContentType(), TestInfo.class);
+    TestInfo resultInfo =
+        GenericRecordUtils.deserializeAspect(
+            resultProposal.getAspect().getValue(),
+            resultProposal.getAspect().getContentType(),
+            TestInfo.class);
     assertEquals(resultInfo.getName(), "test-name");
     assertEquals(resultInfo.getCategory(), "test-category");
     assertEquals(resultInfo.getDescription(), "test-description");
@@ -80,19 +84,17 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false));
 
     CreateTestResolver resolver = new CreateTestResolver(mockClient);
 
     // Execute resolver
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java
index 6a449e3c4c4c4..1c4973871af09 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -9,10 +12,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class DeleteTestResolverTest {
 
   private static final String TEST_URN = "urn:li:test:test-id";
@@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception {
 
     assertTrue(resolver.get(mockEnv).get());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(Urn.createFromString(TEST_URN)),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(
+            Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class));
   }
 
   @Test
@@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .deleteEntity(Mockito.any(), Mockito.any(Authentication.class));
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java
index 5026e015039e1..6075425d09c05 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.common.urn.Urn;
@@ -18,37 +21,34 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class ListTestsResolverTest {
 
   private static final Urn TEST_URN = Urn.createFromTuple("test", "test-id");
 
-  private static final ListTestsInput TEST_INPUT = new ListTestsInput(
-      0, 20, null
-  );
+  private static final ListTestsInput TEST_INPUT = new ListTestsInput(0, 20, null);
 
   @Test
   public void testGetSuccess() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.search(
-        Mockito.eq(Constants.TEST_ENTITY_NAME),
-        Mockito.eq(""),
-        Mockito.eq(Collections.emptyMap()),
-        Mockito.eq(0),
-        Mockito.eq(20),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(
-        new SearchResult()
-            .setFrom(0)
-            .setPageSize(1)
-            .setNumEntities(1)
-            .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))
-    );
+    Mockito.when(
+            mockClient.search(
+                Mockito.eq(Constants.TEST_ENTITY_NAME),
+                Mockito.eq(""),
+                Mockito.eq(Collections.emptyMap()),
+                Mockito.eq(0),
+                Mockito.eq(20),
+                Mockito.any(Authentication.class),
+                Mockito.eq(new SearchFlags().setFulltext(true))))
+        .thenReturn(
+            new SearchResult()
+                .setFrom(0)
+                .setPageSize(1)
+                .setNumEntities(1)
+                .setEntities(
+                    new SearchEntityArray(
+                        ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))));
 
     ListTestsResolver resolver = new ListTestsResolver(mockClient);
 
@@ -75,33 +75,35 @@ public void testGetUnauthorized() throws Exception {
     // Execute resolver
     DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class);
     QueryContext mockContext = getMockDenyContext();
-    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(
-        TEST_INPUT);
+    Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT);
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).search(
-        Mockito.any(),
-        Mockito.eq(""),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .search(
+            Mockito.any(),
+            Mockito.eq(""),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Create resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search(
-        Mockito.any(),
-        Mockito.eq(""),
-        Mockito.anyMap(),
-        Mockito.anyInt(),
-        Mockito.anyInt(),
-        Mockito.any(Authentication.class),
-        Mockito.eq(new SearchFlags().setFulltext(true)));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .search(
+            Mockito.any(),
+            Mockito.eq(""),
+            Mockito.anyMap(),
+            Mockito.anyInt(),
+            Mockito.anyInt(),
+            Mockito.any(Authentication.class),
+            Mockito.eq(new SearchFlags().setFulltext(true)));
 
     ListTestsResolver resolver = new ListTestsResolver(mockClient);
 
     // Execute resolver
@@ -112,4 +114,4 @@ public void testGetEntityClientException() throws Exception {
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
   }
-}
\ No newline at end of file
+}
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java
index ae24232bce17c..45e0126367578 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java
@@ -1,10 +1,13 @@
 package com.linkedin.datahub.graphql.resolvers.test;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.linkedin.common.urn.UrnUtils;
 import com.linkedin.datahub.graphql.QueryContext;
-import com.linkedin.datahub.graphql.generated.UpdateTestInput;
 import com.linkedin.datahub.graphql.generated.TestDefinitionInput;
+import com.linkedin.datahub.graphql.generated.UpdateTestInput;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.metadata.Constants;
@@ -19,19 +22,12 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class UpdateTestResolverTest {
 
   private static final String TEST_URN = "urn:li:test:test-id";
-  private static final UpdateTestInput TEST_INPUT = new UpdateTestInput(
-      "test-name",
-      "test-category",
-      "test-description",
-      new TestDefinitionInput("{}")
-  );
+  private static final UpdateTestInput TEST_INPUT =
+      new UpdateTestInput(
+          "test-name", "test-category", "test-description", new TestDefinitionInput("{}"));
 
   @Test
   public void testGetSuccess() throws Exception {
@@ -48,16 +44,21 @@ public void testGetSuccess() throws Exception {
 
     resolver.get(mockEnv).get();
 
-    ArgumentCaptor<MetadataChangeProposal> proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class);
+    ArgumentCaptor<MetadataChangeProposal> proposalCaptor =
+        ArgumentCaptor.forClass(MetadataChangeProposal.class);
     Mockito.verify(mockClient, Mockito.times(1))
-        .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false));
+        .ingestProposal(
+            proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false));
     MetadataChangeProposal resultProposal = proposalCaptor.getValue();
     assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME);
     assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME);
     assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT);
     assertEquals(resultProposal.getEntityUrn(), UrnUtils.getUrn(TEST_URN));
-    TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(),
-        resultProposal.getAspect().getContentType(), TestInfo.class);
+    TestInfo resultInfo =
+        GenericRecordUtils.deserializeAspect(
+            resultProposal.getAspect().getValue(),
+            resultProposal.getAspect().getContentType(),
+            TestInfo.class);
     assertEquals(resultInfo.getName(), "test-name");
     assertEquals(resultInfo.getCategory(), "test-category");
     assertEquals(resultInfo.getDescription(), "test-description");
@@ -79,18 +80,17 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetEntityClientException() throws Exception {
     // Update resolver
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(RemoteInvocationException.class)
+        .when(mockClient)
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
 
     UpdateTestResolver resolver = new UpdateTestResolver(mockClient);
 
     // Execute resolver
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java
index 2164d4160634c..742e162963ea3 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java
@@ -1,5 +1,9 @@
 package com.linkedin.datahub.graphql.resolvers.user;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.user.NativeUserService;
 import com.linkedin.datahub.graphql.QueryContext;
@@ -8,11 +12,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class CreateNativeUserResetTokenResolverTest {
 
   private static final String RESET_TOKEN = "resetToken";
@@ -47,7 +46,8 @@ public void testFailsNullUserUrn() throws Exception {
     CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(null);
     when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input);
     when(mockContext.getAuthentication()).thenReturn(_authentication);
-    when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN);
+    when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any()))
+        .thenReturn(RESET_TOKEN);
 
     assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join());
   }
@@ -59,7 +59,8 @@ public void testPasses() throws Exception {
     CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(USER_URN_STRING);
     when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input);
     when(mockContext.getAuthentication()).thenReturn(_authentication);
-    when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN);
+    when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any()))
+        .thenReturn(RESET_TOKEN);
 
     assertEquals(RESET_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getResetToken());
   }
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java
index 0957acf0cbbb3..15864dc3ac925 100644
--- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java
+++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.datahub.graphql.resolvers.view;
 
+import static com.linkedin.datahub.graphql.TestUtils.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.urn.Urn;
@@ -15,6 +18,8 @@
 import com.linkedin.datahub.graphql.generated.FacetFilterInput;
 import com.linkedin.datahub.graphql.generated.FilterOperator;
 import com.linkedin.datahub.graphql.generated.LogicalOperator;
+import com.linkedin.entity.client.EntityClient;
+import com.linkedin.metadata.Constants;
 import com.linkedin.metadata.query.filter.Condition;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
@@ -23,34 +28,35 @@
 import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.service.ViewService;
 import com.linkedin.view.DataHubViewDefinition;
-import com.linkedin.entity.client.EntityClient;
-import com.linkedin.metadata.Constants;
 import graphql.schema.DataFetchingEnvironment;
 import java.util.concurrent.CompletionException;
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.TestUtils.*;
-import static org.testng.Assert.*;
-
-
 public class CreateViewResolverTest {
 
-  private static final CreateViewInput TEST_INPUT = new CreateViewInput(
-      DataHubViewType.PERSONAL,
-      "test-name",
-      "test-description",
-      new DataHubViewDefinitionInput(
-          ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD),
-          new DataHubViewFilterInput(
-              LogicalOperator.AND,
-              ImmutableList.of(
-                  new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL),
-                  new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN)
-              )
-          )
-      )
-  );
+  private static final CreateViewInput TEST_INPUT =
+      new CreateViewInput(
+          DataHubViewType.PERSONAL,
+          "test-name",
+          "test-description",
+          new DataHubViewDefinitionInput(
+              ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD),
+              new DataHubViewFilterInput(
+                  LogicalOperator.AND,
+                  ImmutableList.of(
+                      new FacetFilterInput(
+                          "test1",
+                          null,
+                          ImmutableList.of("value1", "value2"),
+                          false,
+                          FilterOperator.EQUAL),
+                      new FacetFilterInput(
+                          "test2",
+                          null,
+                          ImmutableList.of("value1", "value2"),
+                          true,
+                          FilterOperator.IN))))));
 
   private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test");
 
@@ -71,37 +77,59 @@ public void testGetSuccess() throws Exception {
     assertEquals(view.getDescription(), TEST_INPUT.getDescription());
     assertEquals(view.getViewType(), TEST_INPUT.getViewType());
     assertEquals(view.getType(), EntityType.DATAHUB_VIEW);
-    assertEquals(view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes());
-    assertEquals(view.getDefinition().getFilter().getOperator(), TEST_INPUT.getDefinition().getFilter().getOperator());
-    assertEquals(view.getDefinition().getFilter().getFilters().size(), TEST_INPUT.getDefinition().getFilter().getFilters().size());
-
-    Mockito.verify(mockService, Mockito.times(1)).createView(
-        Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL),
-        Mockito.eq(TEST_INPUT.getName()),
-        Mockito.eq(TEST_INPUT.getDescription()),
-        Mockito.eq(
-            new DataHubViewDefinition()
-                .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))
-                .setFilter(new Filter()
-                    .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
-                        new ConjunctiveCriterion()
-                            .setAnd(new CriterionArray(ImmutableList.of(
-                                new Criterion()
-                                    .setCondition(Condition.EQUAL)
-                                    .setField("test1.keyword")
-                                    .setValue("value1") // Unfortunate --- For backwards compat.
-                                    .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                                    .setNegated(false),
-                                new Criterion()
-                                    .setCondition(Condition.IN)
-                                    .setField("test2.keyword")
-                                    .setValue("value1") // Unfortunate --- For backwards compat.
-                                    .setValues(new StringArray(ImmutableList.of("value1", "value2")))
-                                    .setNegated(true)
-                            )))
-                    ))
-            )), Mockito.any(Authentication.class), Mockito.anyLong());
+    assertEquals(
+        view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes());
+    assertEquals(
+        view.getDefinition().getFilter().getOperator(),
+        TEST_INPUT.getDefinition().getFilter().getOperator());
+    assertEquals(
+        view.getDefinition().getFilter().getFilters().size(),
+        TEST_INPUT.getDefinition().getFilter().getFilters().size());
+
+    Mockito.verify(mockService, Mockito.times(1))
+        .createView(
+            Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL),
+            Mockito.eq(TEST_INPUT.getName()),
+            Mockito.eq(TEST_INPUT.getDescription()),
+            Mockito.eq(
+                new DataHubViewDefinition()
+                    .setEntityTypes(
+                        new StringArray(
+                            ImmutableList.of(
+                                Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))
+                    .setFilter(
+                        new Filter()
+                            .setOr(
+                                new ConjunctiveCriterionArray(
+                                    ImmutableList.of(
+                                        new ConjunctiveCriterion()
+                                            .setAnd(
+                                                new CriterionArray(
+                                                    ImmutableList.of(
+                                                        new Criterion()
+                                                            .setCondition(Condition.EQUAL)
+                                                            .setField("test1.keyword")
+                                                            .setValue(
+                                                                "value1") // Unfortunate --- For
+                                                            // backwards compat.
+                                                            .setValues(
+                                                                new StringArray(
+                                                                    ImmutableList.of(
+                                                                        "value1", "value2")))
+                                                            .setNegated(false),
+                                                        new Criterion()
+                                                            .setCondition(Condition.IN)
+                                                            .setField("test2.keyword")
+                                                            .setValue(
+                                                                "value1") // Unfortunate --- For
+                                                            // backwards compat.
+                                                            .setValues(
+                                                                new StringArray(
+                                                                    ImmutableList.of(
+                                                                        "value1", "value2")))
+                                                            .setNegated(true))))))))),
+            Mockito.any(Authentication.class),
+            Mockito.anyLong());
   }
 
   @Test
@@ -118,22 +146,23 @@ public void testGetUnauthorized() throws Exception {
     Mockito.when(mockEnv.getContext()).thenReturn(mockContext);
 
     assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join());
-    Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(
-        Mockito.any(),
-        Mockito.any(Authentication.class));
+    Mockito.verify(mockClient, Mockito.times(0))
+        .ingestProposal(Mockito.any(), Mockito.any(Authentication.class));
   }
 
   @Test
   public void testGetViewServiceException() throws Exception {
     // Create resolver
     ViewService mockService = Mockito.mock(ViewService.class);
-    Mockito.doThrow(RuntimeException.class).when(mockService).createView(
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(),
-        Mockito.any(Authentication.class),
-        Mockito.anyLong());
+    Mockito.doThrow(RuntimeException.class)
+        .when(mockService)
+        .createView(
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(),
+            Mockito.any(Authentication.class),
+            Mockito.anyLong());
 
     CreateViewResolver resolver = new CreateViewResolver(mockService);
 
@@ -148,14 +177,15 @@ public void testGetViewServiceException() throws Exception {
 
   private ViewService initMockService() {
     ViewService service = Mockito.mock(ViewService.class);
-    Mockito.when(service.createView(
-        Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL),
-        Mockito.eq(TEST_INPUT.getName()),
-        Mockito.eq(TEST_INPUT.getDescription()),
-        Mockito.any(),
-        Mockito.any(Authentication.class),
-        Mockito.anyLong()
-    )).thenReturn(TEST_VIEW_URN);
+    Mockito.when(
+            service.createView(
+                Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL),
+                Mockito.eq(TEST_INPUT.getName()),
+                Mockito.eq(TEST_INPUT.getDescription()),
+                Mockito.any(),
+                Mockito.any(Authentication.class),
+                Mockito.anyLong()))
+        .thenReturn(TEST_VIEW_URN);
     return service;
   }
-}
\ No newline at end of file
+}
diff --git
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java index afb4c16767f47..357f2119187d6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -17,10 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -40,10 +39,8 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -60,10 +57,8 @@ public void testGetSuccessGlobalViewCanManager() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -79,13 +74,10 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } - @Test public void testGetSuccessPersonalViewIsCreator() throws Exception { ViewService mockService = initViewService(DataHubViewType.PERSONAL); @@ -99,10 +91,8 @@ public void testGetSuccessPersonalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -118,19 +108,17 @@ public void testGetFailurePersonalViewIsNotCreator() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteView( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + 
.deleteView(Mockito.any(), Mockito.any(Authentication.class)); DeleteViewResolver resolver = new DeleteViewResolver(mockService); @@ -146,19 +134,21 @@ public void testGetViewServiceException() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 9a25c9eb1d25c..8c30c17201bc6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -28,53 +31,53 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListGlobalViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput( - 0, 20, "" - ); + private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput(0, 20, ""); @Test public void testGetSuccessInput() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new 
StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListGlobalViewsResolver resolver = new ListGlobalViewsResolver(mockClient); @@ -107,7 +110,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -121,7 +125,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -139,4 +145,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 4c43584144825..85e20cd656fcd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -27,63 +30,65 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListMyViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListMyViewsInput TEST_INPUT_1 = new ListMyViewsInput( - 0, 20, "", DataHubViewType.GLOBAL - ); + private static final ListMyViewsInput TEST_INPUT_1 = + new ListMyViewsInput(0, 20, "", DataHubViewType.GLOBAL); - private static final 
ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput( - 0, 20, "", null - ); + private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput(0, 20, "", null); @Test public void testGetSuccessInput1() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false), + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false), new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -106,35 +111,41 @@ public void testGetSuccessInput2() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new 
SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -165,7 +176,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -179,7 +191,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -197,4 +211,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java index b4895982ae780..1917e55705828 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -38,30 +41,33 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateViewInput TEST_INPUT = new UpdateViewInput( - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final UpdateViewInput TEST_INPUT = + new UpdateViewInput( + 
"test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); @Test public void testGetSuccessGlobalViewIsCreator() throws Exception { @@ -81,33 +87,50 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -128,46 +151,65 @@ public void testGetSuccessGlobalViewManageGlobalViews() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testGetViewServiceException() throws Exception { // Update resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateView( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateView( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateViewResolver resolver = new UpdateViewResolver(mockService); @@ -196,43 +238,46 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getViewEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getViewEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java index 9578ff201ca19..3ad3f0786e987 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -25,12 +28,8 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.Assert; -import org.testng.annotations.Test; import org.mockito.Mockito; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - +import org.testng.annotations.Test; public class ViewUtilsTest { @@ -39,10 +38,10 @@ public class ViewUtilsTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); - @Test public static void testCanCreatePersonalViewAllowed() { - boolean res = ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); + boolean res = + ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); Assert.assertTrue(res); } @@ -67,10 +66,8 @@ public void testCanUpdateViewSuccessGlobalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -80,10 +77,8 @@ public void testCanUpdateViewSuccessGlobalViewCanManageGlobalViews() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -93,10 +88,8 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -106,10 +99,8 @@ public void testGetSuccessPersonalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -119,50 +110,69 @@ public void testGetFailurePersonalViewIsNotCreator() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, 
Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test public void testMapDefinition() throws Exception { - DataHubViewDefinitionInput input = new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.IN), - new FacetFilterInput("test2", null, ImmutableList.of("value3", "value4"), true, FilterOperator.CONTAIN) - ) - ) - ); - - DataHubViewDefinition expectedResult = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion() - .setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setValue("value1") // Disgraceful - .setField("test1.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.IN), - new Criterion() - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("value3", "value4"))) - .setValue("value3") // Disgraceful - .setField("test2.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.CONTAIN) - )) - ) - ) - )) - ); + DataHubViewDefinitionInput input = + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.IN), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value3", "value4"), + true, + FilterOperator.CONTAIN)))); + + DataHubViewDefinition expectedResult = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setNegated(false) + .setValues( + new StringArray( + ImmutableList.of("value1", "value2"))) + .setValue("value1") // Disgraceful + .setField( + "test1.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. + .setCondition(Condition.IN), + new Criterion() + .setNegated(true) + .setValues( + new StringArray( + ImmutableList.of("value3", "value4"))) + .setValue("value3") // Disgraceful + .setField( + "test2.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. 
+ .setCondition(Condition.CONTAIN)))))))); assertEquals(ViewUtils.mapDefinition(input), expectedResult); } @@ -170,17 +180,20 @@ public void testMapDefinition() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when( + mockService.getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java index c4465c7d3cb65..c975c7ebb0507 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,27 +30,25 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class AssertionTypeTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:guid-1"; - private static final AssertionKey TEST_ASSERTION_KEY = new AssertionKey() - .setAssertionId("guid-1"); - private static final AssertionInfo TEST_ASSERTION_INFO = new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(null, SetMode.IGNORE_NULL) - .setCustomProperties(new StringMap()); - private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("snowflake")) - .setInstance(null, SetMode.IGNORE_NULL); + private static final AssertionKey TEST_ASSERTION_KEY = + new AssertionKey().setAssertionId("guid-1"); + private static final AssertionInfo TEST_ASSERTION_INFO = + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion(null, SetMode.IGNORE_NULL) + .setCustomProperties(new StringMap()); + private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance() + .setPlatform(new DataPlatformUrn("snowflake")) + .setInstance(null, SetMode.IGNORE_NULL); private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:guid-2"; - @Test public void testBatchLoad() throws Exception { @@ -60,41 +60,43 @@ public void 
testBatchLoad() throws Exception { Map<String, EnvelopedAspect> assertion1Aspects = new HashMap<>(); assertion1Aspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data()))); assertion1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))); assertion1Aspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - assertionUrn1, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn1) - .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); - - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + assertionUrn1, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn1) + .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); + + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<Assertion>> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); + List<DataFetcherResult<Assertion>> result = + type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -112,17 +114,21 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); + 
Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java index 3ff4e43ca112c..1e2acd0db455c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java @@ -1,6 +1,7 @@ - package com.linkedin.datahub.graphql.types.container; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,12 +27,12 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.container.ContainerProperties; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; @@ -46,46 +47,55 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class ContainerTypeTest { private static final String TEST_CONTAINER_1_URN = "urn:li:container:guid-1"; - private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey() - .setGuid("guid-1"); - private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = new ContainerProperties() - .setDescription("test description") - .setName("Test Container"); - private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = new EditableContainerProperties() - .setDescription("test editable description"); - private static final Ownership TEST_CONTAINER_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - private static 
final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); - private static final Status TEST_CONTAINER_1_STATUS = new Status() - .setRemoved(false); - private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = new SubTypes() - .setTypeNames(new StringArray(ImmutableList.of("Database"))); - private static final GlobalTags TEST_CONTAINER_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); - private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = new com.linkedin.container.Container() - .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); + private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey().setGuid("guid-1"); + private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = + new ContainerProperties().setDescription("test description").setName("Test Container"); + private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = + new EditableContainerProperties().setDescription("test editable description"); + private static final Ownership TEST_CONTAINER_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); + private static final Status TEST_CONTAINER_1_STATUS = new Status().setRemoved(false); + private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("Database"))); + private static final GlobalTags TEST_CONTAINER_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = + new com.linkedin.container.Container() + .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); private static final String TEST_CONTAINER_2_URN = "urn:li:container:guid-2"; @@ -100,73 +110,65 @@ public void testBatchLoad() throws Exception { Map<String, EnvelopedAspect> container1Aspects = new HashMap<>(); container1Aspects.put( Constants.CONTAINER_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data())));
container1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data()))); container1Aspects.put( Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data()))); container1Aspects.put( Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data()))); container1Aspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data()))); container1Aspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data()))); container1Aspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data()))); container1Aspects.put( Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data()))); container1Aspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data()))); container1Aspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data()))); container1Aspects.put( Constants.CONTAINER_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - containerUrn1, - new EntityResponse() - .setEntityName(Constants.CONTAINER_ENTITY_NAME) - .setUrn(containerUrn1) - .setAspects(new EnvelopedAspectMap(container1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + containerUrn1, + new EntityResponse() + .setEntityName(Constants.CONTAINER_ENTITY_NAME) + .setUrn(containerUrn1) + .setAspects(new EnvelopedAspectMap(container1Aspects)))); ContainerType type = new ContainerType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List<DataFetcherResult<Container>> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), 
mockContext);
+    List<DataFetcherResult<Container>> result =
+        type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext);

     // Verify response
-    Mockito.verify(client, Mockito.times(1)).batchGetV2(
-        Mockito.eq(Constants.CONTAINER_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)),
-        Mockito.eq(ContainerType.ASPECTS_TO_FETCH),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(client, Mockito.times(1))
+        .batchGetV2(
+            Mockito.eq(Constants.CONTAINER_ENTITY_NAME),
+            Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)),
+            Mockito.eq(ContainerType.ASPECTS_TO_FETCH),
+            Mockito.any(Authentication.class));

     assertEquals(result.size(), 2);

@@ -177,8 +179,12 @@ public void testBatchLoad() throws Exception {
     assertEquals(container1.getProperties().getDescription(), "test description");
     assertEquals(container1.getProperties().getName(), "Test Container");
     assertEquals(container1.getInstitutionalMemory().getElements().size(), 1);
-    assertEquals(container1.getSubTypes().getTypeNames().get(0), TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0));
-    assertEquals(container1.getEditableProperties().getDescription(), TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription());
+    assertEquals(
+        container1.getSubTypes().getTypeNames().get(0),
+        TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0));
+    assertEquals(
+        container1.getEditableProperties().getDescription(),
+        TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription());
     assertEquals(
         container1.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(),
         TEST_CONTAINER_1_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString());
@@ -186,8 +192,7 @@ public void testBatchLoad() throws Exception {
         container1.getTags().getTags().get(0).getTag().getUrn(),
         TEST_CONTAINER_1_TAGS.getTags().get(0).getTag().toString());
     assertEquals(
-        container1.getContainer().getUrn(),
-        TEST_CONTAINER_1_CONTAINER.getContainer().toString());
+        container1.getContainer().getUrn(), TEST_CONTAINER_1_CONTAINER.getContainer().toString());
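One behavioral detail is worth making explicit here, because the assertion that follows depends on it: batchLoad answers in the same order as the requested urns and pads with null for any urn missing from the batchGetV2 response, and only containerUrn1 is stubbed above. A minimal sketch of that contract, using plain assert statements for illustration only:

// Sketch, not part of this patch: the shape batchLoad is expected to return
// given the single-entry stub above.
assert result.size() == 2;               // one slot per requested urn, in order
assert result.get(0).getData() != null;  // hydrated from container1Aspects
assert result.get(1) == null;            // containerUrn2 was absent from the stub

     // Assert second element is null.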
assertNull(result.get(1)); @@ -196,17 +201,21 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java index 9b6e11fd0b3a4..667d943b1095d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java @@ -1,22 +1,24 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; -import com.linkedin.common.Ownership; -import com.linkedin.common.OwnerArray; -import com.linkedin.common.Owner; -import com.linkedin.common.OwnershipType; +import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; -import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlobalTags; +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.OwnershipType; +import com.linkedin.common.Status; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; -import com.linkedin.common.Status; import com.linkedin.common.url.Url; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; @@ -33,185 +35,181 @@ import com.linkedin.metadata.key.DataPlatformInstanceKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class DataPlatformInstanceTest { - private static final Urn TEST_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); - - private static 
final String TEST_DATAPLATFORMINSTANCE_1_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; - - private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY - = new DataPlatformInstanceKey() - .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) - .setInstance("I1"); - - private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES - = new DataPlatformInstanceProperties() - .setDescription("test description") - .setName("Test Data Platform Instance"); - - private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = new Deprecation() - .setDeprecated(true) - .setActor(TEST_ACTOR_URN) - .setNote("legacy"); - - private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(TEST_ACTOR_URN)))); - - private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); - - private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - - private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status() - .setRemoved(false); - - private static final String TEST_DATAPLATFORMINSTANCE_2_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; - - @Test - public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - - Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); - Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); - - Map dataPlatformInstance1Aspects = new HashMap<>(); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), - 
Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - dataPlatformInstance1Urn, - new EntityResponse() - .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) - .setUrn(dataPlatformInstance1Urn) - .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); - - DataPlatformInstanceType type = new DataPlatformInstanceType(client); - - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); - List> result = type.batchLoad( - ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), mockContext); - - // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( + private static final Urn TEST_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); + + private static final String TEST_DATAPLATFORMINSTANCE_1_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; + + private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY = + new DataPlatformInstanceKey() + .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) + .setInstance("I1"); + + private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES = + new DataPlatformInstanceProperties() + .setDescription("test description") + .setName("Test Data Platform Instance"); + + private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = + new Deprecation().setDeprecated(true).setActor(TEST_ACTOR_URN).setNote("legacy"); + + private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setType(OwnershipType.DATAOWNER).setOwner(TEST_ACTOR_URN)))); + + private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); + + private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + + private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status().setRemoved(false); + + private static final String TEST_DATAPLATFORMINSTANCE_2_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; + + @Test + public void testBatchLoad() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + + Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); + Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); + + Map dataPlatformInstance1Aspects = new HashMap<>(); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data()))); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data()))); + dataPlatformInstance1Aspects.put( + Constants.DEPRECATION_ASPECT_NAME, + new 
EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data()))); + dataPlatformInstance1Aspects.put( + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data()))); + dataPlatformInstance1Aspects.put( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data()))); + dataPlatformInstance1Aspects.put( + Constants.GLOBAL_TAGS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data()))); + dataPlatformInstance1Aspects.put( + Constants.STATUS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data()))); + Mockito.when( + client.batchGetV2( Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq( + new HashSet<>( + ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); - - assertEquals(result.size(), 2); - - DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); - assertEquals( - dataPlatformInstance1.getUrn(), - TEST_DATAPLATFORMINSTANCE_1_URN - ); - assertEquals( - dataPlatformInstance1.getType(), - EntityType.DATA_PLATFORM_INSTANCE - ); - assertEquals( - dataPlatformInstance1.getProperties().getDescription(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription() - ); - assertEquals( - dataPlatformInstance1.getProperties().getName(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getDeprecated(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getNote(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getActor(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString() - ); - assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); - assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); - assertEquals( - dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), - TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString() - ); - assertEquals( - dataPlatformInstance1.getStatus().getRemoved(), - TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue() - ); - - // Assert second element is null. 
- assertNull(result.get(1)); - } - - @Test - public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type - = new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType(mockClient); - - // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of( - TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), context)); - } + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + dataPlatformInstance1Urn, + new EntityResponse() + .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) + .setUrn(dataPlatformInstance1Urn) + .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); + + DataPlatformInstanceType type = new DataPlatformInstanceType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); + List> result = + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); + + assertEquals(result.size(), 2); + + DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); + assertEquals(dataPlatformInstance1.getUrn(), TEST_DATAPLATFORMINSTANCE_1_URN); + assertEquals(dataPlatformInstance1.getType(), EntityType.DATA_PLATFORM_INSTANCE); + assertEquals( + dataPlatformInstance1.getProperties().getDescription(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription()); + assertEquals( + dataPlatformInstance1.getProperties().getName(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName()); + assertEquals( + dataPlatformInstance1.getDeprecation().getDeprecated(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue()); + assertEquals( + dataPlatformInstance1.getDeprecation().getNote(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote()); + assertEquals( + dataPlatformInstance1.getDeprecation().getActor(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString()); + assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); + assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); + assertEquals( + dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), + TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString()); + assertEquals( + dataPlatformInstance1.getStatus().getRemoved(), + TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue()); + + // Assert second element is null. 
+ assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type = + new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType( + mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + context)); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index 3d22f1c429fd6..1959ae6d43208 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -8,146 +8,165 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.Map; +import org.testng.Assert; +import org.testng.annotations.Test; public class DatasetMapperTest { - private static final Urn TEST_DATASET_URN = Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); - private static final Urn TEST_CREATED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); - private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); - - @Test - public void testDatasetPropertiesMapperWithCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - input.setQualifiedName("Test QualifiedName"); - - final TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - final TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setQualifiedName("Test QualifiedName"); - 
expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); - expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); - expectedDatasetProperties.setLastModified(20L); - expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - Assert.assertEquals(actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(null); - expectedDatasetProperties.setCreated(null); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutTimestampActors() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - 
.setEntityName(Constants.DATASET_ENTITY_NAME)
-            .setUrn(TEST_DATASET_URN)
-            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
-        final Dataset actual = DatasetMapper.map(response);
-
-
-        final Dataset expected = new Dataset();
-        expected.setUrn(TEST_DATASET_URN.toString());
-        final DatasetProperties expectedDatasetProperties = new DatasetProperties();
-        expectedDatasetProperties.setName("Test");
-        expectedDatasetProperties.setLastModifiedActor(null);
-        expectedDatasetProperties.setCreatedActor(null);
-        expectedDatasetProperties.setLastModified(20L);
-        expectedDatasetProperties.setCreated(10L);
-        expected.setProperties(expectedDatasetProperties);
-
-        Assert.assertEquals(actual.getUrn(), expected.getUrn());
-        Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
-
-        Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified());
-        Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated());
-
-        Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor());
-        Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor());
-
-    }
+  private static final Urn TEST_DATASET_URN =
+      Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test");
+  private static final Urn TEST_CREATED_ACTOR_URN =
+      Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created");
+  private static final Urn TEST_LAST_MODIFIED_ACTOR_URN =
+      Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified");
+
+  @Test
+  public void testDatasetPropertiesMapperWithCreatedAndLastModified() {
+    final com.linkedin.dataset.DatasetProperties input =
+        new com.linkedin.dataset.DatasetProperties();
+    input.setName("Test");
+    input.setQualifiedName("Test QualifiedName");
+
+    final TimeStamp createdTimestamp = new TimeStamp();
+    createdTimestamp.setActor(TEST_CREATED_ACTOR_URN);
+    createdTimestamp.setTime(10L);
+    input.setCreated(createdTimestamp);
+
+    final TimeStamp lastModifiedTimestamp = new TimeStamp();
+    lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN);
+    lastModifiedTimestamp.setTime(20L);
+    input.setLastModified(lastModifiedTimestamp);
+
+    final Map<String, com.linkedin.entity.EnvelopedAspect> dataSetPropertiesAspects =
+        new HashMap<>();
+    dataSetPropertiesAspects.put(
+        Constants.DATASET_PROPERTIES_ASPECT_NAME,
+        new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data())));
+    final EntityResponse response =
+        new EntityResponse()
+            .setEntityName(Constants.DATASET_ENTITY_NAME)
+            .setUrn(TEST_DATASET_URN)
+            .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects));
+    final Dataset actual = DatasetMapper.map(response);
+
+    final Dataset expected = new Dataset();
+    expected.setUrn(TEST_DATASET_URN.toString());
+    final DatasetProperties expectedDatasetProperties = new DatasetProperties();
+    expectedDatasetProperties.setName("Test");
+    expectedDatasetProperties.setQualifiedName("Test QualifiedName");
+    expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString());
+    expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString());
+    expectedDatasetProperties.setLastModified(20L);
+    expectedDatasetProperties.setCreated(10L);
+    expected.setProperties(expectedDatasetProperties);
+
+    Assert.assertEquals(actual.getUrn(), expected.getUrn());
+    Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName());
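Since the expected values above pin down how DatasetMapper treats audit metadata, a short sketch of the flattening these assertions verify may help: the PDL TimeStamp pair (time plus an optional actor) on the properties aspect becomes four scalar fields on the GraphQL DatasetProperties. The helper below is hypothetical and shown only to illustrate the mapping; it is not DatasetMapper's actual code.

// Hypothetical helper sketching the flattening these tests pin down.
static void mapAuditStamps(
    com.linkedin.dataset.DatasetProperties in,
    com.linkedin.datahub.graphql.generated.DatasetProperties out) {
  if (in.hasCreated()) {
    out.setCreated(in.getCreated().getTime()); // 10L in this test
    if (in.getCreated().hasActor()) {
      out.setCreatedActor(in.getCreated().getActor().toString());
    }
  }
  if (in.hasLastModified()) {
    out.setLastModified(in.getLastModified().getTime()); // 20L in this test
    if (in.getLastModified().hasActor()) {
      out.setLastModifiedActor(in.getLastModified().getActor().toString());
    }
  }
}

+    Assert.assertEquals(
+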
actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); + + Assert.assertEquals( + actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(null); + expectedDatasetProperties.setCreated(null); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutTimestampActors() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setTime(10L); + input.setCreated(createdTimestamp); + + TimeStamp lastModifiedTimestamp = new TimeStamp(); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + 
expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(20L); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java index 78cdaa0a276da..612136d1f9164 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java @@ -19,90 +19,128 @@ public void testMapperFullProfile() { input.setRowCount(10L); input.setColumnCount(45L); input.setSizeInBytes(15L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setMax("1") - .setMean("2") - .setStdev("3") - .setMedian("4") - .setMin("5") - .setNullCount(20L) - .setNullProportion(20.5f) - .setUniqueCount(30L) - .setUniqueProportion(30.5f) - .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setNullCount(30L) - .setNullProportion(30.5f) - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - .setSampleValues(new StringArray(ImmutableList.of("val3", "val4"))) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setMax("1") + .setMean("2") + .setStdev("3") + .setMedian("4") + .setMin("5") + .setNullCount(20L) + .setNullProportion(20.5f) + .setUniqueCount(30L) + .setUniqueProportion(30.5f) + .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setNullCount(30L) + .setNullProportion(30.5f) + .setUniqueCount(40L) + .setUniqueProportion(40.5f) + .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); expected.setSizeInBytes(15L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - 20L, - 20.5f, - "5", - "1", - "2", - "4", - "3", - 
new ArrayList<>(ImmutableList.of("val1", "val2"))), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - 30L, - 30.5f, - "6", - "2", - "3", - "5", - "4", - new ArrayList<>(ImmutableList.of("val3", "val4"))) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", + 30L, + 30.5f, + 20L, + 20.5f, + "5", + "1", + "2", + "4", + "3", + new ArrayList<>(ImmutableList.of("val1", "val2"))), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", + 40L, + 40.5f, + 30L, + 30.5f, + "6", + "2", + "3", + "5", + "4", + new ArrayList<>(ImmutableList.of("val3", "val4")))))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - 
Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } @Test @@ -111,77 +149,95 @@ public void testMapperPartialProfile() { input.setTimestampMillis(1L); input.setRowCount(10L); input.setColumnCount(45L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setUniqueCount(30L) - .setUniqueProportion(30.5f), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setUniqueCount(30L) + .setUniqueProportion(30.5f), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setUniqueCount(40L) + .setUniqueProportion(40.5f)))); + final EnvelopedAspect inputAspect 
= + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - null, - null, - null, - null, - null, - null, - null, - null), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - null, - null, - "6", - "2", - "3", - "5", - "4", - null) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", 30L, 30.5f, null, null, null, null, null, null, null, null), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", 40L, 40.5f, null, null, "6", "2", "3", "5", "4", null)))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + 
Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java index 48c23f436f875..32735ad7874a0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -30,33 
+33,34 @@ import java.util.HashSet; import java.util.List; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class DomainTypeTest { private static final String TEST_DOMAIN_1_URN = "urn:li:domain:id-1"; - private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey() - .setId("id-1"); - private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = new DomainProperties() - .setDescription("test description") - .setName("Test Domain"); - private static final Ownership TEST_DOMAIN_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey().setId("id-1"); + private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = + new DomainProperties().setDescription("test description").setName("Test Domain"); + private static final Ownership TEST_DOMAIN_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); private static final String TEST_DOMAIN_2_URN = "urn:li:domain:id-2"; @@ -68,39 +72,48 @@ public void testBatchLoad() throws Exception { Urn domainUrn1 = Urn.createFromString(TEST_DOMAIN_1_URN); Urn domainUrn2 = Urn.createFromString(TEST_DOMAIN_2_URN); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - domainUrn1, - new EntityResponse() - .setEntityName(Constants.DOMAIN_ENTITY_NAME) - .setUrn(domainUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data())) - ))))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + domainUrn1, + new EntityResponse() + 
.setEntityName(Constants.DOMAIN_ENTITY_NAME) + .setUrn(domainUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data()))))))); DomainType type = new DomainType(client); QueryContext mockContext = getMockAllowContext(); - List> result = type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -120,17 +133,20 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); DomainType type = new DomainType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index 918616a2705b7..f88c8285e20df 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -31,16 +33,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; -import com.linkedin.notebook.NotebookCell; -import com.linkedin.notebook.NotebookCellArray; -import com.linkedin.notebook.NotebookCellType; -import com.linkedin.notebook.NotebookContent; 
-import com.linkedin.notebook.NotebookInfo; -import com.linkedin.notebook.EditableNotebookProperties; -import com.linkedin.notebook.TextCell; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.types.container.ContainerType; import com.linkedin.domain.Domains; import com.linkedin.entity.Aspect; @@ -50,6 +45,13 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.notebook.EditableNotebookProperties; +import com.linkedin.notebook.NotebookCell; +import com.linkedin.notebook.NotebookCellArray; +import com.linkedin.notebook.NotebookCellType; +import com.linkedin.notebook.NotebookContent; +import com.linkedin.notebook.NotebookInfo; +import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -60,58 +62,75 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class NotebookTypeTest { private static final String TEST_NOTEBOOK = "urn:li:notebook:(querybook,123)"; - private static final NotebookKey NOTEBOOK_KEY = new NotebookKey() - .setNotebookId("123") - .setNotebookTool("querybook"); - private static final NotebookContent NOTEBOOK_CONTENT = new NotebookContent() - .setCells(new NotebookCellArray(ImmutableList.of(new NotebookCell() - .setType(NotebookCellType.TEXT_CELL) - .setTextCell(new TextCell() - .setCellId("1234") - .setCellTitle("test cell") - .setText("test text") - .setChangeAuditStamps(new ChangeAuditStamps()))))); - private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = new EditableNotebookProperties() - .setDescription("test editable description"); - private static final Ownership OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - - private static final SubTypes SUB_TYPES = new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); - - private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("test_platform")); - - private static final NotebookInfo NOTEBOOK_INFO = new NotebookInfo() - .setTitle("title") - .setExternalUrl(new Url("https://querybook.com/notebook/123")) - .setChangeAuditStamps(new ChangeAuditStamps()) - .setDescription("test doc"); - - private static final Status STATUS = new Status() - .setRemoved(false); - - private static final Domains DOMAINS = new Domains() - .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); - private static final GlobalTags GLOBAL_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_GLOSSARY_TERMS = new GlossaryTerms() - 
.setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final NotebookKey NOTEBOOK_KEY = + new NotebookKey().setNotebookId("123").setNotebookTool("querybook"); + private static final NotebookContent NOTEBOOK_CONTENT = + new NotebookContent() + .setCells( + new NotebookCellArray( + ImmutableList.of( + new NotebookCell() + .setType(NotebookCellType.TEXT_CELL) + .setTextCell( + new TextCell() + .setCellId("1234") + .setCellTitle("test cell") + .setText("test text") + .setChangeAuditStamps(new ChangeAuditStamps()))))); + private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = + new EditableNotebookProperties().setDescription("test editable description"); + private static final Ownership OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + + private static final SubTypes SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); + + private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(new DataPlatformUrn("test_platform")); + + private static final NotebookInfo NOTEBOOK_INFO = + new NotebookInfo() + .setTitle("title") + .setExternalUrl(new Url("https://querybook.com/notebook/123")) + .setChangeAuditStamps(new ChangeAuditStamps()) + .setDescription("test doc"); + + private static final Status STATUS = new Status().setRemoved(false); + + private static final Domains DOMAINS = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); + private static final GlobalTags GLOBAL_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); @Test public void testBatchLoad() throws Exception { @@ -121,79 +140,69 @@ public void testBatchLoad() throws Exception { Map notebookAspects = new HashMap<>(); notebookAspects.put( Constants.NOTEBOOK_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data()))); notebookAspects.put( Constants.NOTEBOOK_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data()))); notebookAspects.put( Constants.NOTEBOOK_CONTENT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data()))); notebookAspects.put( Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data()))); notebookAspects.put( 
Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data()))); notebookAspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data()))); notebookAspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(STATUS.data())) - ); + Constants.STATUS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(STATUS.data()))); notebookAspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data()))); notebookAspects.put( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(DOMAINS.data())) - ); + Constants.DOMAINS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DOMAINS.data()))); notebookAspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data()))); notebookAspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data())) - ); - notebookAspects.put(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data()))); + notebookAspects.put( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DATA_PLATFORM_INSTANCE.data()))); Urn notebookUrn = new NotebookUrn("querybook", "123"); Urn dummyNotebookUrn = new NotebookUrn("querybook", "dummy"); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - notebookUrn, - new EntityResponse() - .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) - .setUrn(notebookUrn) - .setAspects(new EnvelopedAspectMap(notebookAspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + notebookUrn, + new EntityResponse() + .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) + .setUrn(notebookUrn) + .setAspects(new EnvelopedAspectMap(notebookAspects)))); NotebookType type = new NotebookType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> - result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + 
Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -201,13 +210,17 @@ public void testBatchLoad() throws Exception { Notebook notebook = result.get(0).getData(); assertEquals(notebook.getContent().getCells().size(), NOTEBOOK_CONTENT.getCells().size()); - assertEquals(notebook.getContent().getCells().get(0).getType().toString(), + assertEquals( + notebook.getContent().getCells().get(0).getType().toString(), NOTEBOOK_CONTENT.getCells().get(0).getType().toString()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellId(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellId(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellId()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellTitle()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getText(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getText(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getText()); assertEquals(notebook.getInfo().getDescription(), NOTEBOOK_INFO.getDescription()); assertEquals(notebook.getInfo().getExternalUrl(), NOTEBOOK_INFO.getExternalUrl().toString()); @@ -217,11 +230,17 @@ public void testBatchLoad() throws Exception { assertEquals(notebook.getType(), EntityType.NOTEBOOK); assertEquals(notebook.getOwnership().getOwners().size(), 1); assertEquals(notebook.getInstitutionalMemory().getElements().size(), 1); - assertEquals(notebook.getEditableProperties().getDescription(), TEST_EDITABLE_DESCRIPTION.getDescription()); - assertEquals(notebook.getTags().getTags().get(0).getTag().getUrn(), + assertEquals( + notebook.getEditableProperties().getDescription(), + TEST_EDITABLE_DESCRIPTION.getDescription()); + assertEquals( + notebook.getTags().getTags().get(0).getTag().getUrn(), GLOBAL_TAGS.getTags().get(0).getTag().toString()); - assertEquals(notebook.getSubTypes().getTypeNames(), SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); - assertEquals(notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), + assertEquals( + notebook.getSubTypes().getTypeNames(), + SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); + assertEquals( + notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); assertEquals(notebook.getPlatform().getUrn(), DATA_PLATFORM_INSTANCE.getPlatform().toString()); @@ -232,17 +251,19 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), - context)); + assertThrows( + RuntimeException.class, () -> 
type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index a3c089b91de87..c8f694320d88a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,28 +1,30 @@ package com.linkedin.datahub.graphql.types.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.datahub.graphql.generated.QueryEntity; -import com.linkedin.query.QueryLanguage; -import com.linkedin.query.QueryProperties; -import com.linkedin.query.QuerySource; -import com.linkedin.query.QueryStatement; -import com.linkedin.query.QuerySubject; -import com.linkedin.query.QuerySubjectArray; -import com.linkedin.query.QuerySubjects; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryProperties; +import com.linkedin.query.QuerySource; +import com.linkedin.query.QueryStatement; +import com.linkedin.query.QuerySubject; +import com.linkedin.query.QuerySubjectArray; +import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -30,53 +32,50 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class QueryTypeTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); private static final Urn TEST_QUERY_2_URN = UrnUtils.getUrn("urn:li:query:test-2"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); - private static final Urn TEST_DATASET_2_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_2_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final QueryProperties TEST_QUERY_PROPERTIES_1 = new QueryProperties() - .setName("Query Name") - .setDescription("Query Description") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private 
static final QuerySubjects TEST_QUERY_SUBJECTS_1 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ) - )); - private static final QueryProperties TEST_QUERY_PROPERTIES_2 = new QueryProperties() - .setName("Query Name 2") - .setDescription("Query Description 2") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable2") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_2_URN) - ) - )); + private static final QueryProperties TEST_QUERY_PROPERTIES_1 = + new QueryProperties() + .setName("Query Name") + .setDescription("Query Description") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + private static final QueryProperties TEST_QUERY_PROPERTIES_2 = + new QueryProperties() + .setName("Query Name 2") + .setDescription("Query Description 2") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable2")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_2_URN)))); @Test public void testBatchLoad() throws Exception { @@ -87,38 +86,54 @@ public void testBatchLoad() throws Exception { Urn queryUrn2 = TEST_QUERY_2_URN; Map query1Aspects = new HashMap<>(); - query1Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); - query1Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); Map query2Aspects = new HashMap<>(); - query2Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_2.data()))); - query2Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_2.data()))); - Mockito.when(client.batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(queryUrn1, new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new 
EnvelopedAspectMap(query1Aspects)), queryUrn2,
- new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME)
- .setUrn(queryUrn2)
- .setAspects(new EnvelopedAspectMap(query2Aspects))));
+ Mockito.when(
+ client.batchGetV2(
+ Mockito.eq(Constants.QUERY_ENTITY_NAME),
+ Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))),
+ Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH),
+ Mockito.any(Authentication.class)))
+ .thenReturn(
+ ImmutableMap.of(
+ queryUrn1,
+ new EntityResponse()
+ .setEntityName(Constants.QUERY_ENTITY_NAME)
+ .setUrn(queryUrn1)
+ .setAspects(new EnvelopedAspectMap(query1Aspects)),
+ queryUrn2,
+ new EntityResponse()
+ .setEntityName(Constants.QUERY_ENTITY_NAME)
+ .setUrn(queryUrn2)
+ .setAspects(new EnvelopedAspectMap(query2Aspects))));
 QueryType type = new QueryType(client);
 QueryContext mockContext = Mockito.mock(QueryContext.class);
 Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
 List<DataFetcherResult<QueryEntity>> result =
- type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext);
+ type.batchLoad(
+ ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext);
 // Verify response
 Mockito.verify(client, Mockito.times(1))
- .batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), Mockito.eq(QueryType.ASPECTS_TO_FETCH),
+ .batchGetV2(
+ Mockito.eq(Constants.QUERY_ENTITY_NAME),
+ Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)),
+ Mockito.eq(QueryType.ASPECTS_TO_FETCH),
 Mockito.any(Authentication.class));
 assertEquals(result.size(), 2);
@@ -141,40 +156,39 @@ public void testBatchLoadNullEntity() throws Exception {
 Map<String, EnvelopedAspect> query1Aspects = new HashMap<>();
 query1Aspects.put(
 Constants.QUERY_PROPERTIES_ASPECT_NAME,
- new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))
- );
+ new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data())));
 query1Aspects.put(
 Constants.QUERY_SUBJECTS_ASPECT_NAME,
- new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))
- );
- Mockito.when(client.batchGetV2(
- Mockito.eq(Constants.QUERY_ENTITY_NAME),
- Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))),
- Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH),
- Mockito.any(Authentication.class)))
- .thenReturn(ImmutableMap.of(
- queryUrn1,
- new EntityResponse()
- .setEntityName(Constants.QUERY_ENTITY_NAME)
- .setUrn(queryUrn1)
- .setAspects(new EnvelopedAspectMap(query1Aspects))));
+ new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data())));
+ Mockito.when(
+ client.batchGetV2(
+ Mockito.eq(Constants.QUERY_ENTITY_NAME),
+ Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))),
+ Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH),
+ Mockito.any(Authentication.class)))
+ .thenReturn(
+ ImmutableMap.of(
+ queryUrn1,
+ new EntityResponse()
+ .setEntityName(Constants.QUERY_ENTITY_NAME)
+ .setUrn(queryUrn1)
+ .setAspects(new EnvelopedAspectMap(query1Aspects)))));
 QueryType type = new QueryType(client);
 QueryContext mockContext = Mockito.mock(QueryContext.class);
 Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
- List<DataFetcherResult<QueryEntity>> result = type.batchLoad(ImmutableList.of(
- TEST_QUERY_URN.toString(),
- TEST_QUERY_2_URN.toString()),
- mockContext);
+ List<DataFetcherResult<QueryEntity>> result =
+ type.batchLoad(
+ ImmutableList.of(TEST_QUERY_URN.toString(),
TEST_QUERY_2_URN.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), - Mockito.eq(QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -188,18 +202,23 @@ public void testBatchLoadNullEntity() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); QueryType type = new QueryType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), context)); } private void verifyQuery1(QueryEntity query) { @@ -207,14 +226,30 @@ private void verifyQuery1(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_1.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_1.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_1.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_1.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_1.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_1.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_1.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); + assertEquals( + 
query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_1.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); } private void verifyQuery2(QueryEntity query) { @@ -222,13 +257,29 @@ private void verifyQuery2(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_2.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_2.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_2.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_2.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_2.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_2.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_2.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_2.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java index 7f3c8f99f6593..f02fd38e2ca7c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java @@ -1,114 +1,149 @@ package com.linkedin.datahub.graphql.types.view; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubView; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.DataHubView; -import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.entity.Aspect; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; -import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class DataHubViewTypeTest { private static final String TEST_VIEW_URN = "urn:li:dataHubView:test"; private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); + /** * A Valid View is one which is minted by the createView or updateView GraphQL resolvers. * - * View Definitions currently support a limited Filter structure, which includes a single Logical filter set. - * Either a set of OR criteria with 1 value in each nested "and", or a single OR criteria with a set of nested ANDs. + *
<p>View Definitions currently support a limited Filter structure, which includes a single
+ * Logical filter set. Either a set of OR criteria with 1 value in each nested "and", or a single
+ * OR criteria with a set of nested ANDs.
 *
- * This enables us to easily support merging more complex View predicates in the future without a data migration,
- * should the need arise.
+ *
<p>This enables us to easily support merging more complex View predicates in the future without
+ * a data migration, should the need arise.
 */
- private static final DataHubViewInfo TEST_VALID_VIEW_INFO = new DataHubViewInfo()
- .setType(DataHubViewType.PERSONAL)
- .setName("test")
- .setDescription("test description")
- .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
- .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
- .setDefinition(new DataHubViewDefinition()
- .setFilter(new Filter()
- .setOr(new ConjunctiveCriterionArray(ImmutableList.of(
- new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(
- new Criterion()
- .setValues(new StringArray(ImmutableList.of("value1", "value2")))
- .setField("test")
- .setCondition(Condition.EQUAL)
- )))
- )))
- )
- .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
+ private static final DataHubViewInfo TEST_VALID_VIEW_INFO =
+ new DataHubViewInfo()
+ .setType(DataHubViewType.PERSONAL)
+ .setName("test")
+ .setDescription("test description")
+ .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+ .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN))
+ .setDefinition(
+ new DataHubViewDefinition()
+ .setFilter(
+ new Filter()
+ .setOr(
+ new ConjunctiveCriterionArray(
+ ImmutableList.of(
+ new ConjunctiveCriterion()
+ .setAnd(
+ new CriterionArray(
+ ImmutableList.of(
+ new Criterion()
+ .setValues(
+ new StringArray(
+ ImmutableList.of(
+ "value1", "value2")))
+ .setField("test")
+ .setCondition(Condition.EQUAL))))))))
+ .setEntityTypes(
+ new StringArray(
+ ImmutableList.of(
+ Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))));
 /**
- * An Invalid View is one which has been ingested manually, which should not occur under normal operation of DataHub.
+ * An Invalid View is one which has been ingested manually, which should not occur under normal
+ * operation of DataHub.
 *
- * This would be a complex view with multiple OR and nested AND predicates.
+ *

This would be a complex view with multiple OR and nested AND predicates. */ - private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = new DataHubViewInfo() - .setType(DataHubViewType.PERSONAL) - .setName("test") - .setDescription("test description") - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setDefinition(new DataHubViewDefinition() - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))), - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))) - ))) - ) - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); + private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = + new DataHubViewInfo() + .setType(DataHubViewType.PERSONAL) + .setName("test") + .setDescription("test description") + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))))))) + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); private static final String TEST_VIEW_URN_2 = "urn:li:dataHubView:test2"; @@ -123,33 +158,37 @@ public void testBatchLoadValidView() throws Exception { Map view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - viewUrn1, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(viewUrn1) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - 
com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
+ new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data())));
+ Mockito.when(
+ client.batchGetV2(
+ Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+ Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))),
+ Mockito.eq(
+ com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+ Mockito.any(Authentication.class)))
+ .thenReturn(
+ ImmutableMap.of(
+ viewUrn1,
+ new EntityResponse()
+ .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
+ .setUrn(viewUrn1)
+ .setAspects(new EnvelopedAspectMap(view1Aspects))));
+
+ com.linkedin.datahub.graphql.types.view.DataHubViewType type =
+ new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
 QueryContext mockContext = Mockito.mock(QueryContext.class);
 Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
- List<DataFetcherResult<DataHubView>> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext);
+ List<DataFetcherResult<DataHubView>> result =
+ type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext);
 // Verify response
- Mockito.verify(client, Mockito.times(1)).batchGetV2(
- Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
- Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)),
- Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
- Mockito.any(Authentication.class)
- );
+ Mockito.verify(client, Mockito.times(1))
+ .batchGetV2(
+ Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+ Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)),
+ Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+ Mockito.any(Authentication.class));
 assertEquals(result.size(), 2);
@@ -164,9 +203,12 @@ public void testBatchLoadValidView() throws Exception {
 assertEquals(view.getDefinition().getEntityTypes().get(1), EntityType.DASHBOARD);
 assertEquals(view.getDefinition().getFilter().getOperator(), LogicalOperator.AND);
 assertEquals(view.getDefinition().getFilter().getFilters().size(), 1);
- assertEquals(view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL);
+ assertEquals(
+ view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL);
 assertEquals(view.getDefinition().getFilter().getFilters().get(0).getField(), "test");
- assertEquals(view.getDefinition().getFilter().getFilters().get(0).getValues(), ImmutableList.of("value1", "value2"));
+ assertEquals(
+ view.getDefinition().getFilter().getFilters().get(0).getValues(),
+ ImmutableList.of("value1", "value2"));
 // Assert second element is null.
 assertNull(result.get(1));
@@ -174,40 +216,45 @@ public void testBatchLoadValidView() throws Exception {
 @Test
 public void testBatchLoadInvalidView() throws Exception {
- // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log a warning).
+ // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log
+ // a warning).
 EntityClient client = Mockito.mock(EntityClient.class);
 Urn invalidViewUrn = Urn.createFromString(TEST_VIEW_URN);
 Map<String, EnvelopedAspect> view1Aspects = new HashMap<>();
 view1Aspects.put(
 Constants.DATAHUB_VIEW_INFO_ASPECT_NAME,
- new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data()))
- );
- Mockito.when(client.batchGetV2(
- Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
- Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))),
- Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
- Mockito.any(Authentication.class)))
- .thenReturn(ImmutableMap.of(
- invalidViewUrn,
- new EntityResponse()
- .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
- .setUrn(invalidViewUrn)
- .setAspects(new EnvelopedAspectMap(view1Aspects))));
-
- com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
+ new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data())));
+ Mockito.when(
+ client.batchGetV2(
+ Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+ Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))),
+ Mockito.eq(
+ com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+ Mockito.any(Authentication.class)))
+ .thenReturn(
+ ImmutableMap.of(
+ invalidViewUrn,
+ new EntityResponse()
+ .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME)
+ .setUrn(invalidViewUrn)
+ .setAspects(new EnvelopedAspectMap(view1Aspects))));
+
+ com.linkedin.datahub.graphql.types.view.DataHubViewType type =
+ new com.linkedin.datahub.graphql.types.view.DataHubViewType(client);
 QueryContext mockContext = Mockito.mock(QueryContext.class);
 Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class));
- List<DataFetcherResult<DataHubView>> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext);
+ List<DataFetcherResult<DataHubView>> result =
+ type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext);
 // Verify response
- Mockito.verify(client, Mockito.times(1)).batchGetV2(
- Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
- Mockito.eq(ImmutableSet.of(invalidViewUrn)),
- Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
- Mockito.any(Authentication.class)
- );
+ Mockito.verify(client, Mockito.times(1))
+ .batchGetV2(
+ Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME),
+ Mockito.eq(ImmutableSet.of(invalidViewUrn)),
+ Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH),
+ Mockito.any(Authentication.class));
 assertEquals(result.size(), 1);
@@ -227,17 +274,21 @@ public void testBatchLoadInvalidView() throws Exception {
 @Test
 public void testBatchLoadClientException() throws Exception {
 EntityClient mockClient = Mockito.mock(EntityClient.class);
- Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2(
- Mockito.anyString(),
- Mockito.anySet(),
- Mockito.anySet(),
- Mockito.any(Authentication.class));
- com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient);
+ Mockito.doThrow(RemoteInvocationException.class)
+ .when(mockClient)
+ .batchGetV2(
+ Mockito.anyString(),
+ Mockito.anySet(),
+ Mockito.anySet(),
+ Mockito.any(Authentication.class));
+ com.linkedin.datahub.graphql.types.view.DataHubViewType type =
+ new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient);
 // Execute Batch load
 QueryContext context = Mockito.mock(QueryContext.class);
Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java index 0a58ff88586c6..6ecbc8d015b29 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java @@ -1,57 +1,50 @@ package com.linkedin.datahub.graphql.utils; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.datahub.graphql.util.DateUtil; import org.joda.time.DateTime; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - public class DateUtilTest { - private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { - DateTime result = new DateTime() - .withDate(2023, 1, dayOfMonth); - if (zeroTime) { - return new DateUtil().setTimeToZero(result); - } - return result - .withHourOfDay(1) - .withMinuteOfHour(2) - .withSecondOfMinute(3) - .withMillisOfSecond(4); + private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { + DateTime result = new DateTime().withDate(2023, 1, dayOfMonth); + if (zeroTime) { + return new DateUtil().setTimeToZero(result); } + return result.withHourOfDay(1).withMinuteOfHour(2).withSecondOfMinute(3).withMillisOfSecond(4); + } - private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { - assertEquals( - setTimeParts(dayOfMonth, true).getMillis(), - dateUtil.getStartOfNextWeek().getMillis() - ); - } + private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { + assertEquals( + setTimeParts(dayOfMonth, true).getMillis(), dateUtil.getStartOfNextWeek().getMillis()); + } - @Test - public void testStartOfNextWeek() { - DateUtil dateUtil = Mockito.spy(DateUtil.class); + @Test + public void testStartOfNextWeek() { + DateUtil dateUtil = Mockito.spy(DateUtil.class); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); - 
assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); - assertEqualStartOfNextWeek(dateUtil, 9); - } + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); + assertEqualStartOfNextWeek(dateUtil, 9); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java index 48ce2ddb6dde4..0419fe0b5254d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.*; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.identity.CorpUserInfo; @@ -7,19 +10,24 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.AssertJUnit.*; - - public class MutationsUtilsTest { @Test public void testBuildMetadataChangeProposal() { - MetadataChangeProposal metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn("urn:li:corpuser:datahub"), CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); - metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithKey(new CorpUserKey().setUsername("datahub"), - CORP_USER_ENTITY_NAME, CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + MetadataChangeProposal metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn("urn:li:corpuser:datahub"), + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithKey( + new CorpUserKey().setUsername("datahub"), + CORP_USER_ENTITY_NAME, + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java index adbc6808b5ab9..005b47df56982 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.types.common.mappers.util.RunInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.entity.EnvelopedAspect; @@ -8,10 +11,6 @@ import 
java.util.List; import org.testng.annotations.Test; -import static org.testng.Assert.*; - -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtilsTest { private final Long recentLastObserved = 1660056070640L; @@ -21,15 +20,21 @@ public class SystemMetadataUtilsTest { @Test public void testGetLastIngestedTime() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertEquals(lastObserved, mediumLastObserved); @@ -38,15 +43,21 @@ public void testGetLastIngestedTime() { @Test public void testGetLastIngestedRunId() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); String lastRunId = SystemMetadataUtils.getLastIngestedRunId(aspectMap); assertEquals(lastRunId, "real-id-1"); @@ -55,15 +66,21 @@ public void testGetLastIngestedRunId() { @Test public void testGetLastIngestedRuns() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new 
SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)));
+    aspectMap.put(
+        "real-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved)));
+    aspectMap.put(
+        "real-run-id2",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved)));
 
     List<RunInfo> runs = SystemMetadataUtils.getLastIngestionRuns(aspectMap);
 
@@ -75,15 +92,23 @@ public void testGetLastIngestedRuns() {
   @Test
   public void testGetLastIngestedTimeAllDefaultRunIds() {
     EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
-    aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)
-    ));
-    aspectMap.put("default-run-id2", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved)
-    ));
-    aspectMap.put("default-run-id3", new EnvelopedAspect().setSystemMetadata(
-        new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(distantLastObserved)
-    ));
+    aspectMap.put(
+        "default-run-id",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved)));
+    aspectMap.put(
+        "default-run-id2",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved)));
+    aspectMap.put(
+        "default-run-id3",
+        new EnvelopedAspect()
+            .setSystemMetadata(
+                new SystemMetadata()
+                    .setRunId(DEFAULT_RUN_ID)
+                    .setLastObserved(distantLastObserved)));
 
     Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap);
     assertNull(lastObserved, null);
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java
index c42e1bb7f92e0..d3aea2a3dac12 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java
@@ -1,30 +1,19 @@
 package com.linkedin.datahub.upgrade;
 
 import com.google.common.collect.ImmutableList;
-
 import java.util.List;
 
-
-/**
- * Specification of an upgrade to be performed to the DataHub platform.
- */
+/** Specification of an upgrade to be performed to the DataHub platform. */
 public interface Upgrade {
-
-  /**
-   * String identifier for the upgrade.
-   */
+  /** String identifier for the upgrade. */
   String id();
 
-  /**
-   * Returns a set of steps to perform during the upgrade.
-   */
+  /** Returns a set of steps to perform during the upgrade. */
   List<UpgradeStep> steps();
 
-  /**
-   * Returns a set of steps to perform on upgrade success, failure, or abort.
-   */
+  /** Returns a set of steps to perform on upgrade success, failure, or abort. */
   default List<UpgradeCleanupStep> cleanupSteps() {
     return ImmutableList.of();
   }
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java
index bf356c60a21a4..6da656020edf8 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java
@@ -2,21 +2,15 @@
 
 import java.util.function.BiConsumer;
 
-
 /**
  * Step executed on finish of an {@link Upgrade}.
  *
- * Note that this step is not retried, even in case of failures.
+ * <p>Note that this step is not retried, even in case of failures.
  */
 public interface UpgradeCleanupStep {
 
-  /**
-   * Returns an identifier for the upgrade step.
-   */
+  /** Returns an identifier for the upgrade step. */
   String id();
 
-  /**
-   * Returns a function representing the cleanup step's logic.
-   */
+  /** Returns a function representing the cleanup step's logic. */
   BiConsumer<UpgradeContext, UpgradeResult> executable();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java
index e6be6905accee..eee27096e2238 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java
@@ -1,14 +1,14 @@
 package com.linkedin.datahub.upgrade;
 
-import com.linkedin.datahub.upgrade.system.SystemUpdate;
-import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices;
 import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager;
-import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices;
 import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade;
 import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade;
 import com.linkedin.datahub.upgrade.removeunknownaspects.RemoveUnknownAspects;
 import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup;
 import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices;
+import com.linkedin.datahub.upgrade.system.SystemUpdate;
+import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices;
+import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices;
 import java.util.List;
 import javax.inject.Inject;
 import javax.inject.Named;
@@ -17,7 +17,6 @@
 import org.springframework.stereotype.Component;
 import picocli.CommandLine;
 
-
 @Slf4j
 @Component
 public class UpgradeCli implements CommandLineRunner {
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java
index 53a5c0758f318..909ceeb8f3bab 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java
@@ -8,18 +8,23 @@
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.FilterType;
 
-
 @SuppressWarnings("checkstyle:HideUtilityClassConstructor")
 @SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class})
-@ComponentScan(basePackages = {
-    "com.linkedin.gms.factory",
-    "com.linkedin.datahub.upgrade.config",
-    "com.linkedin.metadata.dao.producer"
-}, excludeFilters = {
-    @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class)
-})
+@ComponentScan(
+    basePackages = {
+      "com.linkedin.gms.factory",
+      "com.linkedin.datahub.upgrade.config",
+      "com.linkedin.metadata.dao.producer"
+    },
+    excludeFilters = {
+      @ComponentScan.Filter(
+          type = FilterType.ASSIGNABLE_TYPE,
+          classes = ScheduledAnalyticsFactory.class)
+    })
 public class UpgradeCliApplication {
   public static void main(String[] args) {
-    new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class).web(WebApplicationType.NONE).run(args);
+    new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class)
+        .web(WebApplicationType.NONE)
+        .run(args);
   }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java
index 76cfc6321adfd..25a3d44b6e9da 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java
@@ -4,35 +4,21 @@
 import java.util.Map;
 import java.util.Optional;
 
-
-/**
- * Context about a currently running upgrade.
- */
+/** Context about a currently running upgrade. */
 public interface UpgradeContext {
 
-  /**
-   * Returns the currently running upgrade.
-   */
+  /** Returns the currently running upgrade. */
   Upgrade upgrade();
 
-  /**
-   * Returns the results from steps that have been completed.
-   */
+  /** Returns the results from steps that have been completed. */
   List<UpgradeStepResult> stepResults();
 
-  /**
-   * Returns a report object where human-readable messages can be logged.
-   */
+  /** Returns a report object where human-readable messages can be logged. */
   UpgradeReport report();
 
-  /**
-   * Returns a list of raw arguments that have been provided as input to the upgrade.
-   */
+  /** Returns a list of raw arguments that have been provided as input to the upgrade. */
   List<String> args();
 
-  /**
-   * Returns a map of argument to <>optional value, as delimited by an '=' character.
-   */
+  /** Returns a map of argument to <>optional value, as delimited by an '=' character. */
   Map<String, Optional<String>> parsedArgs();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java
index 927ccc0578308..c01aca12254a3 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java
@@ -2,20 +2,12 @@
 
 import java.util.List;
 
-
-/**
- * Responsible for managing the execution of an {@link Upgrade}.
- */
+/** Responsible for managing the execution of an {@link Upgrade}. */
 public interface UpgradeManager {
 
-  /**
-   * Register an {@link Upgrade} with the manaager.
-   */
+  /** Register an {@link Upgrade} with the manager. */
   void register(Upgrade upgrade);
 
-  /**
-   * Kick off an {@link Upgrade} by identifier.
-   */
+  /** Kick off an {@link Upgrade} by identifier. */
   UpgradeResult execute(String upgradeId, List<String> args);
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java
index 2ed3f105a4eda..1c677f6fe8578 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java
@@ -2,25 +2,15 @@
 
 import java.util.List;
 
-
-/**
- * A human-readable record of upgrade progress + status.
- */
+/** A human-readable record of upgrade progress + status. */
 public interface UpgradeReport {
 
-  /**
-   * Adds a new line to the upgrade report.
-   */
+  /** Adds a new line to the upgrade report. */
   void addLine(String line);
 
-  /**
-   * Adds a new line to the upgrade report with exception
-   */
+  /** Adds a new line to the upgrade report with an exception. */
   void addLine(String line, Exception e);
 
-  /**
-   * Retrieves the lines in the report.
-   */
+  /** Retrieves the lines in the report. */
   List<String> lines();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java
index cdb94f0c0bba1..25dc758575fd1 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java
@@ -1,36 +1,21 @@
 package com.linkedin.datahub.upgrade;
 
-/**
- * Represents the result of executing an {@link Upgrade}
- */
+/** Represents the result of executing an {@link Upgrade} */
 public interface UpgradeResult {
 
-  /**
-   * The execution result.
-   */
+  /** The execution result. */
   enum Result {
-    /**
-     * Upgrade succeeded.
-     */
+    /** Upgrade succeeded. */
     SUCCEEDED,
-    /**
-     * Upgrade failed.
-     */
+    /** Upgrade failed. */
     FAILED,
-    /**
-     * Upgrade was aborted.
-     */
+    /** Upgrade was aborted. */
    ABORTED
   }
 
-  /**
-   * Returns the {@link Result} of executing an {@link Upgrade}
-   */
+  /** Returns the {@link Result} of executing an {@link Upgrade} */
   Result result();
 
-  /**
-   * Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}.
-   */
+  /** Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. */
   UpgradeReport report();
-
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java
index b85bd7a51e3dd..3f90dcb33a005 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java
@@ -2,39 +2,29 @@
 
 import java.util.function.Function;
 
-
-/**
- * Represents a single executable step in an {@link Upgrade}.
- */
+/** Represents a single executable step in an {@link Upgrade}. */
 public interface UpgradeStep {
 
-  /**
-   * Returns an identifier for the upgrade step.
-   */
+  /** Returns an identifier for the upgrade step. */
   String id();
 
-  /**
-   * Returns a function representing the step's execution logic.
-   */
+  /** Returns a function representing the step's execution logic. */
   Function<UpgradeContext, UpgradeStepResult> executable();
 
-  /**
-   * Returns the number of times the step should be retried.
-   */
+  /** Returns the number of times the step should be retried. */
   default int retryCount() {
     return 0;
   }
 
   /**
-   * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries.
+   * Returns whether the upgrade should proceed if the step fails after exceeding the maximum
+   * retries.
    */
   default boolean isOptional() {
     return false;
   }
 
-  /**
-   * Returns whether or not to skip the step based on the UpgradeContext
-   */
+  /** Returns whether or not to skip the step based on the UpgradeContext */
   default boolean skip(UpgradeContext context) {
     return false;
   }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java
index 60d51f9ba476c..04b3d4b8559e6 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java
@@ -2,52 +2,33 @@
 
 public interface UpgradeStepResult {
 
-  /**
-   * Returns a string identifier associated with the step.
-   */
+  /** Returns a string identifier associated with the step. */
   String stepId();
 
-  /**
-   * The outcome of the step execution.
-   */
+  /** The outcome of the step execution. */
   enum Result {
-    /**
-     * The step succeeded.
-     */
+    /** The step succeeded. */
     SUCCEEDED,
-    /**
-     * The step failed.
-     */
+    /** The step failed. */
     FAILED
   }
 
-  /**
-   * A control-flow action to perform as a result of the step execution.
-   */
+  /** A control-flow action to perform as a result of the step execution. */
   enum Action {
-    /**
-     * Continue attempting the upgrade.
-     */
+    /** Continue attempting the upgrade. */
     CONTINUE,
-    /**
-     * Immediately fail the upgrade, without retry.
-     */
+    /** Immediately fail the upgrade, without retry. */
     FAIL,
-    /**
-     * Immediately abort the upgrade, without retry.
-     */
+    /** Immediately abort the upgrade, without retry. */
     ABORT
   }
 
-  /**
-   * Returns the result of executing the step, either success or failure.
-   */
+  /** Returns the result of executing the step, either success or failure. */
   Result result();
 
-  /**
-   * Returns the action to perform after executing the step, either continue or abort.
-   */
+  /** Returns the action to perform after executing the step, either continue or abort. */
   default Action action() {
     return Action.CONTINUE;
-  };
+  }
+  ;
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java
index a6f3ef5560442..8d5f1118433fc 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java
@@ -19,10 +19,12 @@ public static Map<String, Optional<String>> parseArgs(final List<String> args) {
 
     for (final String arg : args) {
       List<String> parsedArg = Arrays.asList(arg.split(KEY_VALUE_DELIMITER, 2));
-      parsedArgs.put(parsedArg.get(0), parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty());
+      parsedArgs.put(
+          parsedArg.get(0),
+          parsedArg.size() > 1 ? 
Optional.of(parsedArg.get(1)) : Optional.empty()); } return parsedArgs; } - private UpgradeUtils() { } + private UpgradeUtils() {} } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java index 4f980b11b888a..393b5411599ad 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java @@ -8,7 +8,6 @@ import com.linkedin.metadata.graph.GraphService; import java.util.function.Function; - public class ClearGraphServiceStep implements UpgradeStep { private final String deletePattern = ".*"; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java index fca8f60aefd95..230f5a60cb9ff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java @@ -8,13 +8,13 @@ import com.linkedin.metadata.search.EntitySearchService; import java.util.function.Function; - public class ClearSearchServiceStep implements UpgradeStep { private final EntitySearchService _entitySearchService; private final boolean _alwaysRun; - public ClearSearchServiceStep(final EntitySearchService entitySearchService, final boolean alwaysRun) { + public ClearSearchServiceStep( + final EntitySearchService entitySearchService, final boolean alwaysRun) { _entitySearchService = entitySearchService; _alwaysRun = alwaysRun; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index 270aa11c7b070..dd6c3fd1e44aa 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -8,7 +8,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 8df02123983e8..8a0d374d6ee3e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -8,7 +8,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { private final SystemRestliEntityClient _entityClient; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java index 1391ef685c335..4e7447cb1e2cb 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.common.steps; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -18,9 +20,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class GMSQualificationStep implements UpgradeStep { @@ -70,9 +69,16 @@ private boolean isEligible(ObjectNode configJson) { @Override public Function executable() { return (context) -> { - String gmsHost = System.getenv("DATAHUB_GMS_HOST") == null ? "localhost" : System.getenv("DATAHUB_GMS_HOST"); - String gmsPort = System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); - String gmsProtocol = System.getenv("DATAHUB_GMS_PROTOCOL") == null ? "http" : System.getenv("DATAHUB_GMS_PROTOCOL"); + String gmsHost = + System.getenv("DATAHUB_GMS_HOST") == null + ? "localhost" + : System.getenv("DATAHUB_GMS_HOST"); + String gmsPort = + System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); + String gmsProtocol = + System.getenv("DATAHUB_GMS_PROTOCOL") == null + ? "http" + : System.getenv("DATAHUB_GMS_PROTOCOL"); try { String spec = String.format("%s://%s:%s/config", gmsProtocol, gmsHost, gmsPort); @@ -81,33 +87,37 @@ public Function executable() { String responseString = convertStreamToString(response); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, - MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode configJson = mapper.readTree(responseString); if (isEligible((ObjectNode) configJson)) { - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } else { - context.report().addLine(String.format("Failed to qualify GMS. It is not running on the latest version." - + "Re-run GMS on the latest datahub release")); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "Failed to qualify GMS. It is not running on the latest version." + + "Re-run GMS on the latest datahub release")); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } catch (Exception e) { e.printStackTrace(); - context.report().addLine(String.format("ERROR: Cannot connect to GMS" - + "at %s://host %s port %s. Make sure GMS is on the latest version " - + "and is running at that host before starting the migration.", - gmsProtocol, - gmsHost, - gmsPort)); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "ERROR: Cannot connect to GMS" + + "at %s://host %s port %s. 
Make sure GMS is on the latest version " + + "and is running at that host before starting the migration.", + gmsProtocol, gmsHost, gmsPort)); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index 16e5e4247267f..abd144bf453ed 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -6,12 +6,12 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class BackfillBrowsePathsV2Config { @Bean - public BackfillBrowsePathsV2 backfillBrowsePathsV2(EntityService entityService, SearchService searchService) { + public BackfillBrowsePathsV2 backfillBrowsePathsV2( + EntityService entityService, SearchService searchService) { return new BackfillBrowsePathsV2(entityService, searchService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index e98f0dc2093f6..1e9298bc60612 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -10,16 +10,24 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class BuildIndicesConfig { @Bean(name = "buildIndices") - public BuildIndices buildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public BuildIndices buildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new BuildIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new BuildIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java index 558c9780911ac..5bd7244a92e45 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java @@ -10,16 +10,24 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class CleanIndicesConfig { 
@Bean(name = "cleanIndices") - public CleanIndices cleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public CleanIndices cleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new CleanIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new CleanIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 23ea81009fa1d..24bcec5852b4f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.config; +import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; + import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -12,17 +14,18 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; - - @Configuration public class NoCodeCleanupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeCleanup") - @DependsOn({"ebeanServer", "graphService", "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN}) + @DependsOn({ + "ebeanServer", + "graphService", + "elasticSearchRestHighLevelClient", + INDEX_CONVENTION_BEAN + }) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index cd264e529e9a5..68009d7ed1718 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -12,12 +12,10 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class NoCodeUpgradeConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) @@ -25,7 +23,8 @@ 
public class NoCodeUpgradeConfig { public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index cdc739efc416d..0b46133209382 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -5,7 +5,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 97a08800534de..743e4ffe84b0e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -14,25 +14,30 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class RestoreBackupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreBackup") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "graphService", - "searchService", "entityRegistry"}) + @DependsOn({ + "ebeanServer", + "entityService", + "systemRestliEntityClient", + "graphService", + "searchService", + "entityRegistry" + }) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final GraphService graphClient = applicationContext.getBean(GraphService.class); final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreBackup(ebeanServer, entityService, entityRegistry, entityClient, - graphClient, searchClient); + return new RestoreBackup( + ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 663cad4a4bff6..d258c4a4d1a52 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -13,11 +13,9 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration public class RestoreIndicesConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) @@ -25,11 +23,12 @@ public class RestoreIndicesConfig { public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); + final EntitySearchService entitySearchService = + applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices(ebeanServer, entityService, entityRegistry, entitySearchService, - graphService); + return new RestoreIndices( + ebeanServer, entityService, entityRegistry, entitySearchService, graphService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 9848fc7a0008f..3b63d81486eb4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -24,18 +24,21 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Slf4j @Configuration public class SystemUpdateConfig { @Bean(name = "systemUpdate") - public SystemUpdate systemUpdate(final BuildIndices buildIndices, final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + public SystemUpdate systemUpdate( + final BuildIndices buildIndices, + final CleanIndices cleanIndices, + @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate(buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + return new SystemUpdate( + buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -50,16 +53,18 @@ public String getRevision() { @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; - @Autowired - private KafkaHealthChecker kafkaHealthChecker; + @Autowired private KafkaHealthChecker kafkaHealthChecker; @Bean(name = "duheKafkaEventProducer") - protected KafkaEventProducer duheKafkaEventProducer(@Qualifier("configurationProvider") ConfigurationProvider provider, - 
KafkaProperties properties,
-      @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) {
+  protected KafkaEventProducer duheKafkaEventProducer(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties properties,
+      @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) {
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    Producer<String, IndexedRecord> producer = new KafkaProducer<>(
-        DataHubKafkaProducerFactory.buildProducerProperties(duheSchemaRegistryConfig, kafkaConfiguration, properties));
+    Producer<String, IndexedRecord> producer =
+        new KafkaProducer<>(
+            DataHubKafkaProducerFactory.buildProducerProperties(
+                duheSchemaRegistryConfig, kafkaConfiguration, properties));
     return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker);
   }
 }
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java
index 972b55f2001f1..6cc94fbed5bf3 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java
@@ -9,7 +9,6 @@
 import java.util.Map;
 import java.util.Optional;
 
-
 public class DefaultUpgradeContext implements UpgradeContext {
 
   private final Upgrade _upgrade;
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java
index a642ee3fb0a90..623c8a71e861d 100644
--- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java
@@ -17,7 +17,6 @@
 import java.util.Map;
 import javax.annotation.Nonnull;
 
-
 public class DefaultUpgradeManager implements UpgradeManager {
 
   private final Map<String, Upgrade> _upgrades = new HashMap<>();
@@ -32,16 +31,19 @@ public UpgradeResult execute(String upgradeId, List<String> args) {
     if (_upgrades.containsKey(upgradeId)) {
       return executeInternal(_upgrades.get(upgradeId), args);
     }
-    throw new IllegalArgumentException(String.format("No upgrade with id %s could be found. Aborting...", upgradeId));
+    throw new IllegalArgumentException(
+        String.format("No upgrade with id %s could be found. Aborting...", upgradeId));
   }
 
   private UpgradeResult executeInternal(Upgrade upgrade, List<String> args) {
     final UpgradeReport upgradeReport = new DefaultUpgradeReport();
-    final UpgradeContext context = new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args);
+    final UpgradeContext context =
+        new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args);
     upgradeReport.addLine(String.format("Starting upgrade with id %s...", upgrade.id()));
     UpgradeResult result = executeInternal(context);
     upgradeReport.addLine(
-        String.format("Upgrade %s completed with result %s. Exiting...", upgrade.id(), result.result()));
+        String.format(
+            "Upgrade %s completed with result %s. 
Exiting...", upgrade.id(), result.result())); executeCleanupInternal(context, result); return result; } @@ -58,12 +60,16 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (step.skip(context)) { upgradeReport.addLine( - String.format(String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); continue; } upgradeReport.addLine( - String.format(String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); final UpgradeStepResult stepResult = executeStepInternal(context, step); stepResults.add(stepResult); @@ -71,7 +77,8 @@ private UpgradeResult executeInternal(UpgradeContext context) { // Apply Actions if (UpgradeStepResult.Action.ABORT.equals(stepResult.action())) { upgradeReport.addLine( - String.format("Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", + String.format( + "Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", step.id())); return new DefaultUpgradeResult(UpgradeResult.Result.ABORTED, upgradeReport); } @@ -80,23 +87,27 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (UpgradeStepResult.Result.FAILED.equals(stepResult.result())) { if (step.isOptional()) { upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", i + 1, - steps.size(), step.id())); + String.format( + "Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", + i + 1, steps.size(), step.id())); continue; } // Required step failed. Fail the entire upgrade process. upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Failed after %s retries.", i + 1, steps.size(), step.id(), - step.retryCount())); + String.format( + "Failed Step %s/%s: %s. Failed after %s retries.", + i + 1, steps.size(), step.id(), step.retryCount())); upgradeReport.addLine(String.format("Exiting upgrade %s with failure.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.FAILED, upgradeReport); } - upgradeReport.addLine(String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); + upgradeReport.addLine( + String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); } - upgradeReport.addLine(String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); + upgradeReport.addLine( + String.format("Success! 
Completed upgrade with id %s successfully.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.SUCCEEDED, upgradeReport); } @@ -105,15 +116,19 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte UpgradeStepResult result = null; int maxAttempts = retryCount + 1; for (int i = 0; i < maxAttempts; i++) { - try (Timer.Context completionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { - try (Timer.Context executionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { + try (Timer.Context completionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { + try (Timer.Context executionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { result = step.executable().apply(context); } if (result == null) { // Failed to even retrieve a result. Create a default failure result. result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); - context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); + context + .report() + .addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); MetricUtils.counter(MetricRegistry.name(step.id(), "retry")).inc(); } @@ -122,9 +137,11 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); + String.format( + "Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); MetricUtils.counter(MetricRegistry.name(step.id(), "failed")).inc(); result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); @@ -139,7 +156,11 @@ private void executeCleanupInternal(UpgradeContext context, UpgradeResult result try { step.executable().accept(context, result); } catch (Exception e) { - context.report().addLine(String.format("Caught exception while executing cleanup step with id %s", step.id())); + context + .report() + .addLine( + String.format( + "Caught exception while executing cleanup step with id %s", step.id())); } } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java index 19706937e20ca..913b0ff20e6ff 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java @@ -1,10 +1,9 @@ package com.linkedin.datahub.upgrade.impl; import com.linkedin.datahub.upgrade.UpgradeReport; -import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.List; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class DefaultUpgradeReport implements UpgradeReport { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java index 6ecb522848291..cf0e7221b406b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java @@ -3,7 +3,6 @@ import 
com.linkedin.datahub.upgrade.UpgradeReport; import com.linkedin.datahub.upgrade.UpgradeResult; - public class DefaultUpgradeResult implements UpgradeResult { private final Result _result; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java index d0c086f607edd..e11eaf89bfc8d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; - public class DefaultUpgradeStepResult implements UpgradeStepResult { private final String _stepId; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java index 7ed7169bf20bc..3b3098f43c473 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import io.ebean.Database; import java.util.function.Function; @@ -36,40 +36,42 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - - DbType targetDbType = context.parsedArgs().containsKey(DB_TYPE_ARG) - ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) - : DbType.MYSQL; + DbType targetDbType = + context.parsedArgs().containsKey(DB_TYPE_ARG) + ? 
DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) + : DbType.MYSQL; String sqlUpdateStr; switch (targetDbType) { case POSTGRES: - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint not null,\n" - + " metadata text not null,\n" - + " systemmetadata text,\n" - + " createdon timestamp not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint not null,\n" + + " metadata text not null,\n" + + " systemmetadata text,\n" + + " createdon timestamp not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; default: // both mysql and maria - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint(20) not null,\n" - + " metadata longtext not null,\n" - + " systemmetadata longtext,\n" - + " createdon datetime(6) not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint(20) not null,\n" + + " metadata longtext not null,\n" + + " systemmetadata longtext,\n" + + " createdon datetime(6) not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; } @@ -77,9 +79,7 @@ public Function executable() { _server.execute(_server.createSqlUpdate(sqlUpdateStr)); } catch (Exception e) { context.report().addLine("Failed to create table metadata_aspect_v2", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index 1b5770a11ff62..ac56e5e91c72b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -1,22 +1,22 @@ package com.linkedin.datahub.upgrade.nocode; +import com.datahub.util.RecordUtils; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.models.AspectSpec; 
-import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.utils.PegasusUtils; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.util.Pair; import io.ebean.Database; import io.ebean.PagedList; @@ -29,13 +29,13 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; - public class DataMigrationStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; private static final long DEFAULT_BATCH_DELAY_MS = 250; - private static final String BROWSE_PATHS_ASPECT_NAME = PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); + private static final String BROWSE_PATHS_ASPECT_NAME = + PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); private final Database _server; private final EntityService _entityService; @@ -64,7 +64,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - context.report().addLine("Starting data migration..."); final int rowCount = _server.find(EbeanAspectV1.class).findCount(); context.report().addLine(String.format("Found %s rows in legacy aspects table", rowCount)); @@ -74,7 +73,11 @@ public Function executable() { int count = getBatchSize(context.parsedArgs()); while (start < rowCount) { - context.report().addLine(String.format("Reading rows %s through %s from legacy aspects table.", start, start + count)); + context + .report() + .addLine( + String.format( + "Reading rows %s through %s from legacy aspects table.", start, start + count)); PagedList rows = getPagedAspects(start, count); for (EbeanAspectV1 oldAspect : rows.getList()) { @@ -84,11 +87,18 @@ public Function executable() { // 1. Instantiate the RecordTemplate class associated with the aspect. final RecordTemplate aspectRecord; try { - aspectRecord = RecordUtils.toRecordTemplate( - Class.forName(oldAspectName).asSubclass(RecordTemplate.class), - oldAspect.getMetadata()); + aspectRecord = + RecordUtils.toRecordTemplate( + Class.forName(oldAspectName).asSubclass(RecordTemplate.class), + oldAspect.getMetadata()); } catch (Exception e) { - context.report().addLine(String.format("Failed to convert aspect with name %s into a RecordTemplate class", oldAspectName), e); + context + .report() + .addLine( + String.format( + "Failed to convert aspect with name %s into a RecordTemplate class", + oldAspectName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -97,7 +107,11 @@ public Function executable() { try { urn = Urn.createFromString(oldAspect.getKey().getUrn()); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to bind Urn with value %s into Urn object", oldAspect.getKey().getUrn()), e); + throw new RuntimeException( + String.format( + "Failed to bind Urn with value %s into Urn object", + oldAspect.getKey().getUrn()), + e); } // 3. Verify that the entity associated with the aspect is found in the registry. 
@@ -106,7 +120,12 @@ public Function executable() { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find Entity with name %s in Entity Registry", entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -115,9 +134,13 @@ public Function executable() { try { newAspectName = PegasusUtils.getAspectNameFromSchema(aspectRecord.schema()); } catch (Exception e) { - context.report().addLine(String.format("Failed to retrieve @Aspect name from schema %s, urn %s", - aspectRecord.schema().getFullName(), - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to retrieve @Aspect name from schema %s, urn %s", + aspectRecord.schema().getFullName(), entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -126,23 +149,24 @@ public Function executable() { try { aspectSpec = entitySpec.getAspectSpec(newAspectName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - newAspectName, - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + newAspectName, entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } // 6. Write the row back using the EntityService boolean emitMae = oldAspect.getKey().getVersion() == 0L; _entityService.ingestAspects( - urn, - List.of(Pair.of(newAspectName, aspectRecord)), - toAuditStamp(oldAspect), - null - ); + urn, List.of(Pair.of(newAspectName, aspectRecord)), toAuditStamp(oldAspect), null); // 7. If necessary, emit a browse path aspect. - if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) && !urnsWithBrowsePath.contains(urn)) { + if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) + && !urnsWithBrowsePath.contains(urn)) { // Emit a browse path aspect. 
final BrowsePaths browsePaths; try { @@ -152,7 +176,11 @@ public Function executable() { browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); browsePathsStamp.setTime(System.currentTimeMillis()); - _entityService.ingestAspects(urn, List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), browsePathsStamp, null); + _entityService.ingestAspects( + urn, + List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), + browsePathsStamp, + null); urnsWithBrowsePath.add(urn); } catch (URISyntaxException e) { @@ -167,13 +195,17 @@ public Function executable() { try { TimeUnit.MILLISECONDS.sleep(getBatchDelayMs(context.parsedArgs())); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } } if (totalRowsMigrated != rowCount) { - context.report().addLine(String.format("Number of rows migrated %s does not equal the number of input rows %s...", - totalRowsMigrated, - rowCount)); + context + .report() + .addLine( + String.format( + "Number of rows migrated %s does not equal the number of input rows %s...", + totalRowsMigrated, rowCount)); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -195,9 +227,9 @@ private AuditStamp toAuditStamp(final EbeanAspectV1 aspect) { return auditStamp; } - private PagedList getPagedAspects(final int start, final int pageSize) { - return _server.find(EbeanAspectV1.class) + return _server + .find(EbeanAspectV1.class) .select(EbeanAspectV1.ALL_COLUMNS) .setFirstRow(start) .setMaxRows(pageSize) @@ -219,7 +251,8 @@ private long getBatchDelayMs(final Map> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (parsedArgs.containsKey(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME) && parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).isPresent()) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index a299deb874721..6753d309b9f50 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -30,10 +30,7 @@ public NoCodeUpgrade( final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps( - server, entityService, - entityRegistry, - entityClient); + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); _cleanupSteps = buildCleanupSteps(); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java index cf8e848762f14..6180573d902d2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java @@ -7,10 +7,7 @@ import io.ebean.Database; import 
java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. */ public class RemoveAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java index 0fe9afa8cc6f8..d22af9d292400 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; import io.ebean.Database; import java.util.function.Function; @@ -29,7 +29,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - if (context.parsedArgs().containsKey(NoCodeUpgrade.FORCE_UPGRADE_ARG_NAME)) { context.report().addLine("Forced upgrade detected. Proceeding with upgrade..."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -43,7 +42,8 @@ public Function executable() { } // Unqualified (Table already exists) context.report().addLine("Failed to qualify upgrade candidate. Aborting the upgrade..."); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); + return new DefaultUpgradeStepResult( + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); @@ -67,8 +67,13 @@ private boolean isQualified(Database server, UpgradeContext context) { return true; } context.report().addLine(String.format("-- V2 table has %d rows", v2TableRowCount)); - context.report().addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); - context.report().addLine("-- If V2 table has significantly less rows, consider running the forced upgrade. "); + context + .report() + .addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); + context + .report() + .addLine( + "-- If V2 table has significantly less rows, consider running the forced upgrade. "); return false; } context.report().addLine("-- V2 table does not exist"); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java index 8005e31e01c67..ba0a0124545e9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java @@ -7,7 +7,6 @@ import io.ebean.Database; import java.util.function.Function; - // Do we need SQL-tech specific migration paths? 
public class DeleteAspectTableStep implements UpgradeStep { @@ -34,9 +33,7 @@ public Function executable() { _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); } catch (Exception e) { context.report().addLine("Failed to delete data from legacy table metadata_aspect", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java index 12ff125a05127..5066e05f8bf5a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java @@ -6,10 +6,8 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; // Do we need SQL-tech specific migration paths? @Slf4j @@ -44,9 +42,7 @@ public Function executable() { } } catch (Exception e) { context.report().addLine("Failed to delete legacy data from graph", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java index 9a64d5fe1810c..05656373377b9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java @@ -11,7 +11,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; - // Do we need SQL-tech specific migration paths? 
@RequiredArgsConstructor public class DeleteLegacySearchIndicesStep implements UpgradeStep { @@ -20,7 +19,8 @@ public class DeleteLegacySearchIndicesStep implements UpgradeStep { private final RestHighLevelClient _searchClient; - public DeleteLegacySearchIndicesStep(final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public DeleteLegacySearchIndicesStep( + final RestHighLevelClient searchClient, final IndexConvention indexConvention) { _searchClient = searchClient; deletePattern = indexConvention.getPrefix().map(p -> p + "_").orElse("") + "*document*"; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index a5d8d6ce9b666..8a267be6ad808 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -11,15 +11,17 @@ import java.util.List; import org.opensearch.client.RestHighLevelClient; - public class NoCodeCleanupUpgrade implements Upgrade { private final List _steps; private final List _cleanupSteps; // Upgrade requires the Database. - public NoCodeCleanupUpgrade(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public NoCodeCleanupUpgrade( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); _cleanupSteps = buildCleanupSteps(); } @@ -43,8 +45,11 @@ private List buildCleanupSteps() { return Collections.emptyList(); } - private List buildUpgradeSteps(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + private List buildUpgradeSteps( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { final List steps = new ArrayList<>(); steps.add(new NoCodeUpgradeQualificationStep(server)); steps.add(new DeleteAspectTableStep(server)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java index 67a226f8f0676..15c7584532e2c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java @@ -8,7 +8,6 @@ import io.ebean.Database; import java.util.function.Function; - public class NoCodeUpgradeQualificationStep implements UpgradeStep { private final Database _server; @@ -33,23 +32,19 @@ public Function executable() { try { if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { // Unqualified (V2 Table does not exist) - context.report().addLine("You have not successfully migrated yet. Aborting the cleanup..."); + context + .report() + .addLine("You have not successfully migrated yet. 
Aborting the cleanup..."); return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED, - UpgradeStepResult.Action.ABORT); + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } else { // Qualified. context.report().addLine("Found qualified upgrade candidate. Proceeding with upgrade..."); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists: %s", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index b55d439745e69..7e55dcddc639f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -11,7 +11,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements UpgradeStep { @@ -33,9 +32,10 @@ public boolean skip(UpgradeContext context) { @Override public Function executable() { return upgradeContext -> { - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, - new HashMap<>(), true); - return (UpgradeStepResult) new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, new HashMap<>(), true); + return (UpgradeStepResult) + new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index f8af69dba0865..dc95b7605ef88 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -8,7 +8,6 @@ import java.util.ArrayList; import java.util.List; - public class RemoveUnknownAspects implements Upgrade { private final List _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java index 0303739e62afe..addf6dcb89c1a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java @@ -8,10 +8,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. 
*/ public class ClearAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index 9175ad606e3c8..b11abb2d6bc23 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -17,7 +17,6 @@ import java.util.ArrayList; import java.util.List; - public class RestoreBackup implements Upgrade { private final List _steps; @@ -29,7 +28,8 @@ public RestoreBackup( final SystemRestliEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + _steps = + buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 42f7f0073e59b..5c4567c856d0e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; @@ -35,7 +34,6 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class RestoreStorageStep implements UpgradeStep { private static final int REPORT_BATCH_SIZE = 1000; @@ -43,11 +41,13 @@ public class RestoreStorageStep implements UpgradeStep { private final EntityService _entityService; private final EntityRegistry _entityRegistry; - private final Map>>> _backupReaders; + private final Map>>> + _backupReaders; private final ExecutorService _fileReaderThreadPool; private final ExecutorService _gmsThreadPool; - public RestoreStorageStep(final EntityService entityService, final EntityRegistry entityRegistry) { + public RestoreStorageStep( + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); @@ -82,7 +82,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - context.report().addLine("Starting backup restore..."); int numRows = 0; Optional backupReaderName = context.parsedArgs().get("BACKUP_READER"); @@ -93,19 +92,32 @@ public Function executable() { return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } - Class> clazz = _backupReaders.get(backupReaderName.get()); + Class> clazz = + _backupReaders.get(backupReaderName.get()); List argNames = BackupReaderArgs.getArgNames(clazz); - List> args = argNames.stream().map(argName -> context.parsedArgs().get(argName)).collect( - Collectors.toList()); + List> args = + argNames.stream() + .map(argName -> context.parsedArgs().get(argName)) + .collect(Collectors.toList()); BackupReader backupReader; try { backupReader = 
clazz.getConstructor(List.class).newInstance(args); - } catch (InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { + } catch (InstantiationException + | InvocationTargetException + | IllegalAccessException + | NoSuchMethodException e) { e.printStackTrace(); - context.report().addLine("Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); - throw new IllegalArgumentException("Invalid BackupReader: " + clazz.getSimpleName() + ", need to implement proper constructor."); + context + .report() + .addLine( + "Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); + throw new IllegalArgumentException( + "Invalid BackupReader: " + + clazz.getSimpleName() + + ", need to implement proper constructor."); } - EbeanAspectBackupIterator iterator = backupReader.getBackupIterator(context); + EbeanAspectBackupIterator iterator = + backupReader.getBackupIterator(context); ReaderWrapper reader; List> futureList = new ArrayList<>(); while ((reader = iterator.getNextReader()) != null) { @@ -138,9 +150,12 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), e); + String.format( + "Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), + e); continue; } @@ -150,8 +165,11 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format("Failed to find Entity with name %s in Entity Registry", entityName), + e); continue; } final String aspectName = aspect.getKey().getAspect(); @@ -160,11 +178,16 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final RecordTemplate aspectRecord; try { aspectRecord = - EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to create aspect record with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to create aspect record with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } @@ -173,17 +196,27 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { aspectSpec = entitySpec.getAspectSpec(aspectName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } // 5. 
Write the row back using the EntityService final long version = aspect.getKey().getVersion(); final AuditStamp auditStamp = toAuditStamp(aspect); - futureList.add(_gmsThreadPool.submit(() -> - _entityService.ingestAspects(urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null).get(0).getNewValue())); + futureList.add( + _gmsThreadPool.submit( + () -> + _entityService + .ingestAspects( + urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null) + .get(0) + .getNewValue())); if (numRows % REPORT_BATCH_SIZE == 0) { for (Future future : futureList) { try { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 7ea1811adfdd8..212f0da9f592d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -3,10 +3,10 @@ import com.linkedin.datahub.upgrade.UpgradeContext; import javax.annotation.Nonnull; - /** - * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 object to be - * ingested back into GMS. Must have a constructor that takes a List of Optional Strings + * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 + * objects to be ingested back into GMS. Must have a constructor that takes a List of Optional + * Strings */ public interface BackupReader { String getName(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java index 20f43b5414ddd..6176d56fbec95 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java @@ -4,14 +4,9 @@ import java.util.List; import java.util.Map; - -/** - * Retains a map of what arguments are passed in to a backup reader - */ +/** Retains a map of what arguments are passed in to a backup reader */ public final class BackupReaderArgs { - private BackupReaderArgs() { - - } + private BackupReaderArgs() {} private static final Map, List> ARGS_MAP; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java index 3a2505311e245..cce5928277a20 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java @@ -7,10 +7,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * Base interface for iterators that retrieves EbeanAspectV2 objects - * This allows us to restore from backups of various format + * Base interface for iterators that retrieve EbeanAspectV2 objects. This allows us to restore from + * backups of various formats */ @Slf4j @RequiredArgsConstructor @@ -35,12 +34,13 @@ public T getNextReader() { @Override public void close() { - _readers.forEach(reader -> { - try { 
reader.close(); - } catch (IOException e) { - log.error("Error while closing parquet reader", e); - } - }); + _readers.forEach( + reader -> { + try { + reader.close(); + } catch (IOException e) { + log.error("Error while closing parquet reader", e); + } + }); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java index 9b8a3133ac04c..9f0f81f466cfa 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java @@ -14,10 +14,7 @@ import org.apache.parquet.avro.AvroParquetReader; import org.apache.parquet.hadoop.ParquetReader; - -/** - * BackupReader for retrieving EbeanAspectV2 objects from a local parquet file - */ +/** BackupReader for retrieving EbeanAspectV2 objects from a local parquet file */ @Slf4j public class LocalParquetReader implements BackupReader { @@ -46,16 +43,20 @@ public String getName() { public EbeanAspectBackupIterator getBackupIterator(UpgradeContext context) { Optional path = context.parsedArgs().get("BACKUP_FILE_PATH"); if (!path.isPresent()) { - context.report().addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); + context + .report() + .addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); throw new IllegalArgumentException( "BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); } try { - ParquetReader reader = AvroParquetReader.builder(new Path(path.get())).build(); - return new EbeanAspectBackupIterator<>(ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); + ParquetReader reader = + AvroParquetReader.builder(new Path(path.get())).build(); + return new EbeanAspectBackupIterator<>( + ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); } catch (IOException e) { throw new RuntimeException(String.format("Failed to build ParquetReader: %s", e)); } } -} \ No newline at end of file +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java index 2b7cacff65249..01c502221f77f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java @@ -15,9 +15,9 @@ @Slf4j public class ParquetReaderWrapper extends ReaderWrapper { - private final static long NANOS_PER_MILLISECOND = 1000000; - private final static long MILLIS_IN_DAY = 86400000; - private final static long JULIAN_EPOCH_OFFSET_DAYS = 2440588; + private static final long NANOS_PER_MILLISECOND = 1000000; + private static final long MILLIS_IN_DAY = 86400000; + private static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588; private final ParquetReader _parquetReader; @@ -45,22 +45,30 @@ EbeanAspectV2 convertRecord(GenericRecord record) { ts = (Long) record.get("createdon"); } - return new EbeanAspectV2(record.get("urn").toString(), record.get("aspect").toString(), - (Long) record.get("version"), record.get("metadata").toString(), - Timestamp.from(Instant.ofEpochMilli(ts / 1000)), 
record.get("createdby").toString(), + return new EbeanAspectV2( + record.get("urn").toString(), + record.get("aspect").toString(), + (Long) record.get("version"), + record.get("metadata").toString(), + Timestamp.from(Instant.ofEpochMilli(ts / 1000)), + record.get("createdby").toString(), Optional.ofNullable(record.get("createdfor")).map(Object::toString).orElse(null), Optional.ofNullable(record.get("systemmetadata")).map(Object::toString).orElse(null)); } private long convertFixed96IntToTs(GenericFixed createdon) { // From https://github.com/apache/parquet-format/pull/49/filesParquetTimestampUtils.java - // and ParquetTimestampUtils.java from https://github.com/kube-reporting/presto/blob/master/presto-parquet/ + // and ParquetTimestampUtils.java from + // https://github.com/kube-reporting/presto/blob/master/presto-parquet/ // src/main/java/io/prestosql/parquet/ParquetTimestampUtils.java byte[] bytes = createdon.bytes(); // little endian encoding - need to invert byte order - long timeOfDayNanos = Longs.fromBytes(bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); + long timeOfDayNanos = + Longs.fromBytes( + bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); int julianDay = Ints.fromBytes(bytes[11], bytes[10], bytes[9], bytes[8]); - return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + (timeOfDayNanos / NANOS_PER_MILLISECOND); + return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + + (timeOfDayNanos / NANOS_PER_MILLISECOND); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java index d0db42e678eea..48d0fa2fda04c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java @@ -5,9 +5,10 @@ import java.io.IOException; import lombok.extern.slf4j.Slf4j; - /** - * Abstract class that reads entries from a given source and transforms then into {@link EbeanAspectV2} instances. + * Abstract class that reads entries from a given source and transforms them into {@link + * EbeanAspectV2} instances. + * + * @param The object type to read from a reader source. */ @Slf4j @@ -69,9 +70,15 @@ record = read(); abstract EbeanAspectV2 convertRecord(T record); private void printStat(String prefix) { - log.info("{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," + " records failed: {}, Total millis in convert: {}", prefix, _fileName, - recordsProcessed, totalTimeSpentInRead / 1000 / 1000, recordsSkipped, recordsFailed, + log.info( + "{} Reader {}. 
Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," + + " records failed: {}, Total millis in convert: {}", + prefix, + _fileName, + recordsProcessed, + totalTimeSpentInRead / 1000 / 1000, + recordsSkipped, + recordsFailed, totalTimeSpentInConvert / 1000 / 1000); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 3c0a9762a28c9..8bb3b0073710a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -14,7 +14,6 @@ import java.util.ArrayList; import java.util.List; - public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; public static final String BATCH_DELAY_MS_ARG_NAME = "batchDelayMs"; @@ -29,8 +28,11 @@ public class RestoreIndices implements Upgrade { private final List _steps; - public RestoreIndices(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + public RestoreIndices( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); } @@ -45,8 +47,11 @@ public List steps() { return _steps; } - private List buildSteps(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + private List buildSteps( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { final List steps = new ArrayList<>(); steps.add(new ClearSearchServiceStep(entitySearchService, false)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index 2ac4fea2e653a..ce59cf2edb84e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.restoreindices; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -11,7 +13,6 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import io.ebean.ExpressionList; - import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -23,9 +24,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; @@ -38,19 +36,24 @@ public class SendMAEStep implements UpgradeStep { private final EntityService _entityService; public class KafkaJob implements Callable { - UpgradeContext context; - RestoreIndicesArgs args; - public 
KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { - this.context = context; - this.args = args; - } - @Override - public RestoreIndicesResult call() { - return _entityService.restoreIndices(args, context.report()::addLine); - } + UpgradeContext context; + RestoreIndicesArgs args; + + public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { + this.context = context; + this.args = args; + } + + @Override + public RestoreIndicesResult call() { + return _entityService.restoreIndices(args, context.report()::addLine); + } } - public SendMAEStep(final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { + public SendMAEStep( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry) { _server = server; _entityService = entityService; } @@ -67,7 +70,7 @@ public int retryCount() { private List iterateFutures(List> futures) { List result = new ArrayList<>(); - for (Future future: new ArrayList<>(futures)) { + for (Future future : new ArrayList<>(futures)) { if (future.isDone()) { try { result.add(future.get()); @@ -100,9 +103,10 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) { private int getRowCount(RestoreIndicesArgs args) { ExpressionList countExp = - _server.find(EbeanAspectV2.class) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); + _server + .find(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); if (args.aspectName != null) { countExp = countExp.eq(EbeanAspectV2.ASPECT_COLUMN, args.aspectName); } @@ -120,13 +124,18 @@ public Function executable() { return (context) -> { RestoreIndicesResult finalJobResult = new RestoreIndicesResult(); RestoreIndicesArgs args = getArgs(context); - ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); + ThreadPoolExecutor executor = + (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); context.report().addLine("Sending MAE from local DB"); long startTime = System.currentTimeMillis(); final int rowCount = getRowCount(args); - context.report().addLine(String.format("Found %s latest aspects in aspects table in %.2f minutes.", - rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); + context + .report() + .addLine( + String.format( + "Found %s latest aspects in aspects table in %.2f minutes.", + rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); int start = args.start; List> futures = new ArrayList<>(); @@ -139,7 +148,7 @@ public Function executable() { } while (futures.size() > 0) { List tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult: tmpResults) { + for (RestoreIndicesResult tmpResult : tmpResults) { reportStats(context, finalJobResult, tmpResult, rowCount, startTime); } } @@ -149,16 +158,23 @@ public Function executable() { if (rowCount > 0) { percentFailed = (float) (rowCount - finalJobResult.rowsMigrated) * 100 / rowCount; } - context.report().addLine(String.format( - "Failed to send MAEs for %d rows (%.2f%% of total).", - rowCount - finalJobResult.rowsMigrated, percentFailed)); + context + .report() + .addLine( + String.format( + "Failed to send MAEs for %d rows (%.2f%% of total).", + rowCount - finalJobResult.rowsMigrated, percentFailed)); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } - private static void reportStats(UpgradeContext context, RestoreIndicesResult finalResult, RestoreIndicesResult tmpResult, - int 
rowCount, long startTime) { + private static void reportStats( + UpgradeContext context, + RestoreIndicesResult finalResult, + RestoreIndicesResult tmpResult, + int rowCount, + long startTime) { finalResult.ignored += tmpResult.ignored; finalResult.rowsMigrated += tmpResult.rowsMigrated; finalResult.timeSqlQueryMs += tmpResult.timeSqlQueryMs; @@ -178,11 +194,22 @@ private static void reportStats(UpgradeContext context, RestoreIndicesResult fin estimatedTimeMinutesComplete = timeSoFarMinutes * (100 - percentSent) / percentSent; } float totalTimeComplete = timeSoFarMinutes + estimatedTimeMinutesComplete; - context.report().addLine(String.format( - "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", - finalResult.rowsMigrated, rowCount, percentSent, finalResult.ignored, percentIgnored)); - context.report().addLine(String.format("%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", - timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); + context + .report() + .addLine( + String.format( + "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", + finalResult.rowsMigrated, + rowCount, + percentSent, + finalResult.ignored, + percentIgnored)); + context + .report() + .addLine( + String.format( + "%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", + timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); } private int getBatchSize(final Map> parsedArgs) { @@ -196,7 +223,8 @@ private int getStartingOffset(final Map> parsedArgs) { private long getBatchDelayMs(final Map> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (containsKey(parsedArgs, RestoreIndices.BATCH_DELAY_MS_ARG_NAME)) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } @@ -205,7 +233,8 @@ private int getThreadCount(final Map> parsedArgs) { return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME); } - private int getInt(final Map> parsedArgs, int defaultVal, String argKey) { + private int getInt( + final Map> parsedArgs, int defaultVal, String argKey) { int result = defaultVal; if (containsKey(parsedArgs, argKey)) { result = Integer.parseInt(parsedArgs.get(argKey).get()); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index 4a8211f2cd4ac..aba751bff8177 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -8,47 +8,48 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; import com.linkedin.metadata.dao.producer.KafkaEventProducer; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class SystemUpdate implements Upgrade { - private final List _preStartupUpgrades; - private final List _postStartupUpgrades; - private final List _steps; - - public SystemUpdate(final BuildIndices buildIndicesJob, final CleanIndices cleanIndicesJob, - final KafkaEventProducer 
kafkaEventProducer, final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { - - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); - } - - @Override - public String id() { - return "SystemUpdate"; - } - - @Override - public List steps() { - return Stream.concat(Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), - _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); - } - - @Override - public List cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); - } + private final List _preStartupUpgrades; + private final List _postStartupUpgrades; + private final List _steps; + + public SystemUpdate( + final BuildIndices buildIndicesJob, + final CleanIndices cleanIndicesJob, + final KafkaEventProducer kafkaEventProducer, + final String version, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + + _preStartupUpgrades = List.of(buildIndicesJob); + _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); + _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); + } + + @Override + public String id() { + return "SystemUpdate"; + } + + @Override + public List steps() { + return Stream.concat( + Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), _steps.stream()), + _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) + .collect(Collectors.toList()); + } + + @Override + public List cleanupSteps() { + return Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), + _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) + .collect(Collectors.toList()); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index 1da5b6d6a25ce..eb76a72fba71a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -2,9 +2,9 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; import com.linkedin.metadata.graph.GraphService; @@ -17,49 +17,54 @@ import java.util.stream.Collectors; import java.util.stream.Stream; - public class BuildIndices implements Upgrade { - private final List _steps; - - public BuildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final 
BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - - final ConfigurationProvider configurationProvider) { - + private final List _steps; - List indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); + public BuildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); - } + List indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); - @Override - public String id() { - return "BuildIndices"; - } + _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); + } - @Override - public List steps() { - return _steps; - } + @Override + public String id() { + return "BuildIndices"; + } - private List buildSteps(final List indexedServices, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + @Override + public List steps() { + return _steps; + } - final List steps = new ArrayList<>(); - // Disable ES write mode/change refresh rate and clone indices - steps.add(new BuildIndicesPreStep(baseElasticSearchComponents, indexedServices, configurationProvider)); - // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService - steps.add(new BuildIndicesStep(indexedServices)); - // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in pre-configure step if it already exists? - steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); - return steps; - } + private List buildSteps( + final List indexedServices, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + final List steps = new ArrayList<>(); + // Disable ES write mode/change refresh rate and clone indices + steps.add( + new BuildIndicesPreStep( + baseElasticSearchComponents, indexedServices, configurationProvider)); + // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService + steps.add(new BuildIndicesStep(indexedServices)); + // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in + // pre-configure step if it already exists? 
+ steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); + return steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index 1fb9c8526ad3b..ad68386622b21 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -10,41 +10,45 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CleanIndices implements Upgrade { - private final List _steps; - - public CleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { - - List indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); - - _steps = List.of(new CleanIndicesStep( + private final List _steps; + + public CleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + + List indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); + + _steps = + List.of( + new CleanIndicesStep( baseElasticSearchComponents.getSearchClient(), configurationProvider.getElasticSearch(), indexedServices)); - } + } - @Override - public String id() { - return "CleanIndices"; - } + @Override + public String id() { + return "CleanIndices"; + } - @Override - public List steps() { - return _steps; - } + @Override + public List steps() { + return _steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java index 2feca1f27e625..a44f6d6487067 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static 
com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -13,16 +16,11 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.client.RequestOptions; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPostStep implements UpgradeStep { @@ -45,8 +43,9 @@ public Function executable() { return (context) -> { try { - List indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); // Reset write blocking @@ -56,12 +55,26 @@ public Function executable() { request.settings(indexSettings); boolean ack = - _esComponents.getSearchClient().indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); if (ack) { - ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexConfig.name(), false); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + ack = + IndexUtils.validateWriteBlock( + _esComponents.getSearchClient(), indexConfig.name(), false); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); } if (!ack) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java index 82b9428c89fb8..c25888be07f89 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -8,15 +11,13 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; - +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.OpenSearchStatusException; @@ -24,10 +25,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.indices.ResizeRequest; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPreStep implements UpgradeStep { @@ -50,16 +47,19 @@ public Function executable() { return (context) -> { try { // Get indices to update - List indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); for (ReindexConfig indexConfig : indexConfigs) { - String indexName = IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); + String indexName = + IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); boolean ack = blockWrites(indexName); if (!ack) { - log.error("Partial index settings update, some indices may still be blocking writes." + log.error( + "Partial index settings update, some indices may still be blocking writes." 
+ " Please fix the error and re-run the BuildIndices upgrade job."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -69,10 +69,16 @@ public Function executable() { String clonedName = indexConfig.name() + "_clone_" + System.currentTimeMillis(); ResizeRequest resizeRequest = new ResizeRequest(clonedName, indexName); boolean cloneAck = - _esComponents.getSearchClient().indices().clone(resizeRequest, RequestOptions.DEFAULT).isAcknowledged(); + _esComponents + .getSearchClient() + .indices() + .clone(resizeRequest, RequestOptions.DEFAULT) + .isAcknowledged(); log.info("Cloned index {} into {}, Acknowledged: {}", indexName, clonedName, cloneAck); if (!cloneAck) { - log.error("Partial index settings update, cloned indices may need to be cleaned up: {}", clonedName); + log.error( + "Partial index settings update, cloned indices may need to be cleaned up: {}", + clonedName); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } @@ -85,8 +91,6 @@ public Function executable() { }; } - - private boolean blockWrites(String indexName) throws InterruptedException, IOException { UpdateSettingsRequest request = new UpdateSettingsRequest(indexName); Map indexSettings = ImmutableMap.of(INDEX_BLOCKS_WRITE_SETTING, "true"); @@ -94,13 +98,23 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc request.settings(indexSettings); boolean ack; try { - ack = _esComponents.getSearchClient().indices() - .putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + ack = + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } catch (OpenSearchStatusException | IOException ese) { - // Cover first run case, indices won't exist so settings updates won't work nor will the rest of the preConfigure steps. + // Cover first run case, indices won't exist so settings updates won't work nor will the rest + // of the preConfigure steps. // Since no data are in there they are skippable. - // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way to extract it :( + // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way + // to extract it :( if (ese.getMessage().contains("index_not_found")) { return true; } else { @@ -110,7 +124,11 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc if (ack) { ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexName, true); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } return ack; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java index ef59f2998929e..d37ee173bd9af 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java @@ -5,13 +5,11 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.shared.ElasticSearchIndexed; - import java.util.List; import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class BuildIndicesStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java index bb042bac6df95..c3a4d8ab89c07 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java @@ -1,54 +1,55 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.shared.ElasticSearchIndexed; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; - import java.util.List; import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; +import org.opensearch.client.RestHighLevelClient; @Slf4j public class CleanIndicesStep implements UpgradeStep { - private final RestHighLevelClient searchClient; - private final ElasticSearchConfiguration esConfig; - private final List indexedServices; - - public CleanIndicesStep(final RestHighLevelClient searchClient, final ElasticSearchConfiguration esConfig, - final List indexedServices) { - this.searchClient = searchClient; - this.esConfig = esConfig; - this.indexedServices = indexedServices; - } - - @Override - public String id() { - return "CleanUpIndicesStep"; - } - - @Override - public int retryCount() { - return 0; - } - - @Override - public Function executable() { - return (context) -> { - try { - IndexUtils.getAllReindexConfigs(indexedServices) - .forEach(reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); - } catch (Exception e) { - log.error("CleanUpIndicesStep failed.", e); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); - } - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } + private final RestHighLevelClient searchClient; + private final ElasticSearchConfiguration 
esConfig; + private final List indexedServices; + + public CleanIndicesStep( + final RestHighLevelClient searchClient, + final ElasticSearchConfiguration esConfig, + final List indexedServices) { + this.searchClient = searchClient; + this.esConfig = esConfig; + this.indexedServices = indexedServices; + } + + @Override + public String id() { + return "CleanUpIndicesStep"; + } + + @Override + public int retryCount() { + return 0; + } + + @Override + public Function executable() { + return (context) -> { + try { + IndexUtils.getAllReindexConfigs(indexedServices) + .forEach( + reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); + } catch (Exception e) { + log.error("CleanUpIndicesStep failed.", e); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index 1e568f1e9a9fe..b4a506c3f5c63 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -6,12 +6,10 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; +import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.function.Function; - - @RequiredArgsConstructor @Slf4j public class DataHubStartupStep implements UpgradeStep { @@ -32,8 +30,8 @@ public int retryCount() { public Function executable() { return (context) -> { try { - DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent() - .setVersion(_version); + DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = + new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); log.info("Initiating startup for version: {}", _version); } catch (Exception e) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java index d9788448444ed..b3de7c503fb3e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java @@ -2,6 +2,10 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -11,22 +15,18 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - - @Slf4j public class IndexUtils { public static final String 
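The classes reformatted above all implement the same three-method UpgradeStep contract: a stable id, a retry budget, and an executable that maps an UpgradeContext to a step result. For reference, a minimal, hypothetical no-op step against those interfaces looks like this; the class name and log message are invented, and executable() is written with the Function<UpgradeContext, UpgradeStepResult> signature implied by the lambdas above.

    import com.linkedin.datahub.upgrade.UpgradeContext;
    import com.linkedin.datahub.upgrade.UpgradeStep;
    import com.linkedin.datahub.upgrade.UpgradeStepResult;
    import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult;
    import java.util.function.Function;
    import lombok.extern.slf4j.Slf4j;

    @Slf4j
    public class NoOpStep implements UpgradeStep {

      @Override
      public String id() {
        return "NoOpStep";
      }

      @Override
      public int retryCount() {
        return 0; // fail immediately rather than retrying
      }

      @Override
      public Function<UpgradeContext, UpgradeStepResult> executable() {
        return (context) -> {
          log.info("NoOpStep executed.");
          return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED);
        };
      }
    }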
INDEX_BLOCKS_WRITE_SETTING = "index.blocks.write"; public static final int INDEX_BLOCKS_WRITE_RETRY = 4; public static final int INDEX_BLOCKS_WRITE_WAIT_SECONDS = 10; - private IndexUtils() { } + + private IndexUtils() {} private static List _reindexConfigs = new ArrayList<>(); - public static List getAllReindexConfigs(List elasticSearchIndexedList) throws IOException { + public static List getAllReindexConfigs( + List elasticSearchIndexedList) throws IOException { // Avoid locking & reprocessing List reindexConfigs = new ArrayList<>(_reindexConfigs); if (reindexConfigs.isEmpty()) { @@ -39,19 +39,24 @@ public static List getAllReindexConfigs(List 0) { - GetSettingsResponse response = esClient.indices().getSettings(request, RequestOptions.DEFAULT); - if (response.getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING).equals(String.valueOf(expectedState))) { + GetSettingsResponse response = + esClient.indices().getSettings(request, RequestOptions.DEFAULT); + if (response + .getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING) + .equals(String.valueOf(expectedState))) { return true; } count = count - 1; @@ -64,20 +69,20 @@ public static boolean validateWriteBlock(RestHighLevelClient esClient, String in return false; } - public static String resolveAlias(RestHighLevelClient esClient, String indexName) throws IOException { + public static String resolveAlias(RestHighLevelClient esClient, String indexName) + throws IOException { String finalIndexName = indexName; - GetAliasesResponse aliasResponse = esClient.indices() - .getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); + GetAliasesResponse aliasResponse = + esClient.indices().getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); if (!aliasResponse.getAliases().isEmpty()) { Set indices = aliasResponse.getAliases().keySet(); if (indices.size() != 1) { throw new NotImplementedException( - String.format("Clone not supported for %s indices in alias %s. Indices: %s", - indices.size(), - indexName, - String.join(",", indices))); + String.format( + "Clone not supported for %s indices in alias %s. 
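validateWriteBlock, as reformatted above, is a bounded poll: re-read the index settings, compare index.blocks.write against the expected state, and wait between attempts. Condensed, with the two constants inlined, the loop amounts to the sketch below; the GetSettingsRequest construction and the sleep between attempts are assumptions, since only the response handling appears in this hunk.

    import java.io.IOException;
    import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest;
    import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse;
    import org.opensearch.client.RequestOptions;
    import org.opensearch.client.RestHighLevelClient;

    public final class WriteBlockPoller {
      private WriteBlockPoller() {}

      /** Polls index.blocks.write until it equals expectedState or the retries run out. */
      public static boolean awaitWriteBlock(
          RestHighLevelClient client, String indexName, boolean expectedState)
          throws IOException, InterruptedException {
        GetSettingsRequest request = new GetSettingsRequest().indices(indexName);
        for (int attempt = 0; attempt < 4; attempt++) { // INDEX_BLOCKS_WRITE_RETRY
          GetSettingsResponse response =
              client.indices().getSettings(request, RequestOptions.DEFAULT);
          String actual = response.getSetting(indexName, "index.blocks.write");
          if (String.valueOf(expectedState).equals(actual)) {
            return true;
          }
          Thread.sleep(10_000L); // INDEX_BLOCKS_WRITE_WAIT_SECONDS, in milliseconds
        }
        return false;
      }
    }

Putting the expected value on the left of equals() also guards against a null setting on indices that have never been blocked, a small departure from the comparison order shown above.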
Indices: %s", + indices.size(), indexName, String.join(",", indices))); } finalIndexName = indices.stream().findFirst().get(); log.info("Alias {} resolved to index {}", indexName, finalIndexName); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java index e213c0b2fd4de..03f0b0b7f2ec2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.search.SearchService; import java.util.List; - public class BackfillBrowsePathsV2 implements Upgrade { private final List _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 08a752d9597f4..610d9069337a5 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.system.entity.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -27,32 +29,29 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.util.Set; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class BackfillBrowsePathsV2Step implements UpgradeStep { public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = + "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; - private static final Set ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); + private static final Set ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); private static final Integer BATCH_SIZE = 5000; private final EntityService _entityService; @@ -67,14 +66,18 @@ public BackfillBrowsePathsV2Step(EntityService entityService, SearchService sear public Function executable() { return (context) -> { final AuditStamp auditStamp = - new 
AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", migratedCount, - migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -88,22 +91,26 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final Filter filter; if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) - && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { filter = backfillDefaultBrowsePathsV2Filter(); - } else { + } else { filter = backfillBrowsePathsV2Filter(); } - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - null, - BATCH_SIZE, - new SearchFlags().setFulltext(true).setSkipCache(true).setSkipHighlighting(true).setSkipAggregates(true) - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), + "*", + filter, + null, + scrollId, + null, + BATCH_SIZE, + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -113,7 +120,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -177,13 +188,10 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - true - ); + _entityService.ingestProposal(proposal, auditStamp, true); } @Override @@ -192,7 +200,8 @@ public String id() { } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. 
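Per urn, the ingestion half of this step wraps the new aspect in a MetadataChangeProposal and submits it through the entity service. Pulled out of the scroll loop, that assembly is roughly the following sketch; the setEntityUrn call and the reading of the trailing boolean as an async flag are assumptions (neither is visible in the hunks above), and the aspect parameter is typed loosely as a RecordTemplate.

    import com.linkedin.common.AuditStamp;
    import com.linkedin.common.urn.Urn;
    import com.linkedin.data.template.RecordTemplate;
    import com.linkedin.events.metadata.ChangeType;
    import com.linkedin.metadata.Constants;
    import com.linkedin.metadata.entity.EntityService;
    import com.linkedin.metadata.utils.GenericRecordUtils;
    import com.linkedin.mxe.MetadataChangeProposal;
    import com.linkedin.mxe.SystemMetadata;

    public final class BrowsePathsProposalSketch {
      private BrowsePathsProposalSketch() {}

      /** Wraps a browsePathsV2 aspect in an UPSERT proposal and hands it to the entity service. */
      public static void propose(
          EntityService entityService, Urn urn, RecordTemplate browsePathsV2, AuditStamp auditStamp) {
        MetadataChangeProposal proposal = new MetadataChangeProposal();
        proposal.setEntityUrn(urn); // assumed: set outside the hunks shown above
        proposal.setEntityType(urn.getEntityType());
        proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME);
        proposal.setChangeType(ChangeType.UPSERT);
        proposal.setSystemMetadata(
            new SystemMetadata()
                .setRunId(Constants.DEFAULT_RUN_ID)
                .setLastObserved(System.currentTimeMillis()));
        proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2));
        // Trailing boolean treated as the async flag here (an assumption about this version).
        entityService.ingestProposal(proposal, auditStamp, true);
      }
    }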
*/ @Override public boolean isOptional() { @@ -204,4 +213,3 @@ public boolean skip(UpgradeContext context) { return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); } } - diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index db697a40d0c6c..83b8e028727ce 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -1,70 +1,73 @@ package com.linkedin.datahub.upgrade; +import static org.testng.AssertJUnit.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.datahub.upgrade.system.SystemUpdate; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, - properties = { - "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + "kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" + }) public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("systemUpdate") - private SystemUpdate systemUpdate; - - @Test - public void testSystemUpdateInit() { - assertNotNull(systemUpdate); - } + @Autowired + @Named("systemUpdate") + private SystemUpdate systemUpdate; - @Test - public void testSystemUpdateSend() { - UpgradeStepResult.Result result = systemUpdate.steps().stream() - .filter(s -> s.id().equals("DataHubStartupStep")) - .findFirst().get() - .executable().apply(new UpgradeContext() { - @Override - public Upgrade upgrade() { - return null; - } + @Test + public void testSystemUpdateInit() { + assertNotNull(systemUpdate); + } - @Override - public List stepResults() { - return null; - } + @Test + public void testSystemUpdateSend() { + UpgradeStepResult.Result result = + systemUpdate.steps().stream() + .filter(s -> s.id().equals("DataHubStartupStep")) + .findFirst() + .get() + .executable() + .apply( + new UpgradeContext() { + @Override + public Upgrade upgrade() { + return null; + } - @Override - public UpgradeReport report() { - return null; - } + @Override + public List stepResults() { + return null; + } - @Override - public List args() { - return null; - } + @Override + public UpgradeReport report() { + return null; + } - @Override - public Map> parsedArgs() { - return null; - } - }).result(); - assertEquals("SUCCEEDED", result.toString()); - } + @Override + public List args() { + return null; + } + @Override + public Map> parsedArgs() { + return null; + } + }) + .result(); + assertEquals("SUCCEEDED", 
result.toString()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 74cde414adc2f..3e655be900bf2 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -1,49 +1,48 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import static org.testng.AssertJUnit.*; + import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import static org.testng.AssertJUnit.*; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("restoreIndices") - private RestoreIndices restoreIndices; - - @Autowired - @Named("buildIndices") - private BuildIndices buildIndices; - - @Autowired - private ESIndexBuilder esIndexBuilder; - - @Test - public void testRestoreIndicesInit() { - /* - This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean - */ - assertTrue(restoreIndices.steps().size() >= 3); - } - - @Test - public void testBuildIndicesInit() { - assertEquals("BuildIndices", buildIndices.id()); - assertTrue(buildIndices.steps().size() >= 3); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); - assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); - assertFalse(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); - } - + @Autowired + @Named("restoreIndices") + private RestoreIndices restoreIndices; + + @Autowired + @Named("buildIndices") + private BuildIndices buildIndices; + + @Autowired private ESIndexBuilder esIndexBuilder; + + @Test + public void testRestoreIndicesInit() { + /* + This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean + */ + assertTrue(restoreIndices.steps().size() >= 3); + } + + @Test + public void testBuildIndicesInit() { + assertEquals("BuildIndices", buildIndices.id()); + assertTrue(buildIndices.steps().size() >= 3); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); + assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); + assertFalse( + esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); + } } diff --git 
a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index 6cc853b2c7c4d..0e7bf5ddd5250 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -16,27 +16,19 @@ @Import(value = {SystemAuthenticationFactory.class}) public class UpgradeCliApplicationTestConfiguration { - @MockBean - private UpgradeCli upgradeCli; + @MockBean private UpgradeCli upgradeCli; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityService _entityService; + @MockBean private EntityService _entityService; - @MockBean - private SearchService searchService; + @MockBean private SearchService searchService; - @MockBean - private GraphService graphService; + @MockBean private GraphService graphService; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - ConfigEntityRegistry configEntityRegistry; + @MockBean ConfigEntityRegistry configEntityRegistry; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/docker/build.gradle b/docker/build.gradle index c7f783af6c997..bc79be501b395 100644 --- a/docker/build.gradle +++ b/docker/build.gradle @@ -158,4 +158,4 @@ task quickstartPg(type: Exec, dependsOn: ':metadata-ingestion:install') { ] + pg_compose_args commandLine 'bash', '-c', cmd.join(" ") -} +} \ No newline at end of file diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java index 015a0a9a0f14a..9cf8b4174ecfb 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/AspectSpec.java @@ -27,12 +27,11 @@ public class AspectSpec { // Classpath & Pegasus-specific: Temporary. 
private final RecordDataSchema _schema; private final Class _aspectClass; - @Setter @Getter - private String registryName = "unknownRegistry"; - @Setter @Getter - private ComparableVersion registryVersion = new ComparableVersion("0.0.0.0-dev"); + @Setter @Getter private String registryName = "unknownRegistry"; + @Setter @Getter private ComparableVersion registryVersion = new ComparableVersion("0.0.0.0-dev"); - public AspectSpec(@Nonnull final AspectAnnotation aspectAnnotation, + public AspectSpec( + @Nonnull final AspectAnnotation aspectAnnotation, @Nonnull final List searchableFieldSpecs, @Nonnull final List searchScoreFieldSpecs, @Nonnull final List relationshipFieldSpecs, @@ -41,18 +40,35 @@ public AspectSpec(@Nonnull final AspectAnnotation aspectAnnotation, final RecordDataSchema schema, final Class aspectClass) { _aspectAnnotation = aspectAnnotation; - _searchableFieldSpecs = searchableFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _searchScoreFieldSpecs = searchScoreFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _relationshipFieldSpecs = relationshipFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); - _timeseriesFieldSpecs = timeseriesFieldSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getTimeseriesFieldAnnotation().getStatName(), spec -> spec, - (val1, val2) -> val1)); - _timeseriesFieldCollectionSpecs = timeseriesFieldCollectionSpecs.stream() - .collect(Collectors.toMap(spec -> spec.getTimeseriesFieldCollectionAnnotation().getCollectionName(), spec -> spec, - (val1, val2) -> val1)); + _searchableFieldSpecs = + searchableFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _searchScoreFieldSpecs = + searchScoreFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _relationshipFieldSpecs = + relationshipFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getPath().toString(), spec -> spec, (val1, val2) -> val1)); + _timeseriesFieldSpecs = + timeseriesFieldSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getTimeseriesFieldAnnotation().getStatName(), + spec -> spec, + (val1, val2) -> val1)); + _timeseriesFieldCollectionSpecs = + timeseriesFieldCollectionSpecs.stream() + .collect( + Collectors.toMap( + spec -> spec.getTimeseriesFieldCollectionAnnotation().getCollectionName(), + spec -> spec, + (val1, val2) -> val1)); _schema = schema; _aspectClass = aspectClass; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java index 766944e150390..b235e2adcae11 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ConfigEntitySpec.java @@ -10,10 +10,8 @@ import java.util.function.Function; import java.util.stream.Collectors; import javax.annotation.Nonnull; - import lombok.ToString; - @ToString public class ConfigEntitySpec implements EntitySpec { @@ -26,7 +24,8 @@ public ConfigEntitySpec( @Nonnull final String entityName, @Nonnull final String keyAspect, @Nonnull final Collection aspectSpecs) { - _aspectSpecs = 
aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = new EntityAnnotation(entityName, keyAspect); } @@ -72,12 +71,14 @@ public AspectSpec getAspectSpec(final String name) { @Override public RecordDataSchema getSnapshotSchema() { - throw new UnsupportedOperationException("Failed to find Snapshot associated with Config-based Entity"); + throw new UnsupportedOperationException( + "Failed to find Snapshot associated with Config-based Entity"); } @Override public TyperefDataSchema getAspectTyperefSchema() { - throw new UnsupportedOperationException("Failed to find Typeref schema associated with Config-based Entity"); + throw new UnsupportedOperationException( + "Failed to find Typeref schema associated with Config-based Entity"); } @Override @@ -89,4 +90,3 @@ public List getSearchableFieldSpecs() { return _searchableFieldSpecs; } } - diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java index ddc87aacc72cf..b9766d0ca8640 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DataSchemaFactory.java @@ -23,7 +23,6 @@ import lombok.extern.slf4j.Slf4j; import org.reflections.Reflections; - /** * Factory class to get a map of all entity schemas and aspect schemas under com.linkedin package * This lets us fetch the PDL data schema of an arbitrary entity or aspect based on their names @@ -39,22 +38,25 @@ public class DataSchemaFactory { private static final String NAME_FIELD = "name"; private static final DataSchemaFactory INSTANCE = new DataSchemaFactory(); - private static final String[] DEFAULT_TOP_LEVEL_NAMESPACES = new String[]{"com", "org", "io", "datahub"}; + private static final String[] DEFAULT_TOP_LEVEL_NAMESPACES = + new String[] {"com", "org", "io", "datahub"}; public DataSchemaFactory() { - this(new String[]{"com.linkedin", "com.datahub"}); + this(new String[] {"com.linkedin", "com.datahub"}); } public DataSchemaFactory(String classPath) { - this(new String[]{classPath}); + this(new String[] {classPath}); } + public DataSchemaFactory(String[] classPaths) { this(classPaths, null); } /** - * Construct a DataSchemaFactory with classes and schemas found under a specific folder. - * This will only look for classes under the `com`, `org` or `datahub` top level namespaces. + * Construct a DataSchemaFactory with classes and schemas found under a specific folder. This will + * only look for classes under the `com`, `org` or `datahub` top level namespaces. + * * @param pluginLocation The location of the classes and schema files. */ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws IOException { @@ -66,15 +68,18 @@ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws File pluginDir = pluginLocation.toFile(); if (!pluginDir.exists()) { throw new RuntimeException( - "Failed to find plugin directory " + pluginDir.getAbsolutePath() + ". Current directory is " + new File( - ".").getAbsolutePath()); + "Failed to find plugin directory " + + pluginDir.getAbsolutePath() + + ". 
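The AspectSpec collectors rewrapped above each pass a third argument, (val1, val2) -> val1. That merge function is what makes duplicate keys tolerable: without it, Collectors.toMap throws IllegalStateException on the first collision; with it, the first spec registered under a name wins. A tiny self-contained illustration:

    import java.util.List;
    import java.util.Map;
    import java.util.function.Function;
    import java.util.stream.Collectors;

    public final class ToMapMergeDemo {
      private ToMapMergeDemo() {}

      public static void main(String[] args) {
        List<String> keys = List.of("urn", "name", "urn");
        // Duplicate key "urn": the (first, second) -> first merge keeps the first mapping
        // instead of throwing IllegalStateException.
        Map<String, Integer> lengths =
            keys.stream()
                .collect(
                    Collectors.toMap(
                        Function.identity(), String::length, (first, second) -> first));
        System.out.println(lengths); // {urn=3, name=4} (iteration order may vary)
      }
    }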
Current directory is " + + new File(".").getAbsolutePath()); } List urls = new ArrayList(); if (pluginDir.isDirectory()) { - List jarFiles = Files.walk(pluginLocation) - .filter(Files::isRegularFile) - .filter(p -> p.toString().endsWith(".jar")) - .collect(Collectors.toList()); + List jarFiles = + Files.walk(pluginLocation) + .filter(Files::isRegularFile) + .filter(p -> p.toString().endsWith(".jar")) + .collect(Collectors.toList()); for (Path f : jarFiles) { URL url = f.toUri().toURL(); if (url != null) { @@ -87,12 +92,14 @@ public static DataSchemaFactory withCustomClasspath(Path pluginLocation) throws } URL[] urlsArray = new URL[urls.size()]; urls.toArray(urlsArray); - URLClassLoader classLoader = new URLClassLoader(urlsArray, Thread.currentThread().getContextClassLoader()); + URLClassLoader classLoader = + new URLClassLoader(urlsArray, Thread.currentThread().getContextClassLoader()); return new DataSchemaFactory(DEFAULT_TOP_LEVEL_NAMESPACES, classLoader); } /** - * Construct a DataSchemaFactory with a custom class loader and a list of class namespaces to look for entities and aspects. + * Construct a DataSchemaFactory with a custom class loader and a list of class namespaces to look + * for entities and aspects. */ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader) { entitySchemas = new HashMap<>(); @@ -120,7 +127,8 @@ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader Reflections reflections = new Reflections(namespace, standardClassLoader); stdClasses.addAll(reflections.getSubTypesOf(RecordTemplate.class)); } - log.debug("Standard ClassLoader found a total of {} RecordTemplate classes", stdClasses.size()); + log.debug( + "Standard ClassLoader found a total of {} RecordTemplate classes", stdClasses.size()); classes.removeAll(stdClasses); log.debug("Finally found a total of {} RecordTemplate classes to inspect", classes.size()); } @@ -135,15 +143,19 @@ public DataSchemaFactory(String[] classNamespaces, ClassLoader customClassLoader if (schema != null) { DataSchema finalSchema = schema; - getName(schema, EntityAnnotation.ANNOTATION_NAME).ifPresent( - entityName -> entitySchemas.put(entityName, finalSchema)); - getName(schema, AspectAnnotation.ANNOTATION_NAME).ifPresent(aspectName -> { - aspectSchemas.put(aspectName, finalSchema); - aspectClasses.put(aspectName, recordClass); - }); - getName(schema, EventAnnotation.ANNOTATION_NAME).ifPresent(eventName -> { - eventSchemas.put(eventName, finalSchema); - }); + getName(schema, EntityAnnotation.ANNOTATION_NAME) + .ifPresent(entityName -> entitySchemas.put(entityName, finalSchema)); + getName(schema, AspectAnnotation.ANNOTATION_NAME) + .ifPresent( + aspectName -> { + aspectSchemas.put(aspectName, finalSchema); + aspectClasses.put(aspectName, recordClass); + }); + getName(schema, EventAnnotation.ANNOTATION_NAME) + .ifPresent( + eventName -> { + eventSchemas.put(eventName, finalSchema); + }); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java index 9a083660d1023..5db8ca264f69d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEntitySpec.java @@ -11,7 +11,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.ToString; @ToString @@ -31,7 +30,8 @@ public DefaultEntitySpec( 
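Stripped of the DataHub specifics, withCustomClasspath is the standard walk-a-plugin-directory pattern: collect every jar, turn each into a URL, and stack a URLClassLoader on top of the context class loader. A condensed sketch using only the JDK (the directory argument is a placeholder):

    import java.io.IOException;
    import java.io.UncheckedIOException;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public final class PluginClassLoaderSketch {
      private PluginClassLoaderSketch() {}

      /** Builds a class loader over every .jar under pluginDir, delegating to the context loader. */
      public static URLClassLoader forPluginDir(Path pluginDir) throws IOException {
        try (Stream<Path> files = Files.walk(pluginDir)) {
          List<URL> urls =
              files
                  .filter(Files::isRegularFile)
                  .filter(p -> p.toString().endsWith(".jar"))
                  .map(
                      p -> {
                        try {
                          return p.toUri().toURL();
                        } catch (IOException e) {
                          throw new UncheckedIOException(e);
                        }
                      })
                  .collect(Collectors.toList());
          return new URLClassLoader(
              urls.toArray(new URL[0]), Thread.currentThread().getContextClassLoader());
        }
      }
    }

Closing the Files.walk stream in try-with-resources is a small departure from the code above, which leaves the stream to be garbage-collected instead.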
@Nonnull final EntityAnnotation entityAnnotation, @Nonnull final RecordDataSchema snapshotSchema, @Nullable final TyperefDataSchema aspectTyperefSchema) { - _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); + _aspectSpecs = + aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity())); _entityAnnotation = entityAnnotation; _snapshotSchema = snapshotSchema; _aspectTyperefSchema = aspectTyperefSchema; @@ -102,5 +102,4 @@ public List getSearchableFieldSpecs() { return _searchableFieldSpecs; } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java index 7f7c1004aeddb..31b73e6cc9e5e 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/DefaultEventSpec.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor @Getter public class DefaultEventSpec implements EventSpec { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java index 4bdb8e37d565f..e4c9dd55a3b4a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpec.java @@ -7,10 +7,7 @@ import java.util.Map; import java.util.stream.Collectors; - -/** - * A specification of a DataHub Entity - */ +/** A specification of a DataHub Entity */ public interface EntitySpec { String getName(); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java index 37cb9eabc09da..580134f566871 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecBuilder.java @@ -28,7 +28,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EntitySpecBuilder { @@ -61,7 +60,8 @@ public EntitySpecBuilder(final AnnotationExtractionMode extractionMode) { public List buildEntitySpecs(@Nonnull final DataSchema snapshotSchema) { - final UnionDataSchema snapshotUnionSchema = (UnionDataSchema) snapshotSchema.getDereferencedDataSchema(); + final UnionDataSchema snapshotUnionSchema = + (UnionDataSchema) snapshotSchema.getDereferencedDataSchema(); final List unionMembers = snapshotUnionSchema.getMembers(); final List entitySpecs = new ArrayList<>(); @@ -73,16 +73,19 @@ public List buildEntitySpecs(@Nonnull final DataSchema snapshotSchem } // Now validate that all relationships point to valid entities. - // TODO: Fix this so that aspects that are just in the entity registry don't fail because they aren't in the + // TODO: Fix this so that aspects that are just in the entity registry don't fail because they + // aren't in the // snapshot registry. -// for (final RelationshipFieldSpec spec : _relationshipFieldSpecs) { -// if (!_entityNames.containsAll( -// spec.getValidDestinationTypes().stream().map(String::toLowerCase).collect(Collectors.toList()))) { -// failValidation( -// String.format("Found invalid relationship with name %s at path %s. 
Invalid entityType(s) provided.", -// spec.getRelationshipName(), spec.getPath())); -// } -// } + // for (final RelationshipFieldSpec spec : _relationshipFieldSpecs) { + // if (!_entityNames.containsAll( + // + // spec.getValidDestinationTypes().stream().map(String::toLowerCase).collect(Collectors.toList()))) { + // failValidation( + // String.format("Found invalid relationship with name %s at path %s. Invalid + // entityType(s) provided.", + // spec.getRelationshipName(), spec.getPath())); + // } + // } return entitySpecs; } @@ -93,17 +96,21 @@ public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema final RecordDataSchema entitySnapshotRecordSchema = validateSnapshot(entitySnapshotSchema); // 1. Parse information about the entity from the "entity" annotation. - final Object entityAnnotationObj = entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { EntityAnnotation entityAnnotation = - EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); + EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); final ArrayDataSchema aspectArraySchema = - (ArrayDataSchema) entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME) - .getType() - .getDereferencedDataSchema(); + (ArrayDataSchema) + entitySnapshotRecordSchema + .getField(ASPECTS_FIELD_NAME) + .getType() + .getDereferencedDataSchema(); final UnionDataSchema aspectUnionSchema = (UnionDataSchema) aspectArraySchema.getItems().getDereferencedDataSchema(); @@ -113,111 +120,147 @@ public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema for (final UnionDataSchema.Member member : unionMembers) { NamedDataSchema namedDataSchema = (NamedDataSchema) member.getType(); try { - final AspectSpec spec = buildAspectSpec(member.getType(), - (Class) Class.forName(namedDataSchema.getFullName()).asSubclass(RecordTemplate.class)); + final AspectSpec spec = + buildAspectSpec( + member.getType(), + (Class) + Class.forName(namedDataSchema.getFullName()) + .asSubclass(RecordTemplate.class)); aspectSpecs.add(spec); } catch (ClassNotFoundException ce) { log.warn("Failed to find class for {}", member.getType(), ce); } } - final EntitySpec entitySpec = new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema, - (TyperefDataSchema) aspectArraySchema.getItems()); + final EntitySpec entitySpec = + new DefaultEntitySpec( + aspectSpecs, + entityAnnotation, + entitySnapshotRecordSchema, + (TyperefDataSchema) aspectArraySchema.getItems()); validateEntitySpec(entitySpec); return entitySpec; } - failValidation(String.format("Could not build entity spec for entity with name %s. Missing @%s annotation.", - entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); + failValidation( + String.format( + "Could not build entity spec for entity with name %s. Missing @%s annotation.", + entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); return null; } - public EntitySpec buildEntitySpec(@Nonnull final DataSchema entitySnapshotSchema, - @Nonnull final List aspectSpecs) { + public EntitySpec buildEntitySpec( + @Nonnull final DataSchema entitySnapshotSchema, @Nonnull final List aspectSpecs) { // 0. 
Validate the Snapshot definition final RecordDataSchema entitySnapshotRecordSchema = validateSnapshot(entitySnapshotSchema); // 1. Parse information about the entity from the "entity" annotation. - final Object entityAnnotationObj = entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotRecordSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { EntityAnnotation entityAnnotation = - EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); + EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotRecordSchema.getFullName()); - final EntitySpec entitySpec = new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema); + final EntitySpec entitySpec = + new DefaultEntitySpec(aspectSpecs, entityAnnotation, entitySnapshotRecordSchema); validateEntitySpec(entitySpec); return entitySpec; } - failValidation(String.format("Could not build entity spec for entity with name %s. Missing @%s annotation.", - entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); + failValidation( + String.format( + "Could not build entity spec for entity with name %s. Missing @%s annotation.", + entitySnapshotRecordSchema.getName(), EntityAnnotation.ANNOTATION_NAME)); return null; } - /** - * Build a config-based {@link EntitySpec}, as opposed to a Snapshot-based {@link EntitySpec} - */ - public EntitySpec buildConfigEntitySpec(@Nonnull final String entityName, @Nonnull final String keyAspect, + /** Build a config-based {@link EntitySpec}, as opposed to a Snapshot-based {@link EntitySpec} */ + public EntitySpec buildConfigEntitySpec( + @Nonnull final String entityName, + @Nonnull final String keyAspect, @Nonnull final List aspectSpecs) { return new ConfigEntitySpec(entityName, keyAspect, aspectSpecs); } - public EntitySpec buildPartialEntitySpec(@Nonnull final String entityName, @Nullable final String keyAspectName, + public EntitySpec buildPartialEntitySpec( + @Nonnull final String entityName, + @Nullable final String keyAspectName, @Nonnull final List aspectSpecs) { - EntitySpec entitySpec = new PartialEntitySpec(aspectSpecs, new EntityAnnotation(entityName, keyAspectName)); + EntitySpec entitySpec = + new PartialEntitySpec(aspectSpecs, new EntityAnnotation(entityName, keyAspectName)); return entitySpec; } - public AspectSpec buildAspectSpec(@Nonnull final DataSchema aspectDataSchema, - final Class aspectClass) { + public AspectSpec buildAspectSpec( + @Nonnull final DataSchema aspectDataSchema, final Class aspectClass) { final RecordDataSchema aspectRecordSchema = validateAspect(aspectDataSchema); - final Object aspectAnnotationObj = aspectRecordSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectRecordSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { final AspectAnnotation aspectAnnotation = - AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectRecordSchema.getFullName()); + AspectAnnotation.fromSchemaProperty( + aspectAnnotationObj, aspectRecordSchema.getFullName()); if (AnnotationExtractionMode.IGNORE_ASPECT_FIELDS.equals(_extractionMode)) { // Short Circuit. 
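buildAspectSpec, whose reformatted body follows, repeats one pipeline per annotation family: process a single annotation handler over the aspect schema, walk the processed schema with a rich-context traverser, and collect whatever specs the extractor accumulated. A sketch of one such pass is below, under the assumption that the pegasus classes live in com.linkedin.data.schema.annotation and that SearchableFieldSpec and its extractor sit in the same com.linkedin.metadata.models package as EntitySpecBuilder.

    import com.linkedin.data.schema.RecordDataSchema;
    import com.linkedin.data.schema.annotation.DataSchemaRichContextTraverser;
    import com.linkedin.data.schema.annotation.SchemaAnnotationHandler;
    import com.linkedin.data.schema.annotation.SchemaAnnotationProcessor;
    import java.util.Collections;
    import java.util.List;

    public final class AnnotationPassSketch {
      private AnnotationPassSketch() {}

      /** Runs one handler over an aspect schema and returns the searchable field specs found. */
      public static List<SearchableFieldSpec> extractSearchableSpecs(
          SchemaAnnotationHandler handler, RecordDataSchema aspectRecordSchema) {
        SchemaAnnotationProcessor.SchemaAnnotationProcessResult processed =
            SchemaAnnotationProcessor.process(
                Collections.singletonList(handler),
                aspectRecordSchema,
                new SchemaAnnotationProcessor.AnnotationProcessOption());
        SearchableFieldSpecExtractor extractor = new SearchableFieldSpecExtractor();
        new DataSchemaRichContextTraverser(extractor).traverse(processed.getResultSchema());
        return extractor.getSpecs();
      }
    }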
- return new AspectSpec(aspectAnnotation, Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), aspectRecordSchema, aspectClass); + return new AspectSpec( + aspectAnnotation, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + aspectRecordSchema, + aspectClass); } final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedSearchResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_searchHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_searchHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract Searchable Field Specs - final SearchableFieldSpecExtractor searchableFieldSpecExtractor = new SearchableFieldSpecExtractor(); + final SearchableFieldSpecExtractor searchableFieldSpecExtractor = + new SearchableFieldSpecExtractor(); final DataSchemaRichContextTraverser searchableFieldSpecTraverser = new DataSchemaRichContextTraverser(searchableFieldSpecExtractor); searchableFieldSpecTraverser.traverse(processedSearchResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedSearchScoreResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_searchScoreHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_searchScoreHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract SearchScore Field Specs - final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = new SearchScoreFieldSpecExtractor(); + final SearchScoreFieldSpecExtractor searchScoreFieldSpecExtractor = + new SearchScoreFieldSpecExtractor(); final DataSchemaRichContextTraverser searcScoreFieldSpecTraverser = new DataSchemaRichContextTraverser(searchScoreFieldSpecExtractor); searcScoreFieldSpecTraverser.traverse(processedSearchScoreResult.getResultSchema()); final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedRelationshipResult = - SchemaAnnotationProcessor.process(Collections.singletonList(_relationshipHandler), aspectRecordSchema, + SchemaAnnotationProcessor.process( + Collections.singletonList(_relationshipHandler), + aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract Relationship Field Specs - final RelationshipFieldSpecExtractor relationshipFieldSpecExtractor = new RelationshipFieldSpecExtractor(); + final RelationshipFieldSpecExtractor relationshipFieldSpecExtractor = + new RelationshipFieldSpecExtractor(); final DataSchemaRichContextTraverser relationshipFieldSpecTraverser = new DataSchemaRichContextTraverser(relationshipFieldSpecExtractor); relationshipFieldSpecTraverser.traverse(processedRelationshipResult.getResultSchema()); @@ -227,23 +270,33 @@ public AspectSpec buildAspectSpec(@Nonnull final DataSchema aspectDataSchema, final SchemaAnnotationProcessor.SchemaAnnotationProcessResult processedTimeseriesFieldResult = SchemaAnnotationProcessor.process( - ImmutableList.of(_timeseriesFiledAnnotationHandler, _timeseriesFieldCollectionHandler), - aspectRecordSchema, new SchemaAnnotationProcessor.AnnotationProcessOption()); + ImmutableList.of( + _timeseriesFiledAnnotationHandler, _timeseriesFieldCollectionHandler), + aspectRecordSchema, + new SchemaAnnotationProcessor.AnnotationProcessOption()); // Extract TimeseriesField/ TimeseriesFieldCollection Specs - final 
TimeseriesFieldSpecExtractor timeseriesFieldSpecExtractor = new TimeseriesFieldSpecExtractor(); + final TimeseriesFieldSpecExtractor timeseriesFieldSpecExtractor = + new TimeseriesFieldSpecExtractor(); final DataSchemaRichContextTraverser timeseriesFieldSpecTraverser = new DataSchemaRichContextTraverser(timeseriesFieldSpecExtractor); timeseriesFieldSpecTraverser.traverse(processedTimeseriesFieldResult.getResultSchema()); - return new AspectSpec(aspectAnnotation, searchableFieldSpecExtractor.getSpecs(), - searchScoreFieldSpecExtractor.getSpecs(), relationshipFieldSpecExtractor.getSpecs(), + return new AspectSpec( + aspectAnnotation, + searchableFieldSpecExtractor.getSpecs(), + searchScoreFieldSpecExtractor.getSpecs(), + relationshipFieldSpecExtractor.getSpecs(), timeseriesFieldSpecExtractor.getTimeseriesFieldSpecs(), - timeseriesFieldSpecExtractor.getTimeseriesFieldCollectionSpecs(), aspectRecordSchema, aspectClass); + timeseriesFieldSpecExtractor.getTimeseriesFieldCollectionSpecs(), + aspectRecordSchema, + aspectClass); } - failValidation(String.format("Could not build aspect spec for aspect with name %s. Missing @Aspect annotation.", - aspectRecordSchema.getName())); + failValidation( + String.format( + "Could not build aspect spec for aspect with name %s. Missing @Aspect annotation.", + aspectRecordSchema.getName())); return null; } @@ -252,7 +305,8 @@ private void validateEntitySpec(EntitySpec entitySpec) { if (entitySpec.getKeyAspectSpec() == null) { failValidation( - String.format("Did not find required Key Aspect with name %s in aspects for Entity %s in list of aspects.", + String.format( + "Did not find required Key Aspect with name %s in aspects for Entity %s in list of aspects.", entitySpec.getKeyAspectName(), entitySpec.getName())); } @@ -263,9 +317,11 @@ private void validateEntitySpec(EntitySpec entitySpec) { for (final AspectSpec aspectSpec : entitySpec.getAspectSpecs()) { validateAspect(aspectSpec); if (aspectNames.contains(aspectSpec.getName())) { - failValidation(String.format( - "Could not build entity spec for entity with name %s." + " Found multiple Aspects with the same name %s", - entitySpec.getName(), aspectSpec.getName())); + failValidation( + String.format( + "Could not build entity spec for entity with name %s." + + " Found multiple Aspects with the same name %s", + entitySpec.getName(), aspectSpec.getName())); } aspectNames.add(aspectSpec.getName()); } @@ -273,8 +329,11 @@ private void validateEntitySpec(EntitySpec entitySpec) { // Validate entity name if (_entityNames.contains(entitySpec.getName().toLowerCase())) { // Duplicate entity found. - failValidation(String.format("Could not build entity spec for entity with name %s." - + " Found multiple Entity Snapshots with the same name.", entitySpec.getName())); + failValidation( + String.format( + "Could not build entity spec for entity with name %s." 
+ + " Found multiple Entity Snapshots with the same name.", + entitySpec.getName())); } _entityNames.add(entitySpec.getName().toLowerCase()); @@ -283,13 +342,16 @@ private void validateEntitySpec(EntitySpec entitySpec) { private void validateAspect(final AspectSpec aspectSpec) { if (aspectSpec.isTimeseries()) { if (aspectSpec.getPegasusSchema().contains(TIMESTAMP_FIELD_NAME)) { - DataSchema timestamp = aspectSpec.getPegasusSchema().getField(TIMESTAMP_FIELD_NAME).getType(); + DataSchema timestamp = + aspectSpec.getPegasusSchema().getField(TIMESTAMP_FIELD_NAME).getType(); if (timestamp.getType() == DataSchema.Type.LONG) { return; } } - failValidation(String.format("Aspect %s is of type timeseries but does not include TimeseriesAspectBase", - aspectSpec.getName())); + failValidation( + String.format( + "Aspect %s is of type timeseries but does not include TimeseriesAspectBase", + aspectSpec.getName())); } } @@ -297,7 +359,8 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh // 0. Validate that schema is a Record if (entitySnapshotSchema.getType() != DataSchema.Type.RECORD) { failValidation( - String.format("Failed to validate entity snapshot schema of type %s. Schema must be of record type.", + String.format( + "Failed to validate entity snapshot schema of type %s. Schema must be of record type.", entitySnapshotSchema.getType().toString())); } @@ -306,30 +369,40 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh // 1. Validate Urn field if (entitySnapshotRecordSchema.getField(URN_FIELD_NAME) == null || entitySnapshotRecordSchema.getField(URN_FIELD_NAME).getType().getDereferencedType() - != DataSchema.Type.STRING) { - failValidation(String.format("Failed to validate entity snapshot schema with name %s. Invalid urn field.", - entitySnapshotRecordSchema.getName())); + != DataSchema.Type.STRING) { + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid urn field.", + entitySnapshotRecordSchema.getName())); } // 2. Validate Aspect Array if (entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME) == null || entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME).getType().getDereferencedType() - != DataSchema.Type.ARRAY) { + != DataSchema.Type.ARRAY) { - failValidation(String.format( - "Failed to validate entity snapshot schema with name %s. Invalid aspects field found. " - + "'aspects' should be an array of union type.", entitySnapshotRecordSchema.getName())); + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid aspects field found. " + + "'aspects' should be an array of union type.", + entitySnapshotRecordSchema.getName())); } // 3. Validate Aspect Union final ArrayDataSchema aspectArray = - (ArrayDataSchema) entitySnapshotRecordSchema.getField(ASPECTS_FIELD_NAME).getType().getDereferencedDataSchema(); + (ArrayDataSchema) + entitySnapshotRecordSchema + .getField(ASPECTS_FIELD_NAME) + .getType() + .getDereferencedDataSchema(); if (aspectArray.getItems().getType() != DataSchema.Type.TYPEREF || aspectArray.getItems().getDereferencedType() != DataSchema.Type.UNION) { - failValidation(String.format( - "Failed to validate entity snapshot schema with name %s. Invalid aspects field field. " - + "'aspects' should be an array of union type.", entitySnapshotRecordSchema.getName())); + failValidation( + String.format( + "Failed to validate entity snapshot schema with name %s. Invalid aspects field field. 
" + + "'aspects' should be an array of union type.", + entitySnapshotRecordSchema.getName())); } return entitySnapshotRecordSchema; @@ -338,8 +411,10 @@ private RecordDataSchema validateSnapshot(@Nonnull final DataSchema entitySnapsh private RecordDataSchema validateAspect(@Nonnull final DataSchema aspectSchema) { // Validate that schema is a Record if (aspectSchema.getType() != DataSchema.Type.RECORD) { - failValidation(String.format("Failed to validate aspect schema of type %s. Schema must be of record type.", - aspectSchema.getType().toString())); + failValidation( + String.format( + "Failed to validate aspect schema of type %s. Schema must be of record type.", + aspectSchema.getType().toString())); } return (RecordDataSchema) aspectSchema; } @@ -349,11 +424,13 @@ private void validateKeyAspect(@Nonnull final AspectSpec keyAspect) { RecordDataSchema schema = keyAspect.getPegasusSchema(); // Validate that each field is a string or enum. for (RecordDataSchema.Field field : schema.getFields()) { - if (!DataSchema.Type.STRING.equals(field.getType().getDereferencedType()) && !DataSchema.Type.ENUM.equals( - field.getType().getDereferencedType())) { - failValidation(String.format("Failed to validate key aspect nameed %s. Key " - + "aspects must only contain fields of STRING or ENUM type. Found %s.", keyAspect.getName(), - field.getType().toString())); + if (!DataSchema.Type.STRING.equals(field.getType().getDereferencedType()) + && !DataSchema.Type.ENUM.equals(field.getType().getDereferencedType())) { + failValidation( + String.format( + "Failed to validate key aspect nameed %s. Key " + + "aspects must only contain fields of STRING or ENUM type. Found %s.", + keyAspect.getName(), field.getType().toString())); } } } @@ -363,14 +440,9 @@ private void failValidation(@Nonnull final String message) { } public enum AnnotationExtractionMode { - /** - * Extract all annotations types, the default. - */ + /** Extract all annotations types, the default. */ DEFAULT, - /** - * Skip annotations on aspect record fields, only - * parse entity + aspect annotations. - */ + /** Skip annotations on aspect record fields, only parse entity + aspect annotations. 
*/
     IGNORE_ASPECT_FIELDS
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java
index a25bf1c2dea62..0a265c46a5164 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EntitySpecUtils.java
@@ -5,19 +5,17 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
 public class EntitySpecUtils {
-  private EntitySpecUtils() {
-  }
+  private EntitySpecUtils() {}
 
-  public static List<String> getEntityTimeseriesAspectNames(@Nonnull EntityRegistry entityRegistry,
-      @Nonnull String entityName) {
+  public static List<String> getEntityTimeseriesAspectNames(
+      @Nonnull EntityRegistry entityRegistry, @Nonnull String entityName) {
     final EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName);
-    final List<String> timeseriesAspectNames = entitySpec.getAspectSpecs()
-        .stream()
-        .filter(x -> x.isTimeseries())
-        .map(x -> x.getName())
-        .collect(Collectors.toList());
+    final List<String> timeseriesAspectNames =
+        entitySpec.getAspectSpecs().stream()
+            .filter(x -> x.isTimeseries())
+            .map(x -> x.getName())
+            .collect(Collectors.toList());
     return timeseriesAspectNames;
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java
index 20f0dfc70d465..09ec6641777f9 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpec.java
@@ -3,23 +3,14 @@
 import com.linkedin.data.schema.RecordDataSchema;
 import com.linkedin.metadata.models.annotation.EventAnnotation;
 
-
-/**
- * A specification of a DataHub Platform Event
- */
+/** A specification of a DataHub Platform Event */
 public interface EventSpec {
-  /**
-   * Returns the name of an event
-   */
+  /** Returns the name of an event */
   String getName();
 
-  /**
-   * Returns the raw event annotation
-   */
+  /** Returns the raw event annotation */
   EventAnnotation getEventAnnotation();
 
-  /**
-   * Returns the PDL schema object for the Event
-   */
+  /** Returns the PDL schema object for the Event */
   RecordDataSchema getPegasusSchema();
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java
index 04322b3b550cb..ceb984cdbc5b4 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/EventSpecBuilder.java
@@ -9,33 +9,32 @@
 @Slf4j
 public class EventSpecBuilder {
 
-  public EventSpecBuilder() {
-  }
+  public EventSpecBuilder() {}
 
   public EventSpec buildEventSpec(
-      @Nonnull final String eventName,
-      @Nonnull final DataSchema eventDataSchema) {
+      @Nonnull final String eventName, @Nonnull final DataSchema eventDataSchema) {
     final RecordDataSchema eventRecordSchema = validateEvent(eventDataSchema);
 
-    final Object eventAnnotationObj = eventDataSchema.getProperties().get(EventAnnotation.ANNOTATION_NAME);
+    final Object eventAnnotationObj =
+        eventDataSchema.getProperties().get(EventAnnotation.ANNOTATION_NAME);
 
     if (eventAnnotationObj != null) {
       final EventAnnotation eventAnnotation =
-          EventAnnotation.fromPegasusAnnotationObject(eventAnnotationObj, eventRecordSchema.getFullName());
+          EventAnnotation.fromPegasusAnnotationObject(
+              eventAnnotationObj, 
eventRecordSchema.getFullName());
 
-      return new DefaultEventSpec(
-          eventName,
-          eventAnnotation,
-          eventRecordSchema);
+      return new DefaultEventSpec(eventName, eventAnnotation, eventRecordSchema);
     }
     return null;
   }
 
   private RecordDataSchema validateEvent(@Nonnull final DataSchema eventSchema) {
     if (eventSchema.getType() != DataSchema.Type.RECORD) {
-      failValidation(String.format("Failed to validate event schema of type %s. Schema must be of record type.",
-          eventSchema.getType().toString()));
+      failValidation(
+          String.format(
+              "Failed to validate event schema of type %s. Schema must be of record type.",
+              eventSchema.getType().toString()));
     }
     return (RecordDataSchema) eventSchema;
   }
@@ -43,4 +42,4 @@ private RecordDataSchema validateEvent(@Nonnull final DataSchema eventSchema) {
   private void failValidation(@Nonnull final String message) {
     throw new ModelValidationException(message);
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java
index 303fd06299356..b109f9498cba6 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpec.java
@@ -3,19 +3,15 @@
 import com.linkedin.data.schema.DataSchema;
 import com.linkedin.data.schema.PathSpec;
 
-
 /**
- * Base interface for aspect field specs. Contains a) the path to the field and b) the schema of the field
+ * Base interface for aspect field specs. Contains a) the path to the field and b) the schema of the
+ * field
 */
 public interface FieldSpec {
 
-  /**
-   * Returns the {@link PathSpec} corresponding to the field, relative to its parent aspect.
-   */
+  /** Returns the {@link PathSpec} corresponding to the field, relative to its parent aspect. */
   PathSpec getPath();
 
-  /**
-   * Returns the {@link DataSchema} associated with the aspect field.
-   */
+  /** Returns the {@link DataSchema} associated with the aspect field. */
   DataSchema getPegasusSchema();
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java
index ac1e1dfc21590..53a689602f27c 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/FieldSpecUtils.java
@@ -9,11 +9,9 @@
 import java.util.Map;
 import java.util.Optional;
 
-
 public class FieldSpecUtils {
 
-  private FieldSpecUtils() {
-  }
+  private FieldSpecUtils() {}
 
   public static String getSchemaFieldName(PathSpec pathSpec) {
     List<String> components = pathSpec.getPathComponents();
@@ -25,16 +23,25 @@ public static String getSchemaFieldName(PathSpec pathSpec) {
   }
 
   public static Map<String, Object> getResolvedProperties(final DataSchema schema) {
-    return !schema.getResolvedProperties().isEmpty() ? schema.getResolvedProperties() : schema.getProperties();
+    return !schema.getResolvedProperties().isEmpty()
+        ? 
schema.getResolvedProperties()
+        : schema.getProperties();
   }
 
   public static Optional<PathSpec> getPathSpecWithAspectName(TraverserContext context) {
-    Object aspectAnnotationObj = context.getTopLevelSchema().getProperties().get(AspectAnnotation.ANNOTATION_NAME);
-    if (aspectAnnotationObj == null || !Map.class.isAssignableFrom(aspectAnnotationObj.getClass())
+    Object aspectAnnotationObj =
+        context.getTopLevelSchema().getProperties().get(AspectAnnotation.ANNOTATION_NAME);
+    if (aspectAnnotationObj == null
+        || !Map.class.isAssignableFrom(aspectAnnotationObj.getClass())
         || !((Map) aspectAnnotationObj).containsKey(AspectAnnotation.NAME_FIELD)) {
       return Optional.empty();
     }
     String aspectName = (((Map) aspectAnnotationObj).get(AspectAnnotation.NAME_FIELD)).toString();
-    return Optional.of(new PathSpec(ImmutableList.<String>builder().add(aspectName).addAll(context.getSchemaPathSpec()).build()));
+    return Optional.of(
+        new PathSpec(
+            ImmutableList.<String>builder()
+                .add(aspectName)
+                .addAll(context.getSchemaPathSpec())
+                .build()));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java b/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java
index 7dfe596c8de4c..549c0a9ef7916 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/ModelValidationException.java
@@ -1,8 +1,6 @@
 package com.linkedin.metadata.models;
 
-/**
- * Exception thrown when Entity, Aspect models fail to be validated.
- */
+/** Exception thrown when Entity, Aspect models fail to be validated. */
 public class ModelValidationException extends RuntimeException {
 
   public ModelValidationException(String message) {
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java
index 0124fc8ce7bb1..13678d29da730 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/PartialEntitySpec.java
@@ -12,13 +12,11 @@
 import javax.annotation.Nonnull;
 import lombok.ToString;
 
-
 /**
- * A partially specified entity spec that can be used with a {@link com.linkedin.metadata.models.registry.PatchEntityRegistry}.
- * Specifically, it does not require the following things compared to a {@link DefaultEntitySpec}
- * - a key aspect
- * - snapshot schemas for the entity
- * - typeref schemas for aspect
+ * A partially specified entity spec that can be used with a {@link
+ * com.linkedin.metadata.models.registry.PatchEntityRegistry}. 
Specifically, it does not require the
+ * following things compared to a {@link DefaultEntitySpec} - a key aspect - snapshot schemas for
+ * the entity - typeref schemas for aspect
 */
 @ToString
 public class PartialEntitySpec implements EntitySpec {
@@ -26,8 +24,10 @@ public class PartialEntitySpec implements EntitySpec {
   private final EntityAnnotation _entityAnnotation;
   private final Map<String, AspectSpec> _aspectSpecs;
 
-  public PartialEntitySpec(@Nonnull final Collection<AspectSpec> aspectSpecs, final EntityAnnotation entityAnnotation) {
-    _aspectSpecs = aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity()));
+  public PartialEntitySpec(
+      @Nonnull final Collection<AspectSpec> aspectSpecs, final EntityAnnotation entityAnnotation) {
+    _aspectSpecs =
+        aspectSpecs.stream().collect(Collectors.toMap(AspectSpec::getName, Function.identity()));
     _entityAnnotation = entityAnnotation;
   }
 
@@ -82,7 +82,7 @@ public RecordDataSchema getSnapshotSchema() {
 
   @Override
   public TyperefDataSchema getAspectTyperefSchema() {
-    throw new UnsupportedOperationException("Partial entity specs do not contain aspect typeref schemas");
+    throw new UnsupportedOperationException(
+        "Partial entity specs do not contain aspect typeref schemas");
   }
-
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java b/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java
index 1a262731a48af..a4dabea0a3345 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/PropertyOverrideComparator.java
@@ -3,7 +3,6 @@
 import java.util.Comparator;
 import org.apache.commons.lang3.tuple.Pair;
 
-
 public class PropertyOverrideComparator implements Comparator<Pair<String, Object>> {
   public int compare(Pair<String, Object> o1, Pair<String, Object> o2) {
     return Integer.compare(o2.getKey().split("/").length, o1.getKey().split("/").length);
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java
index 76454850aa2f8..06d6994e7dc45 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpec.java
@@ -8,7 +8,6 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
 @Value
 public class RelationshipFieldSpec implements FieldSpec {
 
@@ -16,17 +15,13 @@ public class RelationshipFieldSpec implements FieldSpec {
   @NonNull RelationshipAnnotation relationshipAnnotation;
   @NonNull DataSchema pegasusSchema;
 
-  /**
-   * Returns the name of the outbound relationship extending from the field.
-   */
+  /** Returns the name of the outbound relationship extending from the field. */
   @Nonnull
   public String getRelationshipName() {
     return relationshipAnnotation.getName();
   }
 
-  /**
-   * Returns a list of entity names representing the destination node type of the relationship.
-   */
+  /** Returns a list of entity names representing the destination node type of the relationship. 
*/
   @Nonnull
   public List<String> getValidDestinationTypes() {
     return relationshipAnnotation.getValidDestinationTypes();
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java
index 99c0908abbd02..ad32b315f6b1a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/RelationshipFieldSpecExtractor.java
@@ -12,7 +12,6 @@
 import java.util.List;
 import java.util.Map;
 
-
 /**
  * Implementation of {@link SchemaVisitor} responsible for extracting {@link RelationshipFieldSpec}s
  * from an aspect schema.
@@ -41,25 +40,31 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
       final Object primaryAnnotationObj = properties.get(RelationshipAnnotation.ANNOTATION_NAME);
 
       if (primaryAnnotationObj != null) {
-        validatePropertiesAnnotation(currentSchema, primaryAnnotationObj, context.getTraversePath().toString());
+        validatePropertiesAnnotation(
+            currentSchema, primaryAnnotationObj, context.getTraversePath().toString());
       }
 
       // Next, check resolved properties for annotations on primitives.
-      final Map<String, Object> resolvedProperties = FieldSpecUtils.getResolvedProperties(currentSchema);
-      final Object resolvedAnnotationObj = resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME);
+      final Map<String, Object> resolvedProperties =
+          FieldSpecUtils.getResolvedProperties(currentSchema);
+      final Object resolvedAnnotationObj =
+          resolvedProperties.get(RelationshipAnnotation.ANNOTATION_NAME);
 
       if (resolvedAnnotationObj != null) {
-        if (currentSchema.isPrimitive() && isValidPrimitiveType((PrimitiveDataSchema) currentSchema)) {
+        if (currentSchema.isPrimitive()
+            && isValidPrimitiveType((PrimitiveDataSchema) currentSchema)) {
           final PathSpec path = new PathSpec(context.getSchemaPathSpec());
-          final RelationshipAnnotation annotation = RelationshipAnnotation.fromPegasusAnnotationObject(
-              resolvedAnnotationObj,
-              path.toString()
-          );
-          final RelationshipFieldSpec fieldSpec = new RelationshipFieldSpec(path, annotation, currentSchema);
+          final RelationshipAnnotation annotation =
+              RelationshipAnnotation.fromPegasusAnnotationObject(
+                  resolvedAnnotationObj, path.toString());
+          final RelationshipFieldSpec fieldSpec =
+              new RelationshipFieldSpec(path, annotation, currentSchema);
           _specs.add(fieldSpec);
           return;
         }
-        throw new ModelValidationException(String.format("Invalid @Relationship Annotation at %s", context.getSchemaPathSpec().toString()));
+        throw new ModelValidationException(
+            String.format(
+                "Invalid @Relationship Annotation at %s", context.getSchemaPathSpec().toString()));
       }
     }
   }
@@ -78,7 +83,8 @@ private Boolean isValidPrimitiveType(final PrimitiveDataSchema schema) {
     return DataSchema.Type.STRING.equals(schema.getDereferencedDataSchema().getDereferencedType());
   }
 
-  private void validatePropertiesAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) {
+  private void validatePropertiesAnnotation(
+      DataSchema currentSchema, Object annotationObj, String pathStr) {
 
     // If primitive, assume the annotation is well formed until resolvedProperties reflects it.
     if (currentSchema.isPrimitive()) {
@@ -87,20 +93,19 @@ private void validatePropertiesAnnot
     // Required override case. If the annotation keys are not overrides, they are incorrect. 
if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
-          RelationshipAnnotation.ANNOTATION_NAME,
-          pathStr
-      ));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
+              RelationshipAnnotation.ANNOTATION_NAME, pathStr));
     }
     Map annotationMap = (Map) annotationObj;
     for (String key : annotationMap.keySet()) {
       if (!key.startsWith(Character.toString(PathSpec.SEPARATOR))) {
         throw new ModelValidationException(
-            String.format("Invalid @Relationship Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
-                pathStr,
-                currentSchema.getType()));
+            String.format(
+                "Invalid @Relationship Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.",
+                pathStr, currentSchema.getType()));
       }
     }
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java
index 2346923d70a48..bdd3546b75857 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpec.java
@@ -6,10 +6,9 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
 @Value
 public class SearchScoreFieldSpec implements FieldSpec {
   @NonNull PathSpec path;
   @NonNull SearchScoreAnnotation searchScoreAnnotation;
   @NonNull DataSchema pegasusSchema;
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java
index c4f767c4a24bc..776d5ee7a20b7 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchScoreFieldSpecExtractor.java
@@ -15,7 +15,6 @@
 import java.util.Optional;
 import java.util.Set;
 
-
 /**
  * Implementation of {@link SchemaVisitor} responsible for extracting {@link SearchScoreFieldSpec}s
  * from an aspect schema.
@@ -24,7 +23,8 @@ public class SearchScoreFieldSpecExtractor implements SchemaVisitor {
 
   private final List<SearchScoreFieldSpec> _specs = new ArrayList<>();
   private static final Set<DataSchema.Type> NUMERIC_TYPES =
-      ImmutableSet.of(DataSchema.Type.INT, DataSchema.Type.LONG, DataSchema.Type.FLOAT, DataSchema.Type.DOUBLE);
+      ImmutableSet.of(
+          DataSchema.Type.INT, DataSchema.Type.LONG, DataSchema.Type.FLOAT, DataSchema.Type.DOUBLE);
 
   public List<SearchScoreFieldSpec> getSpecs() {
     return _specs;
@@ -46,9 +46,10 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
       if (currentSchema.isPrimitive() && isNumericType((PrimitiveDataSchema) currentSchema)) {
         extractAnnotation(annotationObj, currentSchema, context);
       } else {
-        throw new ModelValidationException(String.format(
-            "Invalid @SearchScore Annotation at %s. This annotation can only be put in on a numeric singular (non-array) field",
-            context.getSchemaPathSpec().toString()));
+        throw new ModelValidationException(
+            String.format(
+                "Invalid @SearchScore Annotation at %s. 
This annotation can only be put on a numeric singular (non-array) field",
+                context.getSchemaPathSpec().toString()));
       }
     }
   }
@@ -59,19 +60,21 @@ private Object getAnnotationObj(TraverserContext context) {
     return properties.get(SearchScoreAnnotation.ANNOTATION_NAME);
   }
 
-  private void extractAnnotation(final Object annotationObj, final DataSchema currentSchema,
-      final TraverserContext context) {
+  private void extractAnnotation(
+      final Object annotationObj, final DataSchema currentSchema, final TraverserContext context) {
     final PathSpec path = new PathSpec(context.getSchemaPathSpec());
     final Optional<PathSpec> fullPath = FieldSpecUtils.getPathSpecWithAspectName(context);
     if (context.getSchemaPathSpec().contains(PathSpec.WILDCARD)) {
       throw new ModelValidationException(
-          String.format("SearchScore annotation can only be put on singular fields (non-arrays): path %s",
+          String.format(
+              "SearchScore annotation can only be put on singular fields (non-arrays): path %s",
               fullPath.orElse(path)));
     }
     final SearchScoreAnnotation annotation =
-        SearchScoreAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path),
-            path.toString());
-    final SearchScoreFieldSpec fieldSpec = new SearchScoreFieldSpec(path, annotation, currentSchema);
+        SearchScoreAnnotation.fromPegasusAnnotationObject(
+            annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString());
+    final SearchScoreFieldSpec fieldSpec =
+        new SearchScoreFieldSpec(path, annotation, currentSchema);
     _specs.add(fieldSpec);
   }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java
index 9ebd7e991df48..217bd8e58340a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpec.java
@@ -6,7 +6,6 @@
 import lombok.NonNull;
 import lombok.Value;
 
-
 @Value
 public class SearchableFieldSpec implements FieldSpec {
 
@@ -17,4 +16,4 @@ public class SearchableFieldSpec implements FieldSpec {
   public boolean isArray() {
     return path.getPathComponents().contains("*");
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java
index 8f2f42cd69cae..add6a88369b13 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/SearchableFieldSpecExtractor.java
@@ -11,14 +11,13 @@
 import com.linkedin.data.schema.annotation.SchemaVisitorTraversalResult;
 import com.linkedin.data.schema.annotation.TraverserContext;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
-import lombok.extern.slf4j.Slf4j;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * Implementation of {@link SchemaVisitor} responsible for extracting {@link SearchableFieldSpec}s
@@ -33,6 +32,7 @@ public class SearchableFieldSpecExtractor implements SchemaVisitor {
   private static final String MAP = "map";
 
   public static final Map<String, Object> PRIMARY_URN_SEARCH_PROPERTIES;
+
   static {
     PRIMARY_URN_SEARCH_PROPERTIES = new DataMap();
     PRIMARY_URN_SEARCH_PROPERTIES.put("enableAutocomplete", "true");
@@ -41,10 +41,8 @@ 
public class SearchableFieldSpecExtractor implements SchemaVisitor {
   }
 
   private static final float SECONDARY_URN_FACTOR = 0.1f;
-  private static final Set<String> SECONDARY_URN_FIELD_TYPES = ImmutableSet.<String>builder()
-      .add("URN")
-      .add("URN_PARTIAL")
-      .build();
+  private static final Set<String> SECONDARY_URN_FIELD_TYPES =
+      ImmutableSet.<String>builder().add("URN").add("URN_PARTIAL").build();
 
   public List<SearchableFieldSpec> getSpecs() {
     return _specs;
@@ -72,7 +70,8 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
         extractSearchableAnnotation(annotationObj, currentSchema, context);
       } else {
         throw new ModelValidationException(
-            String.format("Invalid @Searchable Annotation at %s", context.getSchemaPathSpec().toString()));
+            String.format(
+                "Invalid @Searchable Annotation at %s", context.getSchemaPathSpec().toString()));
       }
     }
   }
@@ -86,34 +85,45 @@ private Object getAnnotationObj(TraverserContext context) {
     final Object primaryAnnotationObj = properties.get(SearchableAnnotation.ANNOTATION_NAME);
 
     if (primaryAnnotationObj != null) {
-      validatePropertiesAnnotation(currentSchema, primaryAnnotationObj, context.getTraversePath().toString());
-      // Unfortunately, annotations on collections always need to be a nested map (byproduct of making overrides work)
+      validatePropertiesAnnotation(
+          currentSchema, primaryAnnotationObj, context.getTraversePath().toString());
+      // Unfortunately, annotations on collections always need to be a nested map (byproduct of
+      // making overrides work)
       // As such, for annotation maps, we make it a single entry map, where the key has no meaning
-      if (currentSchema.getDereferencedType() == DataSchema.Type.MAP && primaryAnnotationObj instanceof Map
+      if (currentSchema.getDereferencedType() == DataSchema.Type.MAP
+          && primaryAnnotationObj instanceof Map
           && !((Map) primaryAnnotationObj).isEmpty()) {
         return ((Map) primaryAnnotationObj).entrySet().stream().findFirst().get().getValue();
       }
     }
 
-    // Check if the path has map in it. Individual values of the maps (actual maps are caught above) can be ignored
+    // Check if the path has map in it. 
Individual values of the maps (actual maps are caught above)
+    // can be ignored
     if (context.getTraversePath().contains(MAP)) {
       return null;
     }
 
-    final boolean isUrn = ((DataMap) context.getParentSchema().getProperties()
-        .getOrDefault("java", new DataMap()))
-        .getOrDefault("class", "").equals("com.linkedin.common.urn.Urn");
+    final boolean isUrn =
+        ((DataMap) context.getParentSchema().getProperties().getOrDefault("java", new DataMap()))
+            .getOrDefault("class", "")
+            .equals("com.linkedin.common.urn.Urn");
 
-    final Map<String, Object> resolvedProperties = FieldSpecUtils.getResolvedProperties(currentSchema);
+    final Map<String, Object> resolvedProperties =
+        FieldSpecUtils.getResolvedProperties(currentSchema);
 
     // if primary doesn't have an annotation, then ignore secondary urns
     if (isUrn && primaryAnnotationObj != null) {
-      DataMap annotationMap = (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME);
+      DataMap annotationMap =
+          (DataMap) resolvedProperties.get(SearchableAnnotation.ANNOTATION_NAME);
       Map<String, Object> result = new HashMap<>(annotationMap);
 
       // Override boostScore for secondary urn
-      if (SECONDARY_URN_FIELD_TYPES.contains(annotationMap.getOrDefault("fieldType", "URN").toString())) {
-        result.put("boostScore", Float.parseFloat(String.valueOf(annotationMap.getOrDefault("boostScore", "1.0"))) * SECONDARY_URN_FACTOR);
+      if (SECONDARY_URN_FIELD_TYPES.contains(
+          annotationMap.getOrDefault("fieldType", "URN").toString())) {
+        result.put(
+            "boostScore",
+            Float.parseFloat(String.valueOf(annotationMap.getOrDefault("boostScore", "1.0")))
+                * SECONDARY_URN_FACTOR);
       }
 
       return result;
@@ -123,40 +133,47 @@ private Object getAnnotationObj(TraverserContext context) {
     }
   }
 
-  private void extractSearchableAnnotation(final Object annotationObj, final DataSchema currentSchema,
-      final TraverserContext context) {
+  private void extractSearchableAnnotation(
+      final Object annotationObj, final DataSchema currentSchema, final TraverserContext context) {
     final PathSpec path = new PathSpec(context.getSchemaPathSpec());
     final Optional<PathSpec> fullPath = FieldSpecUtils.getPathSpecWithAspectName(context);
     SearchableAnnotation annotation =
-        SearchableAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path),
-            currentSchema.getDereferencedType(), path.toString());
+        SearchableAnnotation.fromPegasusAnnotationObject(
+            annotationObj,
+            FieldSpecUtils.getSchemaFieldName(path),
+            currentSchema.getDereferencedType(),
+            path.toString());
     String schemaPathSpec = context.getSchemaPathSpec().toString();
-    if (_searchFieldNamesToPatch.containsKey(annotation.getFieldName()) && !_searchFieldNamesToPatch.get(
-        annotation.getFieldName()).equals(schemaPathSpec)) {
+    if (_searchFieldNamesToPatch.containsKey(annotation.getFieldName())
+        && !_searchFieldNamesToPatch.get(annotation.getFieldName()).equals(schemaPathSpec)) {
       // Try to use path
       String pathName = path.toString().replace('/', '_').replace("*", "");
       if (pathName.startsWith("_")) {
         pathName = pathName.replaceFirst("_", "");
       }
-      if (_searchFieldNamesToPatch.containsKey(pathName) && !_searchFieldNamesToPatch.get(pathName).equals(schemaPathSpec)) {
+      if (_searchFieldNamesToPatch.containsKey(pathName)
+          && !_searchFieldNamesToPatch.get(pathName).equals(schemaPathSpec)) {
         throw new ModelValidationException(
-            String.format("Entity has multiple searchable fields with the same field name %s, path: %s", annotation.getFieldName(), fullPath.orElse(path)));
+            String.format(
+                "Entity has multiple searchable fields with the same field name %s, path: %s",
+                
annotation.getFieldName(), fullPath.orElse(path))); } else { - annotation = new SearchableAnnotation( - pathName, - annotation.getFieldType(), - annotation.isQueryByDefault(), - annotation.isEnableAutocomplete(), - annotation.isAddToFilters(), - annotation.isAddHasValuesToFilters(), - annotation.getFilterNameOverride(), - annotation.getHasValuesFilterNameOverride(), - annotation.getBoostScore(), - annotation.getHasValuesFieldName(), - annotation.getNumValuesFieldName(), - annotation.getWeightsPerFieldValue(), - annotation.getFieldNameAliases()); + annotation = + new SearchableAnnotation( + pathName, + annotation.getFieldType(), + annotation.isQueryByDefault(), + annotation.isEnableAutocomplete(), + annotation.isAddToFilters(), + annotation.isAddHasValuesToFilters(), + annotation.getFilterNameOverride(), + annotation.getHasValuesFilterNameOverride(), + annotation.getBoostScore(), + annotation.getHasValuesFieldName(), + annotation.getNumValuesFieldName(), + annotation.getWeightsPerFieldValue(), + annotation.getFieldNameAliases()); } } log.debug("Searchable annotation for field: {} : {}", schemaPathSpec, annotation); @@ -184,35 +201,39 @@ private Boolean isValidPrimitiveType(final PrimitiveDataSchema schema) { return true; } - private void validatePropertiesAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) { + private void validatePropertiesAnnotation( + DataSchema currentSchema, Object annotationObj, String pathStr) { // If primitive, assume the annotation is well formed until resolvedProperties reflects it. - if (currentSchema.isPrimitive() || currentSchema.getDereferencedType().equals(DataSchema.Type.ENUM) || currentSchema - .getDereferencedType() - .equals(DataSchema.Type.MAP)) { + if (currentSchema.isPrimitive() + || currentSchema.getDereferencedType().equals(DataSchema.Type.ENUM) + || currentSchema.getDereferencedType().equals(DataSchema.Type.MAP)) { return; } // Required override case. If the annotation keys are not overrides, they are incorrect. if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", - SearchableAnnotation.ANNOTATION_NAME, pathStr)); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)", + SearchableAnnotation.ANNOTATION_NAME, pathStr)); } Map annotationMap = (Map) annotationObj; if (annotationMap.size() == 0) { - throw new ModelValidationException(String.format( - "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", - pathStr, currentSchema.getType())); + throw new ModelValidationException( + String.format( + "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", + pathStr, currentSchema.getType())); } for (String key : annotationMap.keySet()) { if (!key.startsWith(Character.toString(PathSpec.SEPARATOR))) { - throw new ModelValidationException(String.format( - "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. Must be placed on primitive field.", - pathStr, currentSchema.getType())); + throw new ModelValidationException( + String.format( + "Invalid @Searchable Annotation at %s. Annotation placed on invalid field of type %s. 
Must be placed on primitive field.", + pathStr, currentSchema.getType())); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java index 5771144fd33c2..efdb8b876cbda 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldCollectionSpec.java @@ -7,7 +7,6 @@ import lombok.Data; import lombok.NonNull; - @Data public class TimeseriesFieldCollectionSpec implements FieldSpec { @NonNull PathSpec path; @@ -24,4 +23,4 @@ public String getName() { public String getKeyPathFromAnnotation() { return path + "/" + timeseriesFieldCollectionAnnotation.getKey(); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java index cbd0c0581600e..6ad7c1c9d34ca 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpec.java @@ -6,7 +6,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class TimeseriesFieldSpec implements FieldSpec { @NonNull PathSpec path; @@ -16,4 +15,4 @@ public class TimeseriesFieldSpec implements FieldSpec { public String getName() { return timeseriesFieldAnnotation.getStatName(); } -} \ No newline at end of file +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java index 4391bd1497741..e29b1a88afca4 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/TimeseriesFieldSpecExtractor.java @@ -15,16 +15,16 @@ import java.util.Optional; import lombok.Getter; - /** - * Implementation of {@link SchemaVisitor} responsible for extracting {@link TimeseriesFieldSpec} and - * {@link TimeseriesFieldCollectionSpec} from an aspect schema. + * Implementation of {@link SchemaVisitor} responsible for extracting {@link TimeseriesFieldSpec} + * and {@link TimeseriesFieldCollectionSpec} from an aspect schema. 
*/
 @Getter
 public class TimeseriesFieldSpecExtractor implements SchemaVisitor {
 
   private final List<TimeseriesFieldSpec> timeseriesFieldSpecs = new ArrayList<>();
-  private final List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs = new ArrayList<>();
+  private final List<TimeseriesFieldCollectionSpec> timeseriesFieldCollectionSpecs =
+      new ArrayList<>();
   private final Map<String, String> namesToPath = new HashMap<>();
 
   @Override
@@ -40,16 +40,22 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
       // First, check for collection in primary properties
       final Map<String, Object> primaryProperties = context.getEnclosingField().getProperties();
-      final Object timeseriesFieldAnnotationObj = primaryProperties.get(TimeseriesFieldAnnotation.ANNOTATION_NAME);
+      final Object timeseriesFieldAnnotationObj =
+          primaryProperties.get(TimeseriesFieldAnnotation.ANNOTATION_NAME);
       final Object timeseriesFieldCollectionAnnotationObj =
           primaryProperties.get(TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME);
-      if (currentSchema.getType() == DataSchema.Type.RECORD && timeseriesFieldCollectionAnnotationObj != null) {
-        validateCollectionAnnotation(currentSchema, timeseriesFieldCollectionAnnotationObj,
+      if (currentSchema.getType() == DataSchema.Type.RECORD
+          && timeseriesFieldCollectionAnnotationObj != null) {
+        validateCollectionAnnotation(
+            currentSchema,
+            timeseriesFieldCollectionAnnotationObj,
             context.getTraversePath().toString());
-        addTimeseriesFieldCollectionSpec(currentSchema, path, timeseriesFieldCollectionAnnotationObj);
-      } else if (timeseriesFieldAnnotationObj != null && !path.getPathComponents()
-          .get(path.getPathComponents().size() - 1)
-          .equals("*")) { // For arrays make sure to add just the array form
+        addTimeseriesFieldCollectionSpec(
+            currentSchema, path, timeseriesFieldCollectionAnnotationObj);
+      } else if (timeseriesFieldAnnotationObj != null
+          && !path.getPathComponents()
+              .get(path.getPathComponents().size() - 1)
+              .equals("*")) { // For arrays make sure to add just the array form
         addTimeseriesFieldSpec(currentSchema, path, timeseriesFieldAnnotationObj);
       } else {
         addTimeseriesFieldCollectionKey(path);
@@ -57,7 +63,8 @@ public void callbackOnContext(TraverserContext context, DataSchemaTraverse.Order
     }
   }
 
-  private void validateCollectionAnnotation(DataSchema currentSchema, Object annotationObj, String pathStr) {
+  private void validateCollectionAnnotation(
+      DataSchema currentSchema, Object annotationObj, String pathStr) {
 
     // If primitive, assume the annotation is well formed until resolvedProperties reflects it.
     if (currentSchema.isPrimitive()) {
@@ -66,21 +73,25 @@ private void validateCollectionAnnot
     // Required override case. If the annotation keys are not overrides, they are incorrect. 
if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
-          TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME, pathStr));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared inside %s: Invalid value type provided (Expected Map)",
+              TimeseriesFieldCollectionAnnotation.ANNOTATION_NAME, pathStr));
     }
   }
 
-  private void addTimeseriesFieldCollectionSpec(DataSchema currentSchema, PathSpec path, Object annotationObj) {
+  private void addTimeseriesFieldCollectionSpec(
+      DataSchema currentSchema, PathSpec path, Object annotationObj) {
     if (currentSchema.getType() == DataSchema.Type.RECORD) {
       TimeseriesFieldCollectionAnnotation annotation =
-          TimeseriesFieldCollectionAnnotation.fromPegasusAnnotationObject(annotationObj,
-              FieldSpecUtils.getSchemaFieldName(path), path.toString());
-      if (namesToPath.containsKey(annotation.getCollectionName()) && !namesToPath.get(annotation.getCollectionName())
-          .equals(path.toString())) {
+          TimeseriesFieldCollectionAnnotation.fromPegasusAnnotationObject(
+              annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString());
+      if (namesToPath.containsKey(annotation.getCollectionName())
+          && !namesToPath.get(annotation.getCollectionName()).equals(path.toString())) {
         throw new ModelValidationException(
-            String.format("There are multiple fields with the same name: %s", annotation.getCollectionName()));
+            String.format(
+                "There are multiple fields with the same name: %s",
+                annotation.getCollectionName()));
       }
       namesToPath.put(annotation.getCollectionName(), path.toString());
       timeseriesFieldCollectionSpecs.add(
@@ -88,25 +99,32 @@ private void addTimeseriesFieldCollectionSpec(DataSchema currentSchema, PathSpec
     }
   }
 
-  private void addTimeseriesFieldSpec(DataSchema currentSchema, PathSpec path, Object annotationObj) {
+  private void addTimeseriesFieldSpec(
+      DataSchema currentSchema, PathSpec path, Object annotationObj) {
     // First check whether the stat is part of a collection
     String pathStr = path.toString();
-    Optional<TimeseriesFieldCollectionSpec> fieldCollectionSpec = timeseriesFieldCollectionSpecs.stream()
-        .filter(spec -> pathStr.startsWith(spec.getPath().toString()))
-        .findFirst();
+    Optional<TimeseriesFieldCollectionSpec> fieldCollectionSpec =
+        timeseriesFieldCollectionSpecs.stream()
+            .filter(spec -> pathStr.startsWith(spec.getPath().toString()))
+            .findFirst();
     TimeseriesFieldAnnotation annotation =
-        TimeseriesFieldAnnotation.fromPegasusAnnotationObject(annotationObj, FieldSpecUtils.getSchemaFieldName(path),
-            path.toString());
+        TimeseriesFieldAnnotation.fromPegasusAnnotationObject(
+            annotationObj, FieldSpecUtils.getSchemaFieldName(path), path.toString());
     if (fieldCollectionSpec.isPresent()) {
-      fieldCollectionSpec.get()
+      fieldCollectionSpec
+          .get()
           .getTimeseriesFieldSpecMap()
-          .put(annotation.getStatName(),
-              new TimeseriesFieldSpec(getRelativePath(path, fieldCollectionSpec.get().getPath()), annotation,
+          .put(
+              annotation.getStatName(),
+              new TimeseriesFieldSpec(
+                  getRelativePath(path, fieldCollectionSpec.get().getPath()),
+                  annotation,
                   currentSchema));
     } else {
       if (path.getPathComponents().contains("*")) {
         throw new ModelValidationException(
-            String.format("No matching collection found for the given timeseries field %s", pathStr));
+            String.format(
+                "No matching collection found for the given timeseries field %s", pathStr));
       }
       timeseriesFieldSpecs.add(new TimeseriesFieldSpec(path, annotation, 
currentSchema));
     }
   }
@@ -123,7 +141,9 @@ private void addTimeseriesFieldCollectionKey(PathSpec path) {
 
   private PathSpec getRelativePath(PathSpec child, PathSpec parent) {
     return new PathSpec(
-        child.getPathComponents().subList(parent.getPathComponents().size(), child.getPathComponents().size()));
+        child
+            .getPathComponents()
+            .subList(parent.getPathComponents().size(), child.getPathComponents().size()));
   }
 
   @Override
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java
index 3d9e1cf04cd36..7aa5be69a0541 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AnnotationUtils.java
@@ -4,11 +4,11 @@
 import java.util.Optional;
 import lombok.experimental.UtilityClass;
 
-
 @UtilityClass
 public class AnnotationUtils {
   <T> Optional<T> getField(final Map fieldMap, final String fieldName, final Class<T> fieldType) {
-    if (fieldMap.containsKey(fieldName) && fieldType.isAssignableFrom(fieldMap.get(fieldName).getClass())) {
+    if (fieldMap.containsKey(fieldName)
+        && fieldType.isAssignableFrom(fieldMap.get(fieldName).getClass())) {
       return Optional.of(fieldType.cast(fieldMap.get(fieldName)));
     }
     return Optional.empty();
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java
index d116170e10d22..56dca9ab3eaf9 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/AspectAnnotation.java
@@ -7,10 +7,7 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
-/**
- * Simple object representation of the @Aspect annotation metadata.
- */
+/** Simple object representation of the @Aspect annotation metadata. 
*/
 @Value
 public class AspectAnnotation {
 
@@ -29,15 +26,12 @@ public class AspectAnnotation {
 
   @Nonnull
   public static AspectAnnotation fromSchemaProperty(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-              ANNOTATION_NAME,
-              context
-          ));
+              ANNOTATION_NAME, context));
     }
     final Map map = (Map) annotationObj;
     final Optional<String> name = AnnotationUtils.getField(map, NAME_FIELD, String.class);
@@ -45,10 +39,7 @@ public static AspectAnnotation fromSchemaProperty(
       throw new ModelValidationException(
           String.format(
               "Failed to validated @%s annotation declared at %s: missing '%s' property",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
 
     final Optional<String> type = AnnotationUtils.getField(map, TYPE_FIELD, String.class);
@@ -56,6 +47,10 @@ public static AspectAnnotation fromSchemaProperty(
     Optional<Boolean> autoRender = AnnotationUtils.getField(map, AUTO_RENDER_FIELD, Boolean.class);
     Optional<DataMap> renderSpec = AnnotationUtils.getField(map, RENDER_SPEC_FIELD, DataMap.class);
 
-    return new AspectAnnotation(name.get(), isTimeseries, autoRender.orElseGet(() -> false), renderSpec.orElseGet(() -> null));
+    return new AspectAnnotation(
+        name.get(),
+        isTimeseries,
+        autoRender.orElseGet(() -> false),
+        renderSpec.orElseGet(() -> null));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java
index e7174dcc9b176..94cdf130d1e88 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EntityAnnotation.java
@@ -6,10 +6,7 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
-/**
- * Simple object representation of the @Entity annotation metadata.
- */
+/** Simple object representation of the @Entity annotation metadata. */
 @Value
 public class EntityAnnotation {
 
@@ -22,39 +19,31 @@ public class EntityAnnotation {
 
   @Nonnull
   public static EntityAnnotation fromSchemaProperty(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-              ANNOTATION_NAME,
-              context
-          ));
+              ANNOTATION_NAME, context));
     }
     Map map = (Map) annotationObj;
 
     final Optional<String> name = AnnotationUtils.getField(map, NAME_FIELD, String.class);
-    final Optional<String> keyAspect = AnnotationUtils.getField(map, KEY_ASPECT_FIELD, String.class);
+    final Optional<String> keyAspect =
+        AnnotationUtils.getField(map, KEY_ASPECT_FIELD, String.class);
 
     if (!name.isPresent()) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid field '%s'. Expected type String",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
     if (!keyAspect.isPresent()) {
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation declared at %s: Invalid field '%s'. 
Expected type String", - ANNOTATION_NAME, - context, - KEY_ASPECT_FIELD - )); + ANNOTATION_NAME, context, KEY_ASPECT_FIELD)); } return new EntityAnnotation(name.get(), keyAspect.get()); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java index ee0229dabfc37..ddfa23412955d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/EventAnnotation.java @@ -6,10 +6,7 @@ import javax.annotation.Nonnull; import lombok.Value; - -/** - * An annotation associated with a DataHub Event. - */ +/** An annotation associated with a DataHub Event. */ @Value public class EventAnnotation { @@ -20,15 +17,12 @@ public class EventAnnotation { @Nonnull public static EventAnnotation fromPegasusAnnotationObject( - @Nonnull final Object annotationObj, - @Nonnull final String context - ) { + @Nonnull final Object annotationObj, @Nonnull final String context) { if (!Map.class.isAssignableFrom(annotationObj.getClass())) { - throw new ModelValidationException(String.format( - "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", - ANNOTATION_NAME, - context - )); + throw new ModelValidationException( + String.format( + "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)", + ANNOTATION_NAME, context)); } Map map = (Map) annotationObj; @@ -37,10 +31,7 @@ public static EventAnnotation fromPegasusAnnotationObject( throw new ModelValidationException( String.format( "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type String", - ANNOTATION_NAME, - context, - NAME_FIELD - )); + ANNOTATION_NAME, context, NAME_FIELD)); } return new EventAnnotation(name.get()); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java index 7631f95c3a5ff..a22ef56d60006 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/RelationshipAnnotation.java @@ -8,10 +8,7 @@ import javax.annotation.Nonnull; import lombok.Value; - -/** - * Simple object representation of the @Relationship annotation metadata. - */ +/** Simple object representation of the @Relationship annotation metadata. 
*/
 @Value
 public class RelationshipAnnotation {
 
@@ -38,15 +35,12 @@ public class RelationshipAnnotation {
 
   @Nonnull
   public static RelationshipAnnotation fromPegasusAnnotationObject(
-      @Nonnull final Object annotationObj,
-      @Nonnull final String context
-  ) {
+      @Nonnull final Object annotationObj, @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
-          ANNOTATION_NAME,
-          context
-      ));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+              ANNOTATION_NAME, context));
     }
     Map map = (Map) annotationObj;
 
@@ -55,13 +49,11 @@ public static RelationshipAnnotation fromPegasusAnnotationObject(
       throw new ModelValidationException(
           String.format(
               "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type String",
-              ANNOTATION_NAME,
-              context,
-              NAME_FIELD
-          ));
+              ANNOTATION_NAME, context, NAME_FIELD));
     }
 
-    final Optional<List> entityTypesList = AnnotationUtils.getField(map, ENTITY_TYPES_FIELD, List.class);
+    final Optional<List> entityTypesList =
+        AnnotationUtils.getField(map, ENTITY_TYPES_FIELD, List.class);
     final List<String> entityTypes = new ArrayList<>();
     if (entityTypesList.isPresent()) {
       for (Object entityTypeObj : entityTypesList.get()) {
@@ -69,21 +61,22 @@ public static RelationshipAnnotation fromPegasusAnnotationObject(
           throw new ModelValidationException(
               String.format(
                   "Failed to validate @%s annotation at %s: Invalid field '%s'. Expected type List",
-                  ANNOTATION_NAME,
-                  context,
-                  ENTITY_TYPES_FIELD
-              ));
+                  ANNOTATION_NAME, context, ENTITY_TYPES_FIELD));
         }
         entityTypes.add((String) entityTypeObj);
       }
     }
 
-    final Optional<Boolean> isUpstream = AnnotationUtils.getField(map, IS_UPSTREAM_FIELD, Boolean.class);
-    final Optional<Boolean> isLineage = AnnotationUtils.getField(map, IS_LINEAGE_FIELD, Boolean.class);
+    final Optional<Boolean> isUpstream =
+        AnnotationUtils.getField(map, IS_UPSTREAM_FIELD, Boolean.class);
+    final Optional<Boolean> isLineage =
+        AnnotationUtils.getField(map, IS_LINEAGE_FIELD, Boolean.class);
     final Optional<String> createdOn = AnnotationUtils.getField(map, CREATED_ON, String.class);
-    final Optional<String> createdActor = AnnotationUtils.getField(map, CREATED_ACTOR, String.class);
+    final Optional<String> createdActor =
+        AnnotationUtils.getField(map, CREATED_ACTOR, String.class);
     final Optional<String> updatedOn = AnnotationUtils.getField(map, UPDATED_ON, String.class);
-    final Optional<String> updatedActor = AnnotationUtils.getField(map, UPDATED_ACTOR, String.class);
+    final Optional<String> updatedActor =
+        AnnotationUtils.getField(map, UPDATED_ACTOR, String.class);
     final Optional<String> properties = AnnotationUtils.getField(map, PROPERTIES, String.class);
 
     return new RelationshipAnnotation(
@@ -95,6 +88,6 @@ public static RelationshipAnnotation fromPegasusAnnotationObject(
         createdActor.orElse(null),
         updatedOn.orElse(null),
         updatedActor.orElse(null),
-        properties.orElse(null)
-    ); }
-}
\ No newline at end of file
+        properties.orElse(null));
+  }
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java
index 77c5920ca9ba8..2221650eac1c9 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java
+++ 
b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchScoreAnnotation.java
@@ -8,7 +8,6 @@
 import lombok.Value;
 import org.apache.commons.lang3.EnumUtils;
 
-
 /**
  * Annotation indicating how the search results should be ranked by the underlying search service
  */
@@ -35,26 +34,31 @@ public enum Modifier {
   }
 
   @Nonnull
-  public static SearchScoreAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final String context) {
+  public static SearchScoreAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
               ANNOTATION_NAME, context));
     }
     Map map = (Map) annotationObj;
     final Optional<String> fieldName = AnnotationUtils.getField(map, "fieldName", String.class);
     final Optional<Double> weight = AnnotationUtils.getField(map, "weight", Double.class);
-    final Optional<Double> defaultValue = AnnotationUtils.getField(map, "defaultValue", Double.class);
+    final Optional<Double> defaultValue =
+        AnnotationUtils.getField(map, "defaultValue", Double.class);
     final Optional<String> modifierStr = AnnotationUtils.getField(map, "modifier", String.class);
     if (modifierStr.isPresent() && !EnumUtils.isValidEnum(Modifier.class, modifierStr.get())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid field 'modifier'. Invalid modifier provided. Valid modifiers are %s",
-          ANNOTATION_NAME, context, Arrays.toString(Modifier.values())));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid field 'modifier'. Invalid modifier provided. Valid modifiers are %s",
+              ANNOTATION_NAME, context, Arrays.toString(Modifier.values())));
     }
     final Optional<Modifier> modifier = modifierStr.map(Modifier::valueOf);
-    return new SearchScoreAnnotation(fieldName.orElse(schemaFieldName), weight.orElse(1.0), defaultValue.orElse(0.0),
-        modifier);
+    return new SearchScoreAnnotation(
+        fieldName.orElse(schemaFieldName), weight.orElse(1.0), defaultValue.orElse(0.0), modifier);
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
index efa30a948e237..d5eae2a2315fa 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/SearchableAnnotation.java
@@ -4,7 +4,6 @@
 import com.google.common.collect.ImmutableSet;
 import com.linkedin.data.schema.DataSchema;
 import com.linkedin.metadata.models.ModelValidationException;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -15,17 +14,19 @@
 import lombok.Value;
 import org.apache.commons.lang3.EnumUtils;
 
-
-/**
- * Simple object representation of the @Searchable annotation metadata.
- */
+/** Simple object representation of the @Searchable annotation metadata. 
 @Value
 public class SearchableAnnotation {
 
   public static final String FIELD_NAME_ALIASES = "fieldNameAliases";
   public static final String ANNOTATION_NAME = "Searchable";
   private static final Set<FieldType> DEFAULT_QUERY_FIELD_TYPES =
-      ImmutableSet.of(FieldType.TEXT, FieldType.TEXT_PARTIAL, FieldType.WORD_GRAM, FieldType.URN, FieldType.URN_PARTIAL);
+      ImmutableSet.of(
+          FieldType.TEXT,
+          FieldType.TEXT_PARTIAL,
+          FieldType.WORD_GRAM,
+          FieldType.URN,
+          FieldType.URN_PARTIAL);
 
   // Name of the field in the search index. Defaults to the field name in the schema
   String fieldName;
@@ -71,12 +72,15 @@ public enum FieldType {
   }
 
   @Nonnull
-  public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final DataSchema.Type schemaDataType,
+  public static SearchableAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final DataSchema.Type schemaDataType,
       @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
               ANNOTATION_NAME, context));
     }
 
@@ -84,23 +88,32 @@ public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Ob
     final Optional<String> fieldName = AnnotationUtils.getField(map, "fieldName", String.class);
     final Optional<String> fieldType = AnnotationUtils.getField(map, "fieldType", String.class);
     if (fieldType.isPresent() && !EnumUtils.isValidEnum(FieldType.class, fieldType.get())) {
-      throw new ModelValidationException(String.format(
-          "Failed to validate @%s annotation declared at %s: Invalid field 'fieldType'. Invalid fieldType provided. Valid types are %s",
-          ANNOTATION_NAME, context, Arrays.toString(FieldType.values())));
+      throw new ModelValidationException(
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid field 'fieldType'. Invalid fieldType provided. Valid types are %s",
+              ANNOTATION_NAME, context, Arrays.toString(FieldType.values())));
     }
 
-    final Optional<Boolean> queryByDefault = AnnotationUtils.getField(map, "queryByDefault", Boolean.class);
-    final Optional<Boolean> enableAutocomplete = AnnotationUtils.getField(map, "enableAutocomplete", Boolean.class);
-    final Optional<Boolean> addToFilters = AnnotationUtils.getField(map, "addToFilters", Boolean.class);
-    final Optional<Boolean> addHasValuesToFilters = AnnotationUtils.getField(map, "addHasValuesToFilters", Boolean.class);
-    final Optional<String> filterNameOverride = AnnotationUtils.getField(map, "filterNameOverride", String.class);
+    final Optional<Boolean> queryByDefault =
+        AnnotationUtils.getField(map, "queryByDefault", Boolean.class);
+    final Optional<Boolean> enableAutocomplete =
+        AnnotationUtils.getField(map, "enableAutocomplete", Boolean.class);
+    final Optional<Boolean> addToFilters =
+        AnnotationUtils.getField(map, "addToFilters", Boolean.class);
+    final Optional<Boolean> addHasValuesToFilters =
+        AnnotationUtils.getField(map, "addHasValuesToFilters", Boolean.class);
+    final Optional<String> filterNameOverride =
+        AnnotationUtils.getField(map, "filterNameOverride", String.class);
     final Optional<String> hasValuesFilterNameOverride =
         AnnotationUtils.getField(map, "hasValuesFilterNameOverride", String.class);
     final Optional<Double> boostScore = AnnotationUtils.getField(map, "boostScore", Double.class);
-    final Optional<String> hasValuesFieldName = AnnotationUtils.getField(map, "hasValuesFieldName", String.class);
-    final Optional<String> numValuesFieldName = AnnotationUtils.getField(map, "numValuesFieldName", String.class);
+    final Optional<String> hasValuesFieldName =
+        AnnotationUtils.getField(map, "hasValuesFieldName", String.class);
+    final Optional<String> numValuesFieldName =
+        AnnotationUtils.getField(map, "numValuesFieldName", String.class);
     final Optional<Map> weightsPerFieldValueMap =
-        AnnotationUtils.getField(map, "weightsPerFieldValue", Map.class).map(m -> (Map) m);
+        AnnotationUtils.getField(map, "weightsPerFieldValue", Map.class)
+            .map(m -> (Map) m);
     final List<String> fieldNameAliases = getFieldNameAliases(map);
     final FieldType resolvedFieldType = getFieldType(fieldType, schemaDataType);
 
@@ -120,7 +133,8 @@ public static SearchableAnnotation fromPegasusAnnotationObject(@Nonnull final Ob
         fieldNameAliases);
   }
 
-  private static FieldType getFieldType(Optional<String> maybeFieldType, DataSchema.Type schemaDataType) {
+  private static FieldType getFieldType(
+      Optional<String> maybeFieldType, DataSchema.Type schemaDataType) {
     if (!maybeFieldType.isPresent()) {
       return getDefaultFieldType(schemaDataType);
     }
@@ -139,7 +153,8 @@ private static FieldType getDefaultFieldType(DataSchema.Type schemaDataType) {
     }
   }
 
-  private static Boolean getQueryByDefault(Optional<Boolean> maybeQueryByDefault, FieldType fieldType) {
+  private static Boolean getQueryByDefault(
+      Optional<Boolean> maybeQueryByDefault, FieldType fieldType) {
     if (!maybeQueryByDefault.isPresent()) {
       if (DEFAULT_QUERY_FIELD_TYPES.contains(fieldType)) {
         return Boolean.TRUE;
@@ -168,7 +183,8 @@ private static String capitalizeFirstLetter(String str) {
 
   private static List<String> getFieldNameAliases(Map map) {
     final List<String> aliases = new ArrayList<>();
-    final Optional<List> fieldNameAliases = AnnotationUtils.getField(map, FIELD_NAME_ALIASES, List.class);
+    final Optional<List> fieldNameAliases =
+        AnnotationUtils.getField(map, FIELD_NAME_ALIASES, List.class);
     if (fieldNameAliases.isPresent()) {
       for (Object alias : fieldNameAliases.get()) {
         aliases.add((String) alias);
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java
index ca74c2df385f1..62ab073e41acd 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldAnnotation.java
@@ -6,7 +6,6 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
 @Value
 public class TimeseriesFieldAnnotation {
 
@@ -16,23 +15,29 @@ public class TimeseriesFieldAnnotation {
   AggregationType aggregationType;
 
   @Nonnull
-  public static TimeseriesFieldAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final String context) {
+  public static TimeseriesFieldAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
              ANNOTATION_NAME, context));
     }
 
     Map map = (Map) annotationObj;
     final Optional<String> statName = AnnotationUtils.getField(map, "name", String.class);
-    final Optional<String> aggregationType = AnnotationUtils.getField(map, "aggregationType", String.class);
+    final Optional<String> aggregationType =
+        AnnotationUtils.getField(map, "aggregationType", String.class);
 
-    return new TimeseriesFieldAnnotation(statName.orElse(schemaFieldName),
+    return new TimeseriesFieldAnnotation(
+        statName.orElse(schemaFieldName),
         aggregationType.map(AggregationType::valueOf).orElse(AggregationType.LATEST));
   }
 
   public enum AggregationType {
-    LATEST, SUM
+    LATEST,
+    SUM
   }
-}
\ No newline at end of file
+}
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java
index c507d88445cdf..d8816e0667316 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/annotation/TimeseriesFieldCollectionAnnotation.java
@@ -6,7 +6,6 @@
 import javax.annotation.Nonnull;
 import lombok.Value;
 
-
 @Value
 public class TimeseriesFieldCollectionAnnotation {
   public static final String ANNOTATION_NAME = "TimeseriesFieldCollection";
@@ -15,11 +14,14 @@ public class TimeseriesFieldCollectionAnnotation {
   String key;
 
   @Nonnull
-  public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(@Nonnull final Object annotationObj,
-      @Nonnull final String schemaFieldName, @Nonnull final String context) {
+  public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(
+      @Nonnull final Object annotationObj,
+      @Nonnull final String schemaFieldName,
+      @Nonnull final String context) {
     if (!Map.class.isAssignableFrom(annotationObj.getClass())) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
+          String.format(
+              "Failed to validate @%s annotation declared at %s: Invalid value type provided (Expected Map)",
               ANNOTATION_NAME, context));
     }
 
@@ -28,10 +30,12 @@ public static TimeseriesFieldCollectionAnnotation fromPegasusAnnotationObject(@N
     final Optional<String> key = AnnotationUtils.getField(map, "key", String.class);
     if (!key.isPresent()) {
       throw new ModelValidationException(
-          String.format("Failed to validate @%s annotation declared at %s: 'key' field is required", ANNOTATION_NAME,
-              context));
+          String.format(
+              "Failed to validate @%s annotation declared at %s: 'key' field is required",
+              ANNOTATION_NAME, context));
     }
 
-    return new TimeseriesFieldCollectionAnnotation(collectionName.orElse(schemaFieldName), key.get());
+    return new TimeseriesFieldCollectionAnnotation(
+        collectionName.orElse(schemaFieldName), key.get());
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java
index 720eb87ec5c0e..b0ff6459ffbee 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/AspectExtractor.java
@@ -10,29 +10,31 @@
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 
-
-/**
- * Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}.
- */
+/** Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. */
 @Slf4j
 public class AspectExtractor {
 
-  private AspectExtractor() {
-  }
+  private AspectExtractor() {}
 
   public static Map<String, RecordTemplate> extractAspectRecords(RecordTemplate snapshot) {
-    return ModelUtils.getAspectsFromSnapshot(snapshot)
-        .stream()
-        .collect(Collectors.toMap(record -> getAspectNameFromSchema(record.schema()), Function.identity()));
+    return ModelUtils.getAspectsFromSnapshot(snapshot).stream()
+        .collect(
+            Collectors.toMap(
+                record -> getAspectNameFromSchema(record.schema()), Function.identity()));
   }
 
   private static String getAspectNameFromSchema(final RecordDataSchema aspectSchema) {
-    final Object aspectAnnotationObj = aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME);
+    final Object aspectAnnotationObj =
+        aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME);
     if (aspectAnnotationObj != null) {
-      return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()).getName();
+      return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName())
+          .getName();
     }
-    log.error(String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
+    log.error(
+        String.format(
+            "Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
     throw new IllegalArgumentException(
-        String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
+        String.format(
+            "Failed to extract aspect name from provided schema %s", aspectSchema.getName()));
   }
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java
index 6cc4fa4cd362d..899f66e66ea5a 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/extractor/FieldExtractor.java
@@ -1,8 +1,8 @@
 package com.linkedin.metadata.models.extractor;
 
+import com.datahub.util.RecordUtils;
 import com.linkedin.data.schema.PathSpec;
 import com.linkedin.data.template.RecordTemplate;
-import com.datahub.util.RecordUtils;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.FieldSpec;
@@ -16,28 +16,26 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;
 
-
-/**
- * Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}.
- */
+/** Extracts fields from a RecordTemplate based on the appropriate {@link FieldSpec}. */
 public class FieldExtractor {
 
   private static final String ARRAY_WILDCARD = "*";
   private static final int MAX_VALUE_LENGTH = 200;
 
-  private FieldExtractor() {
-  }
+  private FieldExtractor() {}
 
   private static long getNumArrayWildcards(PathSpec pathSpec) {
     return pathSpec.getPathComponents().stream().filter(ARRAY_WILDCARD::equals).count();
   }
 
   // Extract the value of each field in the field specs from the input record
-  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull RecordTemplate record, List<T> fieldSpecs) {
+  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(
+      @Nonnull RecordTemplate record, List<T> fieldSpecs) {
     return extractFields(record, fieldSpecs, MAX_VALUE_LENGTH);
   }
 
-  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull RecordTemplate record, List<T> fieldSpecs, int maxValueLength) {
+  public static <T extends FieldSpec> Map<T, List<Object>> extractFields(
+      @Nonnull RecordTemplate record, List<T> fieldSpecs, int maxValueLength) {
     final Map<T, List<Object>> extractedFields = new HashMap<>();
     for (T fieldSpec : fieldSpecs) {
       Optional<Object> value = RecordUtils.getFieldValue(record, fieldSpec.getPath());
@@ -49,12 +47,16 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull
       if (numArrayWildcards == 0) {
         // For maps, convert it into a list of the form key=value (Filter out long values)
         if (value.get() instanceof Map) {
-          extractedFields.put(fieldSpec, ((Map) value.get()).entrySet()
-              .stream()
-              .map(entry -> new Pair<>(entry.getKey().toString(), entry.getValue().toString()))
-              .filter(entry -> entry.getValue().length() < maxValueLength)
-              .map(entry -> entry.getKey() + "=" + entry.getValue())
-              .collect(Collectors.toList()));
+          extractedFields.put(
+              fieldSpec,
+              ((Map) value.get())
+                  .entrySet().stream()
+                      .map(
+                          entry ->
+                              new Pair<>(entry.getKey().toString(), entry.getValue().toString()))
+                      .filter(entry -> entry.getValue().length() < maxValueLength)
+                      .map(entry -> entry.getKey() + "=" + entry.getValue())
+                      .collect(Collectors.toList()));
         } else {
           extractedFields.put(fieldSpec, Collections.singletonList(value.get()));
         }
@@ -62,7 +64,10 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull
           List<Object> valueList = (List<Object>) value.get();
           // If the field is a nested list of values, flatten it
           for (int i = 0; i < numArrayWildcards - 1; i++) {
-            valueList = valueList.stream().flatMap(v -> ((List<Object>) v).stream()).collect(Collectors.toList());
+            valueList =
+                valueList.stream()
+                    .flatMap(v -> ((List<Object>) v).stream())
+                    .collect(Collectors.toList());
           }
           extractedFields.put(fieldSpec, valueList);
         }
@@ -71,14 +76,20 @@ public static <T extends FieldSpec> Map<T, List<Object>> extractFields(@Nonnull
     return extractedFields;
   }
 
-  public static <T extends FieldSpec> Map<T, List<Object>> extractFieldsFromSnapshot(RecordTemplate snapshot,
-      EntitySpec entitySpec, Function<AspectSpec, List<T>> getFieldSpecsFunc, int maxValueLength) {
+  public static <T extends FieldSpec> Map<T, List<Object>> extractFieldsFromSnapshot(
+      RecordTemplate snapshot,
+      EntitySpec entitySpec,
+      Function<AspectSpec, List<T>> getFieldSpecsFunc,
+      int maxValueLength) {
     final Map<String, RecordTemplate> aspects = AspectExtractor.extractAspectRecords(snapshot);
     final Map<T, List<Object>> extractedFields = new HashMap<>();
-    aspects.keySet()
-        .stream()
-        .map(aspectName -> FieldExtractor.extractFields(aspects.get(aspectName),
-            getFieldSpecsFunc.apply(entitySpec.getAspectSpec(aspectName)), maxValueLength))
+    aspects.keySet().stream()
+        .map(
aspectName -> + FieldExtractor.extractFields( + aspects.get(aspectName), + getFieldSpecsFunc.apply(entitySpec.getAspectSpec(aspectName)), + maxValueLength)) .forEach(extractedFields::putAll); return extractedFields; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java index 95195620cf85a..fba916abd2430 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/ConfigEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @@ -32,13 +35,9 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects - * from an entity registry config yaml file + * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from an + * entity registry config yaml file */ @Slf4j public class ConfigEntityRegistry implements EntityRegistry { @@ -51,37 +50,55 @@ public class ConfigEntityRegistry implements EntityRegistry { private final Map _aspectNameToSpec; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public ConfigEntityRegistry(Pair configFileClassPathPair) throws IOException { - this(DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), configFileClassPathPair.getFirst()); + this( + DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), + configFileClassPathPair.getFirst()); } - public ConfigEntityRegistry(String entityRegistryRoot) throws EntityRegistryException, IOException { + public ConfigEntityRegistry(String entityRegistryRoot) + throws EntityRegistryException, IOException { this(getFileAndClassPath(entityRegistryRoot)); } - private static Pair getFileAndClassPath(String entityRegistryRoot) throws IOException, EntityRegistryException { + private static Pair getFileAndClassPath(String entityRegistryRoot) + throws IOException, EntityRegistryException { Path entityRegistryRootLoc = Paths.get(entityRegistryRoot); if (Files.isDirectory(entityRegistryRootLoc)) { // Look for entity_registry.yml or entity_registry.yaml in the root folder - List yamlFiles = Files.walk(entityRegistryRootLoc, 1) - .filter(Files::isRegularFile) - .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) - .collect(Collectors.toList()); + List yamlFiles = + 
Files.walk(entityRegistryRootLoc, 1) + .filter(Files::isRegularFile) + .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) + .collect(Collectors.toList()); if (yamlFiles.size() == 0) { throw new EntityRegistryException( - String.format("Did not find an entity registry (entity_registry.yaml/yml) under %s", entityRegistryRootLoc)); + String.format( + "Did not find an entity registry (entity_registry.yaml/yml) under %s", + entityRegistryRootLoc)); } if (yamlFiles.size() > 1) { - log.warn("Found more than one yaml file in the directory {}. Will pick the first {}", - entityRegistryRootLoc, yamlFiles.get(0)); + log.warn( + "Found more than one yaml file in the directory {}. Will pick the first {}", + entityRegistryRootLoc, + yamlFiles.get(0)); } Path entityRegistryFile = yamlFiles.get(0); - log.info("Loading custom config entity file: {}, dir: {}", entityRegistryFile, entityRegistryRootLoc); + log.info( + "Loading custom config entity file: {}, dir: {}", + entityRegistryFile, + entityRegistryRootLoc); return new Pair<>(entityRegistryFile, entityRegistryRootLoc); } else { // We assume that the file being passed in is a bare entity registry yaml file @@ -94,7 +111,8 @@ public ConfigEntityRegistry(InputStream configFileInputStream) { this(DataSchemaFactory.getInstance(), configFileInputStream); } - public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath) throws FileNotFoundException { + public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath) + throws FileNotFoundException { this(dataSchemaFactory, new FileInputStream(configFilePath.toString())); } @@ -106,7 +124,8 @@ public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con } catch (IOException e) { e.printStackTrace(); throw new IllegalArgumentException( - String.format("Error while reading config file in path %s: %s", configFileStream, e.getMessage())); + String.format( + "Error while reading config file in path %s: %s", configFileStream, e.getMessage())); } if (entities.getId() != null) { identifier = entities.getId(); @@ -120,12 +139,16 @@ public ConfigEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con for (Entity entity : entities.getEntities()) { List aspectSpecs = new ArrayList<>(); aspectSpecs.add(buildAspectSpec(entity.getKeyAspect(), entitySpecBuilder)); - entity.getAspects().forEach(aspect -> aspectSpecs.add(buildAspectSpec(aspect, entitySpecBuilder))); + entity + .getAspects() + .forEach(aspect -> aspectSpecs.add(buildAspectSpec(aspect, entitySpecBuilder))); EntitySpec entitySpec; Optional entitySchema = dataSchemaFactory.getEntitySchema(entity.getName()); if (!entitySchema.isPresent()) { - entitySpec = entitySpecBuilder.buildConfigEntitySpec(entity.getName(), entity.getKeyAspect(), aspectSpecs); + entitySpec = + entitySpecBuilder.buildConfigEntitySpec( + entity.getName(), entity.getKeyAspect(), aspectSpecs); } else { entitySpec = entitySpecBuilder.buildEntitySpec(entitySchema.get(), aspectSpecs); } @@ -210,7 +233,7 @@ public Map getEventSpecs() { @Override public AspectTemplateEngine getAspectTemplateEngine() { - //TODO: add support for config based aspect templates + // TODO: add support for config based aspect templates return new AspectTemplateEngine(); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java index cf9ca68d0ee4f..8c415d56f0d5f 100644 --- 
a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistry.java @@ -9,7 +9,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** * The Entity Registry provides a mechanism to retrieve metadata about entities modeled in GMA. * Metadata includes the entity's common name, the aspects that comprise it, and search index + @@ -23,8 +22,10 @@ default String getIdentifier() { /** * Given an entity name, returns an instance of {@link DefaultEntitySpec} + * * @param entityName the name of the entity to be retrieved - * @return an {@link DefaultEntitySpec} corresponding to the entity name provided, null if none exists. + * @return an {@link DefaultEntitySpec} corresponding to the entity name provided, null if none + * exists. */ @Nonnull EntitySpec getEntitySpec(@Nonnull final String entityName); @@ -33,34 +34,36 @@ default String getIdentifier() { * Given an event name, returns an instance of {@link DefaultEventSpec}. * * @param eventName the name of the event to be retrieved - * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none exists. + * @return an {@link DefaultEventSpec} corresponding to the entity name provided, null if none + * exists. */ @Nullable EventSpec getEventSpec(@Nonnull final String eventName); /** * Returns all {@link DefaultEntitySpec}s that the registry is aware of. + * * @return a map of String to {@link DefaultEntitySpec}s, empty map if none exists. */ @Nonnull Map getEntitySpecs(); - /** * Returns all {@link AspectSpec}s that the registry is aware of. + * * @return a map of String to {@link AspectSpec}s, empty map if none exists. */ @Nonnull Map getAspectSpecs(); - /** - * Returns all {@link EventSpec}s that the registry is aware of. - */ + /** Returns all {@link EventSpec}s that the registry is aware of. 
*/ @Nonnull Map getEventSpecs(); /** - * Returns an {@link AspectTemplateEngine} that is used for generating templates from {@link com.linkedin.metadata.models.AspectSpec}s + * Returns an {@link AspectTemplateEngine} that is used for generating templates from {@link + * com.linkedin.metadata.models.AspectSpec}s + * * @return a template engine instance associated with this registry */ @Nonnull diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java index d43782ce0f07f..8d108445e67be 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/EntityRegistryUtils.java @@ -8,17 +8,17 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class EntityRegistryUtils { - private EntityRegistryUtils() { - - } + private EntityRegistryUtils() {} public static Map populateAspectMap(List entitySpecs) { return entitySpecs.stream() .map(EntitySpec::getAspectSpecs) .flatMap(Collection::stream) - .collect(Collectors.toMap(AspectSpec::getName, Function.identity(), (aspectSpec1, aspectSpec2) -> aspectSpec1)); + .collect( + Collectors.toMap( + AspectSpec::getName, + Function.identity(), + (aspectSpec1, aspectSpec2) -> aspectSpec1)); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java index 345d5aa02f398..2a5d09db00396 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java @@ -19,10 +19,10 @@ import lombok.Value; import org.apache.commons.lang3.tuple.Triple; - /** - * The Lineage Registry provides a mechanism to retrieve metadata about the lineage relationships between different entities - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * The Lineage Registry provides a mechanism to retrieve metadata about the lineage relationships + * between different entities Lineage relationship denotes whether an entity is directly upstream or + * downstream of another entity */ public class LineageRegistry { @@ -35,55 +35,73 @@ public LineageRegistry(EntityRegistry entityRegistry) { } private Map buildLineageSpecs(EntityRegistry entityRegistry) { - // 1. Flatten relationship annotations into a list of lineage edges (source, dest, type, isUpstream) - Collection lineageEdges = entityRegistry.getEntitySpecs() - .entrySet() - .stream() - .flatMap(entry -> entry.getValue() - .getRelationshipFieldSpecs() - .stream() + // 1. 
Flatten relationship annotations into a list of lineage edges (source, dest, type, + // isUpstream) + Collection lineageEdges = + entityRegistry.getEntitySpecs().entrySet().stream() .flatMap( - spec -> getLineageEdgesFromRelationshipAnnotation(entry.getKey(), spec.getRelationshipAnnotation()))) - // If there are multiple edges with the same source, dest, edge type, get one of them - .collect(Collectors.toMap(edge -> Triple.of(edge.getSourceEntity(), edge.getDestEntity(), edge.getType()), - Function.identity(), (x1, x2) -> x1)) - .values(); + entry -> + entry.getValue().getRelationshipFieldSpecs().stream() + .flatMap( + spec -> + getLineageEdgesFromRelationshipAnnotation( + entry.getKey(), spec.getRelationshipAnnotation()))) + // If there are multiple edges with the same source, dest, edge type, get one of them + .collect( + Collectors.toMap( + edge -> Triple.of(edge.getSourceEntity(), edge.getDestEntity(), edge.getType()), + Function.identity(), + (x1, x2) -> x1)) + .values(); // 2. Figure out the upstream and downstream edges of each entity type Map> upstreamPerEntity = new HashMap<>(); Map> downstreamPerEntity = new HashMap<>(); - // A downstreamOf B : A -> upstream (downstreamOf, OUTGOING), B -> downstream (downstreamOf, INCOMING) + // A downstreamOf B : A -> upstream (downstreamOf, OUTGOING), B -> downstream (downstreamOf, + // INCOMING) // A produces B : A -> downstream (produces, OUTGOING), B -> upstream (produces, INCOMING) for (LineageEdge edge : lineageEdges) { if (edge.isUpstream()) { - upstreamPerEntity.computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) + upstreamPerEntity + .computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.OUTGOING, edge.destEntity)); - downstreamPerEntity.computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) + downstreamPerEntity + .computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.INCOMING, edge.sourceEntity)); } else { - downstreamPerEntity.computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) + downstreamPerEntity + .computeIfAbsent(edge.sourceEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.OUTGOING, edge.destEntity)); - upstreamPerEntity.computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) + upstreamPerEntity + .computeIfAbsent(edge.destEntity.toLowerCase(), (k) -> new HashSet<>()) .add(new EdgeInfo(edge.type, RelationshipDirection.INCOMING, edge.sourceEntity)); } } - return entityRegistry.getEntitySpecs() - .keySet() - .stream() - .collect(Collectors.toMap(String::toLowerCase, entityName -> new LineageSpec( - new ArrayList<>(upstreamPerEntity.getOrDefault(entityName.toLowerCase(), Collections.emptySet())), - new ArrayList<>(downstreamPerEntity.getOrDefault(entityName.toLowerCase(), Collections.emptySet()))))); + return entityRegistry.getEntitySpecs().keySet().stream() + .collect( + Collectors.toMap( + String::toLowerCase, + entityName -> + new LineageSpec( + new ArrayList<>( + upstreamPerEntity.getOrDefault( + entityName.toLowerCase(), Collections.emptySet())), + new ArrayList<>( + downstreamPerEntity.getOrDefault( + entityName.toLowerCase(), Collections.emptySet()))))); } - private Stream getLineageEdgesFromRelationshipAnnotation(String sourceEntity, - RelationshipAnnotation annotation) { + private Stream getLineageEdgesFromRelationshipAnnotation( + String sourceEntity, 
RelationshipAnnotation annotation) { if (!annotation.isLineage()) { return Stream.empty(); } - return annotation.getValidDestinationTypes() - .stream() - .map(destEntity -> new LineageEdge(sourceEntity, destEntity, annotation.getName(), annotation.isUpstream())); + return annotation.getValidDestinationTypes().stream() + .map( + destEntity -> + new LineageEdge( + sourceEntity, destEntity, annotation.getName(), annotation.isUpstream())); } public LineageSpec getLineageSpec(String entityName) { @@ -92,11 +110,13 @@ public LineageSpec getLineageSpec(String entityName) { public Set getEntitiesWithLineageToEntityType(String entityType) { Map specs = _entityRegistry.getEntitySpecs(); - Set result = Streams.concat(_lineageSpecMap.get(entityType.toLowerCase()).getDownstreamEdges().stream(), - _lineageSpecMap.get(entityType.toLowerCase()).getUpstreamEdges().stream()) - .map(EdgeInfo::getOpposingEntityType) - .map(entity -> specs.get(entity.toLowerCase()).getName()) - .collect(Collectors.toSet()); + Set result = + Streams.concat( + _lineageSpecMap.get(entityType.toLowerCase()).getDownstreamEdges().stream(), + _lineageSpecMap.get(entityType.toLowerCase()).getUpstreamEdges().stream()) + .map(EdgeInfo::getOpposingEntityType) + .map(entity -> specs.get(entity.toLowerCase()).getName()) + .collect(Collectors.toSet()); result.add(entityType); return result; } @@ -120,9 +140,11 @@ public List getLineageRelationships(String entityName, LineageDirectio private List getSchemaFieldRelationships(LineageDirection direction) { List schemaFieldEdges = new ArrayList<>(); if (direction == LineageDirection.UPSTREAM) { - schemaFieldEdges.add(new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "schemafield")); + schemaFieldEdges.add( + new EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "schemafield")); } else { - schemaFieldEdges.add(new EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "schemafield")); + schemaFieldEdges.add( + new EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "schemafield")); } return schemaFieldEdges; } @@ -165,8 +187,9 @@ public boolean equals(Object o) { public int hashCode() { return ((this.type == null ? 0 : this.type.toLowerCase().hashCode()) ^ (this.direction == null ? 0 : this.direction.hashCode()) - ^ (this.opposingEntityType == null ? 0 : this.opposingEntityType.toLowerCase().hashCode())); + ^ (this.opposingEntityType == null + ? 0 + : this.opposingEntityType.toLowerCase().hashCode())); } } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java index f0ec57b8d81c3..06aeefc2e5aa0 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/MergedEntityRegistry.java @@ -19,10 +19,7 @@ import lombok.Setter; import lombok.extern.slf4j.Slf4j; - -/** - * Combines results from two entity registries, where the second takes precedence - */ +/** Combines results from two entity registries, where the second takes precedence */ @Slf4j public class MergedEntityRegistry implements EntityRegistry { @@ -34,8 +31,14 @@ public class MergedEntityRegistry implements EntityRegistry { public MergedEntityRegistry(EntityRegistry baseEntityRegistry) { // baseEntityRegistry.get*Specs() can return immutable Collections.emptyMap() which fails // when this class attempts .put* operations on it. 
- entityNameToSpec = baseEntityRegistry.getEntitySpecs() != null ? new HashMap<>(baseEntityRegistry.getEntitySpecs()) : new HashMap<>(); - eventNameToSpec = baseEntityRegistry.getEventSpecs() != null ? new HashMap<>(baseEntityRegistry.getEventSpecs()) : new HashMap<>(); + entityNameToSpec = + baseEntityRegistry.getEntitySpecs() != null + ? new HashMap<>(baseEntityRegistry.getEntitySpecs()) + : new HashMap<>(); + eventNameToSpec = + baseEntityRegistry.getEventSpecs() != null + ? new HashMap<>(baseEntityRegistry.getEventSpecs()) + : new HashMap<>(); baseEntityRegistry.getAspectTemplateEngine(); _aspectTemplateEngine = baseEntityRegistry.getAspectTemplateEngine(); _aspectNameToSpec = baseEntityRegistry.getAspectSpecs(); @@ -44,22 +47,28 @@ public MergedEntityRegistry(EntityRegistry baseEntityRegistry) { private void validateEntitySpec(EntitySpec entitySpec, final ValidationResult validationResult) { if (entitySpec.getKeyAspectSpec() == null) { validationResult.setValid(false); - validationResult.getValidationFailures().add(String.format("Key aspect is missing in entity {}", entitySpec.getName())); + validationResult + .getValidationFailures() + .add(String.format("Key aspect is missing in entity {}", entitySpec.getName())); } } - public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) throws EntityRegistryException { + public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) + throws EntityRegistryException { ValidationResult validationResult = validatePatch(patchEntityRegistry); if (!validationResult.isValid()) { - throw new EntityRegistryException(String.format("Failed to validate new registry with %s", validationResult.validationFailures.stream().collect( - Collectors.joining("\n")))); + throw new EntityRegistryException( + String.format( + "Failed to validate new registry with %s", + validationResult.validationFailures.stream().collect(Collectors.joining("\n")))); } // Merge Entity Specs for (Map.Entry e2Entry : patchEntityRegistry.getEntitySpecs().entrySet()) { if (entityNameToSpec.containsKey(e2Entry.getKey())) { - EntitySpec mergeEntitySpec = mergeEntitySpecs(entityNameToSpec.get(e2Entry.getKey()), e2Entry.getValue()); + EntitySpec mergeEntitySpec = + mergeEntitySpecs(entityNameToSpec.get(e2Entry.getKey()), e2Entry.getValue()); entityNameToSpec.put(e2Entry.getKey(), mergeEntitySpec); } else { // We are inserting a new entity into the registry @@ -71,41 +80,63 @@ public MergedEntityRegistry apply(EntityRegistry patchEntityRegistry) throws Ent if (patchEntityRegistry.getEventSpecs().size() > 0) { eventNameToSpec.putAll(patchEntityRegistry.getEventSpecs()); } - //TODO: Validate that the entity registries don't have conflicts among each other + // TODO: Validate that the entity registries don't have conflicts among each other return this; } private ValidationResult validatePatch(EntityRegistry patchEntityRegistry) { ValidationResult validationResult = new ValidationResult(); for (Map.Entry e2Entry : patchEntityRegistry.getEntitySpecs().entrySet()) { - checkMergeable(entityNameToSpec.getOrDefault(e2Entry.getKey(), null), e2Entry.getValue(), validationResult); + checkMergeable( + entityNameToSpec.getOrDefault(e2Entry.getKey(), null), + e2Entry.getValue(), + validationResult); } return validationResult; } - private void checkMergeable(EntitySpec existingEntitySpec, EntitySpec newEntitySpec, final ValidationResult validationResult) { + private void checkMergeable( + EntitySpec existingEntitySpec, + EntitySpec newEntitySpec, + final ValidationResult validationResult) 
{ if (existingEntitySpec != null) { - existingEntitySpec.getAspectSpecMap().entrySet().forEach(aspectSpecEntry -> { - if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) { - CompatibilityResult result = CompatibilityChecker.checkCompatibility(aspectSpecEntry.getValue().getPegasusSchema(), newEntitySpec.getAspectSpec( - aspectSpecEntry.getKey()).getPegasusSchema(), new CompatibilityOptions()); - if (result.isError()) { - log.error("{} schema is not compatible with previous schema due to {}", aspectSpecEntry.getKey(), result.getMessages()); - // we want to continue processing all aspects to collect all failures - validationResult.setValid(false); - validationResult.getValidationFailures().add( - String.format("%s schema is not compatible with previous schema due to %s", aspectSpecEntry.getKey(), result.getMessages())); - } else { - log.info("{} schema is compatible with previous schema due to {}", aspectSpecEntry.getKey(), result.getMessages()); - } - } - }); + existingEntitySpec + .getAspectSpecMap() + .entrySet() + .forEach( + aspectSpecEntry -> { + if (newEntitySpec.hasAspect(aspectSpecEntry.getKey())) { + CompatibilityResult result = + CompatibilityChecker.checkCompatibility( + aspectSpecEntry.getValue().getPegasusSchema(), + newEntitySpec.getAspectSpec(aspectSpecEntry.getKey()).getPegasusSchema(), + new CompatibilityOptions()); + if (result.isError()) { + log.error( + "{} schema is not compatible with previous schema due to {}", + aspectSpecEntry.getKey(), + result.getMessages()); + // we want to continue processing all aspects to collect all failures + validationResult.setValid(false); + validationResult + .getValidationFailures() + .add( + String.format( + "%s schema is not compatible with previous schema due to %s", + aspectSpecEntry.getKey(), result.getMessages())); + } else { + log.info( + "{} schema is compatible with previous schema due to {}", + aspectSpecEntry.getKey(), + result.getMessages()); + } + } + }); } else { validateEntitySpec(newEntitySpec, validationResult); } } - private EntitySpec mergeEntitySpecs(EntitySpec existingEntitySpec, EntitySpec newEntitySpec) { Map aspectSpecMap = new HashMap<>(existingEntitySpec.getAspectSpecMap()); aspectSpecMap.putAll(newEntitySpec.getAspectSpecMap()); @@ -116,8 +147,11 @@ private EntitySpec mergeEntitySpecs(EntitySpec existingEntitySpec, EntitySpec ne existingEntitySpec.getEntityAnnotation().getKeyAspect(), aspectSpecMap.values()); } - return new DefaultEntitySpec(aspectSpecMap.values(), existingEntitySpec.getEntityAnnotation(), - existingEntitySpec.getSnapshotSchema(), existingEntitySpec.getAspectTyperefSchema()); + return new DefaultEntitySpec( + aspectSpecMap.values(), + existingEntitySpec.getEntityAnnotation(), + existingEntitySpec.getSnapshotSchema(), + existingEntitySpec.getAspectTyperefSchema()); } @Nonnull diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java index 76d9c8ceb089c..9eafbe05a4fc6 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PatchEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import 
com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @@ -32,13 +35,10 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that is similar to {@link ConfigEntityRegistry} but different in one important way. - * It builds potentially partially specified {@link com.linkedin.metadata.models.PartialEntitySpec} objects from an entity registry config yaml file + * Implementation of {@link EntityRegistry} that is similar to {@link ConfigEntityRegistry} but + * different in one important way. It builds potentially partially specified {@link + * com.linkedin.metadata.models.PartialEntitySpec} objects from an entity registry config yaml file */ @Slf4j public class PatchEntityRegistry implements EntityRegistry { @@ -53,37 +53,50 @@ public class PatchEntityRegistry implements EntityRegistry { private final String identifier; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(new YAMLFactory()); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } @Override public String toString() { StringBuilder sb = new StringBuilder("PatchEntityRegistry[" + "identifier=" + identifier + ';'); - entityNameToSpec.entrySet() - .stream() - .forEach(entry -> sb.append("[entityName=") - .append(entry.getKey()) - .append(";aspects=[") - .append( - entry.getValue().getAspectSpecs().stream().map(spec -> spec.getName()).collect(Collectors.joining(","))) - .append("]]")); - eventNameToSpec.entrySet() - .stream() - .forEach(entry -> sb.append("[eventName=") - .append(entry.getKey()) - .append("]")); + entityNameToSpec.entrySet().stream() + .forEach( + entry -> + sb.append("[entityName=") + .append(entry.getKey()) + .append(";aspects=[") + .append( + entry.getValue().getAspectSpecs().stream() + .map(spec -> spec.getName()) + .collect(Collectors.joining(","))) + .append("]]")); + eventNameToSpec.entrySet().stream() + .forEach(entry -> sb.append("[eventName=").append(entry.getKey()).append("]")); return sb.toString(); } - public PatchEntityRegistry(Pair configFileClassPathPair, String registryName, - ComparableVersion registryVersion) throws IOException, EntityRegistryException { - this(DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), configFileClassPathPair.getFirst(), - registryName, registryVersion); + public PatchEntityRegistry( + Pair configFileClassPathPair, + String registryName, + ComparableVersion registryVersion) + throws IOException, EntityRegistryException { + this( + DataSchemaFactory.withCustomClasspath(configFileClassPathPair.getSecond()), + configFileClassPathPair.getFirst(), + registryName, + registryVersion); } - public PatchEntityRegistry(String entityRegistryRoot, String registryName, ComparableVersion registryVersion) + public PatchEntityRegistry( + String entityRegistryRoot, String registryName, ComparableVersion registryVersion) 
throws EntityRegistryException, IOException { this(getFileAndClassPath(entityRegistryRoot), registryName, registryVersion); } @@ -93,21 +106,28 @@ private static Pair getFileAndClassPath(String entityRegistryRoot) Path entityRegistryRootLoc = Paths.get(entityRegistryRoot); if (Files.isDirectory(entityRegistryRootLoc)) { // Look for entity-registry.yml or entity-registry.yaml in the root folder - List yamlFiles = Files.walk(entityRegistryRootLoc, 1) - .filter(Files::isRegularFile) - .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) - .collect(Collectors.toList()); + List yamlFiles = + Files.walk(entityRegistryRootLoc, 1) + .filter(Files::isRegularFile) + .filter(f -> f.endsWith("entity-registry.yml") || f.endsWith("entity-registry.yaml")) + .collect(Collectors.toList()); if (yamlFiles.size() == 0) { throw new EntityRegistryException( - String.format("Did not find an entity registry (entity-registry.yaml/yml) under %s", + String.format( + "Did not find an entity registry (entity-registry.yaml/yml) under %s", entityRegistryRootLoc)); } if (yamlFiles.size() > 1) { - log.warn("Found more than one yaml file in the directory {}. Will pick the first {}", entityRegistryRootLoc, + log.warn( + "Found more than one yaml file in the directory {}. Will pick the first {}", + entityRegistryRootLoc, yamlFiles.get(0)); } Path entityRegistryFile = yamlFiles.get(0); - log.info("Loading custom config entity file: {}, dir: {}", entityRegistryFile, entityRegistryRootLoc); + log.info( + "Loading custom config entity file: {}, dir: {}", + entityRegistryFile, + entityRegistryRootLoc); return new Pair<>(entityRegistryFile, entityRegistryRootLoc); } else { // We assume that the file being passed in is a bare entity registry yaml file @@ -116,13 +136,25 @@ private static Pair getFileAndClassPath(String entityRegistryRoot) } } - public PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, Path configFilePath, String registryName, - ComparableVersion registryVersion) throws FileNotFoundException, EntityRegistryException { - this(dataSchemaFactory, new FileInputStream(configFilePath.toString()), registryName, registryVersion); + public PatchEntityRegistry( + DataSchemaFactory dataSchemaFactory, + Path configFilePath, + String registryName, + ComparableVersion registryVersion) + throws FileNotFoundException, EntityRegistryException { + this( + dataSchemaFactory, + new FileInputStream(configFilePath.toString()), + registryName, + registryVersion); } - private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream configFileStream, String registryName, - ComparableVersion registryVersion) throws EntityRegistryException { + private PatchEntityRegistry( + DataSchemaFactory dataSchemaFactory, + InputStream configFileStream, + String registryName, + ComparableVersion registryVersion) + throws EntityRegistryException { this.dataSchemaFactory = dataSchemaFactory; this.registryName = registryName; this.registryVersion = registryVersion; @@ -133,7 +165,8 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con } catch (IOException e) { e.printStackTrace(); throw new IllegalArgumentException( - String.format("Error while reading config file in path %s: %s", configFileStream, e.getMessage())); + String.format( + "Error while reading config file in path %s: %s", configFileStream, e.getMessage())); } if (entities.getId() != null) { identifier = entities.getId(); @@ -144,7 +177,9 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream 
con // Build Entity Specs EntitySpecBuilder entitySpecBuilder = new EntitySpecBuilder(); for (Entity entity : entities.getEntities()) { - log.info("Discovered entity {} with aspects {}", entity.getName(), + log.info( + "Discovered entity {} with aspects {}", + entity.getName(), entity.getAspects().stream().collect(Collectors.joining())); List aspectSpecs = new ArrayList<>(); if (entity.getKeyAspect() != null) { @@ -152,16 +187,20 @@ private PatchEntityRegistry(DataSchemaFactory dataSchemaFactory, InputStream con log.info("Adding key aspect {} with spec {}", entity.getKeyAspect(), keyAspectSpec); aspectSpecs.add(keyAspectSpec); } - entity.getAspects().forEach(aspect -> { - if (!aspect.equals(entity.getKeyAspect())) { - AspectSpec aspectSpec = buildAspectSpec(aspect, entitySpecBuilder); - log.info("Adding aspect {} with spec {}", aspect, aspectSpec); - aspectSpecs.add(aspectSpec); - } - }); + entity + .getAspects() + .forEach( + aspect -> { + if (!aspect.equals(entity.getKeyAspect())) { + AspectSpec aspectSpec = buildAspectSpec(aspect, entitySpecBuilder); + log.info("Adding aspect {} with spec {}", aspect, aspectSpec); + aspectSpecs.add(aspectSpec); + } + }); EntitySpec entitySpec = - entitySpecBuilder.buildPartialEntitySpec(entity.getName(), entity.getKeyAspect(), aspectSpecs); + entitySpecBuilder.buildPartialEntitySpec( + entity.getName(), entity.getKeyAspect(), aspectSpecs); entityNameToSpec.put(entity.getName().toLowerCase(), entitySpec); } @@ -225,7 +264,7 @@ public Map getEventSpecs() { @Nonnull @Override public AspectTemplateEngine getAspectTemplateEngine() { - //TODO: support patch based templates + // TODO: support patch based templates return new AspectTemplateEngine(); } @@ -236,7 +275,8 @@ private AspectSpec buildAspectSpec(String aspectName, EntitySpecBuilder entitySp if (!aspectSchema.isPresent()) { throw new IllegalArgumentException(String.format("Aspect %s does not exist", aspectName)); } - AspectSpec aspectSpec = entitySpecBuilder.buildAspectSpec(aspectSchema.get(), aspectClass.get()); + AspectSpec aspectSpec = + entitySpecBuilder.buildAspectSpec(aspectSchema.get(), aspectClass.get()); aspectSpec.setRegistryName(this.registryName); aspectSpec.setRegistryVersion(this.registryVersion); return aspectSpec; @@ -249,5 +289,4 @@ private EventSpec buildEventSpec(String eventName) { } return new EventSpecBuilder().buildEventSpec(eventName, eventSchema.get()); } - } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java index 4809b1f4d2f21..05c752a5c1575 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoader.java @@ -23,14 +23,14 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; - @Slf4j public class PluginEntityRegistryLoader { private static int _MAXLOADFAILURES = 5; private final Boolean scanningEnabled; private final String pluginDirectory; // Registry Name -> Registry Version -> (Registry, LoadResult) - private final Map>> patchRegistries; + private final Map>> + patchRegistries; private MergedEntityRegistry mergedEntityRegistry; private boolean started = false; private final Lock lock = new ReentrantLock(); @@ -41,7 +41,9 @@ public class PluginEntityRegistryLoader { public PluginEntityRegistryLoader(String 
pluginDirectory) { File directory = new File(pluginDirectory); if (!directory.exists() || !directory.isDirectory()) { - log.warn("{} directory does not exist or is not a directory. Plugin scanning will be disabled.", directory); + log.warn( + "{} directory does not exist or is not a directory. Plugin scanning will be disabled.", + directory); scanningEnabled = false; } else { scanningEnabled = true; @@ -50,7 +52,8 @@ public PluginEntityRegistryLoader(String pluginDirectory) { this.patchRegistries = new HashMap<>(); } - public Map>> getPatchRegistries() { + public Map>> + getPatchRegistries() { return patchRegistries; } @@ -59,7 +62,8 @@ public PluginEntityRegistryLoader withBaseRegistry(MergedEntityRegistry baseEnti return this; } - public PluginEntityRegistryLoader start(boolean waitForInitialization) throws InterruptedException { + public PluginEntityRegistryLoader start(boolean waitForInitialization) + throws InterruptedException { if (started) { log.warn("Already started!. Skipping"); return this; @@ -68,45 +72,69 @@ public PluginEntityRegistryLoader start(boolean waitForInitialization) throws In return this; } - executorService.scheduleAtFixedRate(() -> { - lock.lock(); - try { - Path rootPath = Paths.get(this.pluginDirectory); - int rootDepth = rootPath.getNameCount(); - List paths = - Files.walk(rootPath, 2).filter(x -> x.getNameCount() - rootDepth == 2).collect(Collectors.toList()); - log.debug("Size of list {}", paths.size()); - log.debug("Paths : {}", paths.stream().map(x -> x.toString() + ";").collect(Collectors.joining())); - List versionedPaths = paths.stream().filter(path -> { + executorService.scheduleAtFixedRate( + () -> { + lock.lock(); try { - ComparableVersion comparableVersion = new ComparableVersion(path.getName(rootDepth + 1).toString()); - return true; + Path rootPath = Paths.get(this.pluginDirectory); + int rootDepth = rootPath.getNameCount(); + List paths = + Files.walk(rootPath, 2) + .filter(x -> x.getNameCount() - rootDepth == 2) + .collect(Collectors.toList()); + log.debug("Size of list {}", paths.size()); + log.debug( + "Paths : {}", + paths.stream().map(x -> x.toString() + ";").collect(Collectors.joining())); + List versionedPaths = + paths.stream() + .filter( + path -> { + try { + ComparableVersion comparableVersion = + new ComparableVersion(path.getName(rootDepth + 1).toString()); + return true; + } catch (Exception e) { + log.warn( + String.format( + "Will skip %s since we weren't able to parse a legal version from it", + path.toString())); + return false; + } + }) + .sorted( + (path1, path2) -> { + if (path1.getName(rootDepth).equals(path2.getName(rootDepth))) { + return new ComparableVersion(path1.getName(rootDepth + 1).toString()) + .compareTo( + new ComparableVersion(path2.getName(rootDepth + 1).toString())); + } else { + return path1.getName(rootDepth).compareTo(path2.getName(rootDepth)); + } + }) + .collect(Collectors.toList()); + log.debug( + "Will be loading paths in this order {}", + versionedPaths.stream().map(p -> p.toString()).collect(Collectors.joining(";"))); + + versionedPaths.forEach( + x -> + loadOneRegistry( + this.mergedEntityRegistry, + x.getName(rootDepth).toString(), + x.getName(rootDepth + 1).toString(), + x.toString())); } catch (Exception e) { - log.warn( - String.format("Will skip %s since we weren't able to parse a legal version from it", path.toString())); - return false; - } - }).sorted((path1, path2) -> { - if (path1.getName(rootDepth).equals(path2.getName(rootDepth))) { - return new ComparableVersion(path1.getName(rootDepth + 
1).toString()).compareTo( - new ComparableVersion(path2.getName(rootDepth + 1).toString())); - } else { - return path1.getName(rootDepth).compareTo(path2.getName(rootDepth)); + log.warn("Failed to walk directory with exception", e); + } finally { + booted = true; + initialized.signal(); + lock.unlock(); } - }).collect(Collectors.toList()); - log.debug("Will be loading paths in this order {}", - versionedPaths.stream().map(p -> p.toString()).collect(Collectors.joining(";"))); - - versionedPaths.forEach(x -> loadOneRegistry(this.mergedEntityRegistry, x.getName(rootDepth).toString(), - x.getName(rootDepth + 1).toString(), x.toString())); - } catch (Exception e) { - log.warn("Failed to walk directory with exception", e); - } finally { - booted = true; - initialized.signal(); - lock.unlock(); - } - }, 0, 5, TimeUnit.SECONDS); + }, + 0, + 5, + TimeUnit.SECONDS); started = true; if (waitForInitialization) { lock.lock(); @@ -121,7 +149,10 @@ public PluginEntityRegistryLoader start(boolean waitForInitialization) throws In return this; } - private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registryName, String registryVersionStr, + private void loadOneRegistry( + MergedEntityRegistry parentRegistry, + String registryName, + String registryVersionStr, String patchDirectory) { ComparableVersion registryVersion = new ComparableVersion("0.0.0-dev"); try { @@ -129,11 +160,15 @@ private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registr log.debug("{}: Found registry version {}", this, maybeVersion); registryVersion = maybeVersion; } catch (IllegalArgumentException ie) { - log.warn("Found un-parseable registry version {}, will default to {}", registryVersionStr, registryVersion); + log.warn( + "Found un-parseable registry version {}, will default to {}", + registryVersionStr, + registryVersion); } if (registryExists(registryName, registryVersion)) { - log.debug("Registry {}:{} already exists. Skipping loading...", registryName, registryVersion); + log.debug( + "Registry {}:{} already exists. Skipping loading...", registryName, registryVersion); return; } else { log.info("{}: Registry {}:{} discovered. 
Loading...", this, registryName, registryVersion); @@ -160,31 +195,39 @@ private void loadOneRegistry(MergedEntityRegistry parentRegistry, String registr private boolean registryExists(String registryName, ComparableVersion registryVersion) { Map> nameTree = patchRegistries.getOrDefault(registryName, new HashMap<>()); - if (nameTree.containsKey(registryVersion) && ( - (nameTree.get(registryVersion).getSecond().getLoadResult() == LoadStatus.SUCCESS) || ( - nameTree.get(registryVersion).getSecond().getFailureCount() == _MAXLOADFAILURES))) { + if (nameTree.containsKey(registryVersion) + && ((nameTree.get(registryVersion).getSecond().getLoadResult() == LoadStatus.SUCCESS) + || (nameTree.get(registryVersion).getSecond().getFailureCount() == _MAXLOADFAILURES))) { return true; } return false; } - private void addLoadResult(String registryName, ComparableVersion semanticVersion, - EntityRegistryLoadResult loadResult, EntityRegistry e) { + private void addLoadResult( + String registryName, + ComparableVersion semanticVersion, + EntityRegistryLoadResult loadResult, + EntityRegistry e) { Map> nameTree = patchRegistries.getOrDefault(registryName, new HashMap<>()); if (nameTree.containsKey(semanticVersion)) { - if ((loadResult.getLoadResult() == LoadStatus.FAILURE) && ( - nameTree.get(semanticVersion).getSecond().getLoadResult() == LoadStatus.FAILURE)) { + if ((loadResult.getLoadResult() == LoadStatus.FAILURE) + && (nameTree.get(semanticVersion).getSecond().getLoadResult() == LoadStatus.FAILURE)) { // previous load and current loads are both failures loadResult.setFailureCount(nameTree.get(semanticVersion).getSecond().getFailureCount() + 1); if (loadResult.getFailureCount() == _MAXLOADFAILURES) { // Abandoning this registry version forever - log.error("Tried {} times. Failed to load registry {} with {}", loadResult.getFailureCount(), registryName, loadResult.getFailureReason()); + log.error( + "Tried {} times. 
Failed to load registry {} with {}", + loadResult.getFailureCount(), + registryName, + loadResult.getFailureReason()); } } log.warn( - String.format("Attempt %d to re-load registry %s: %s", loadResult.getFailureCount(), - registryName, semanticVersion)); + String.format( + "Attempt %d to re-load registry %s: %s", + loadResult.getFailureCount(), registryName, semanticVersion)); } nameTree.put(semanticVersion, new Pair<>(e, loadResult)); patchRegistries.put(registryName, nameTree); diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java index 32738d65573fd..cfc2c0901ce0d 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/SnapshotEntityRegistry.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; + import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; import com.linkedin.metadata.models.AspectSpec; @@ -27,13 +30,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.EntityRegistryUtils.*; - - /** - * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects - * from the a {@link Snapshot} Record Template present on the classpath + * Implementation of {@link EntityRegistry} that builds {@link DefaultEntitySpec} objects from the a + * {@link Snapshot} Record Template present on the classpath */ public class SnapshotEntityRegistry implements EntityRegistry { @@ -45,36 +44,41 @@ public class SnapshotEntityRegistry implements EntityRegistry { private static final SnapshotEntityRegistry INSTANCE = new SnapshotEntityRegistry(); public SnapshotEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder().buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder() + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); entitySpecs = new ArrayList<>(entityNameToSpec.values()); _aspectNameToSpec = populateAspectMap(entitySpecs); _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec); } public SnapshotEntityRegistry(UnionTemplate snapshot) { - entityNameToSpec = new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder() + .buildEntitySpecs(snapshot.schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); entitySpecs = new ArrayList<>(entityNameToSpec.values()); _aspectNameToSpec = populateAspectMap(entitySpecs); _aspectTemplateEngine = populateTemplateEngine(_aspectNameToSpec); } private AspectTemplateEngine populateTemplateEngine(Map aspectSpecs) { - // TODO: This should be more dynamic ideally, "hardcoding" for now, passing in aspect spec map preemptively + // TODO: This should be more dynamic ideally, "hardcoding" for now, passing in aspect spec map + // preemptively Map> aspectSpecTemplateMap = new 
HashMap<>(); aspectSpecTemplateMap.put(OWNERSHIP_ASPECT_NAME, new OwnershipTemplate()); aspectSpecTemplateMap.put(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesTemplate()); aspectSpecTemplateMap.put(UPSTREAM_LINEAGE_ASPECT_NAME, new UpstreamLineageTemplate()); aspectSpecTemplateMap.put(GLOBAL_TAGS_ASPECT_NAME, new GlobalTagsTemplate()); - aspectSpecTemplateMap.put(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataTemplate()); + aspectSpecTemplateMap.put( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataTemplate()); aspectSpecTemplateMap.put(GLOSSARY_TERMS_ASPECT_NAME, new GlossaryTermsTemplate()); aspectSpecTemplateMap.put(DATA_FLOW_INFO_ASPECT_NAME, new DataFlowInfoTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INFO_ASPECT_NAME, new DataJobInfoTemplate()); - aspectSpecTemplateMap.put(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate()); + aspectSpecTemplateMap.put( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, new DataProductPropertiesTemplate()); aspectSpecTemplateMap.put(DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, new DataJobInputOutputTemplate()); return new AspectTemplateEngine(aspectSpecTemplateMap); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java index f32aa1aa8bd47..e5d048d6ef647 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Entity.java @@ -1,15 +1,12 @@ package com.linkedin.metadata.models.registry.config; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import java.util.List; - +import javax.annotation.Nullable; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.NoArgsConstructor; import lombok.Value; -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; - -import javax.annotation.Nullable; - @Value @NoArgsConstructor(force = true, access = AccessLevel.PRIVATE) @@ -21,6 +18,5 @@ public class Entity { String keyAspect; List aspects; - @Nullable - String category; + @Nullable String category; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java index caec5fc69c148..f08fa5ba0a477 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/EntityRegistryLoadResult.java @@ -4,13 +4,11 @@ import lombok.Getter; import lombok.Setter; - @Builder @Getter public class EntityRegistryLoadResult { private LoadStatus loadResult; private String registryLocation; private String failureReason; - @Setter - private int failureCount; + @Setter private int failureCount; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java index 12c9f5ab36a09..4a868ed92e4a7 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/config/Event.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.models.registry.config; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import 
lombok.AccessLevel;
 import lombok.NoArgsConstructor;
 import lombok.Value;
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 @Value
 @NoArgsConstructor(force = true, access = AccessLevel.PRIVATE)
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java
index cf63e87abf7f9..9cd8e74d952d6 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/ArrayMergingTemplate.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template;
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -8,61 +10,68 @@
 import java.util.Collections;
 import java.util.List;
-import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
-
-
 public interface ArrayMergingTemplate extends Template {
   /**
-   * Takes an Array field on the {@link RecordTemplate} subtype along with a set of key fields to transform into a map
-   * Avoids producing side effects by copying nodes, use resulting node and not the original
+   * Takes an Array field on the {@link RecordTemplate} subtype along with a set of key fields to
+   * transform into a map. Avoids producing side effects by copying nodes; use the resulting node
+   * and not the original.
+   *
   * @param baseNode the base unmodified node
   * @param arrayFieldName name of the array field to be transformed
-   * @param keyFields subfields of the array object to be used as keys, empty implies the list is just strings to be merged
+   * @param keyFields subfields of the array object to be used as keys, empty implies the list is
+   *     just strings to be merged
   * @return the modified {@link JsonNode} with array fields transformed to maps
   */
-  default JsonNode arrayFieldToMap(JsonNode baseNode, String arrayFieldName, List keyFields) {
+  default JsonNode arrayFieldToMap(
+      JsonNode baseNode, String arrayFieldName, List keyFields) {
     JsonNode transformedNode = baseNode.deepCopy();
     JsonNode arrayNode = baseNode.get(arrayFieldName);
     ObjectNode mapNode = instance.objectNode();
     if (arrayNode instanceof ArrayNode) {
-      ((ArrayNode) arrayNode).elements()
-          .forEachRemaining(node -> {
-            ObjectNode keyValue = mapNode;
-            // Creates nested object of keys with final value being the full value of the node
-            JsonNode nodeClone = node.deepCopy();
-            if (!keyFields.isEmpty()) {
-              for (String keyField : keyFields) {
-                String key = node.get(keyField).asText();
-                keyValue = keyValue.get(key) == null ? (ObjectNode) keyValue.set(key, instance.objectNode()).get(key)
-                    : (ObjectNode) keyValue.get(key);
+      ((ArrayNode) arrayNode)
+          .elements()
+          .forEachRemaining(
+              node -> {
+                ObjectNode keyValue = mapNode;
+                // Creates nested object of keys with final value being the full value of the node
+                JsonNode nodeClone = node.deepCopy();
+                if (!keyFields.isEmpty()) {
+                  for (String keyField : keyFields) {
+                    String key = node.get(keyField).asText();
+                    keyValue =
+                        keyValue.get(key) == null
                            ? (ObjectNode) keyValue.set(key, instance.objectNode()).get(key)
+                            : (ObjectNode) keyValue.get(key);
+                  }
+                } else {
+                  // No key fields, assume String array
+                  nodeClone = instance.objectNode().set(((TextNode) node).asText(), node);
                }
-            } else {
-              // No key fields, assume String array
-              nodeClone = instance.objectNode().set(((TextNode) node).asText(), node);
-            }
-            keyValue.setAll((ObjectNode) nodeClone);
-          }
-      );
-
+                keyValue.setAll((ObjectNode) nodeClone);
+              });
     }
     return ((ObjectNode) transformedNode).set(arrayFieldName, mapNode);
   }
   /**
-   * Takes a transformed map field on the {@link JsonNode} representation along with a set of key fields used to transform into a map
-   * and rebases it to the original defined format
-   * Avoids producing side effects by copying nodes, use resulting node and not the original
+   * Takes a transformed map field on the {@link JsonNode} representation along with a set of key
+   * fields used to transform into a map and rebases it to the original defined format. Avoids
+   * producing side effects by copying nodes; use the resulting node and not the original.
+   *
   * @param transformedNode the transformed node
   * @param arrayFieldName name of the array field to be transformed
-   * @param keyFields subfields of the array object to be used as keys, empty implies the list is just strings to be merged
+   * @param keyFields subfields of the array object to be used as keys, empty implies the list is
+   *     just strings to be merged
   * @return the modified {@link JsonNode} formatted consistent with the original schema
   */
-  default JsonNode transformedMapToArray(JsonNode transformedNode, String arrayFieldName, List keyFields) {
+  default JsonNode transformedMapToArray(
+      JsonNode transformedNode, String arrayFieldName, List keyFields) {
     JsonNode fieldNode = transformedNode.get(arrayFieldName);
     if (fieldNode instanceof ArrayNode) {
-      // We already have an ArrayNode, no need to transform. This happens during `replace` operations
+      // We already have an ArrayNode, no need to transform. This happens during `replace`
+      // operations
       return transformedNode;
     }
     ObjectNode rebasedNode = transformedNode.deepCopy();
@@ -74,9 +83,7 @@ default JsonNode transformedMapToArray(JsonNode transformedNode, String arrayFie
     } else {
       // No keys, assume pure Strings
       arrayNode = instance.arrayNode();
-      mapNode.fields().forEachRemaining(entry ->
-          arrayNode.add(entry.getValue())
-      );
+      mapNode.fields().forEachRemaining(entry -> arrayNode.add(entry.getValue()));
     }
     return rebasedNode.set(arrayFieldName, arrayNode);
   }
@@ -86,9 +93,16 @@ default ArrayNode mergeToArray(JsonNode mapNode, List keyFields) {
       return instance.arrayNode().add(mapNode);
     } else {
       ArrayNode mergingArray = instance.arrayNode();
-      mapNode.elements().forEachRemaining(node ->
-          mergingArray.addAll(mergeToArray(node, keyFields.size() > 1 ? keyFields.subList(1, keyFields.size()) : Collections.emptyList()))
-      );
+      mapNode
+          .elements()
+          .forEachRemaining(
+              node ->
+                  mergingArray.addAll(
+                      mergeToArray(
+                          node,
+                          keyFields.size() > 1
                              ? keyFields.subList(1, keyFields.size())
+                              : Collections.emptyList())));
       return mergingArray;
     }
   }
 }
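To make the array-to-map contract above concrete, here is a minimal sketch driving it through GlobalTagsTemplate, an implementation of this interface that appears later in this patch. The tag urns are illustrative, and the printed shape assumes GlobalTags entries keyed on their "tag" field:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.linkedin.metadata.models.registry.template.common.GlobalTagsTemplate;

public class ArrayFieldToMapSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Illustrative GlobalTags payload; the urns are made up.
    JsonNode base =
        mapper.readTree(
            "{\"tags\":[{\"tag\":\"urn:li:tag:pii\"},{\"tag\":\"urn:li:tag:legacy\"}]}");
    // transformFields delegates to arrayFieldToMap("tags", ["tag"]), producing roughly
    // {"tags":{"urn:li:tag:pii":{"tag":"urn:li:tag:pii"},"urn:li:tag:legacy":{...}}}
    JsonNode keyed = new GlobalTagsTemplate().transformFields(base);
    System.out.println(keyed);
    // A JSON Patch can now address one entry (e.g. remove "/tags/urn:li:tag:pii"),
    // and rebaseFields/transformedMapToArray restores the original array shape.
  }
}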
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
index 742dbd70d4503..95849a94bae29 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/AspectTemplateEngine.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template;
+import static com.linkedin.metadata.Constants.*;
+
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.github.fge.jsonpatch.JsonPatchException;
 import com.github.fge.jsonpatch.Patch;
@@ -13,25 +15,25 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * Holds connection between aspect specs and their templates and drives the generation from templates
+ * Holds the connection between aspect specs and their templates and drives the generation from
+ * templates
  */
 public class AspectTemplateEngine {
-  public static final Set SUPPORTED_TEMPLATES = Stream.of(
-      DATASET_PROPERTIES_ASPECT_NAME,
-      EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
-      GLOBAL_TAGS_ASPECT_NAME,
-      GLOSSARY_TERMS_ASPECT_NAME,
-      OWNERSHIP_ASPECT_NAME,
-      UPSTREAM_LINEAGE_ASPECT_NAME,
-      DATA_FLOW_INFO_ASPECT_NAME,
-      DATA_JOB_INFO_ASPECT_NAME,
-      DATA_PRODUCT_PROPERTIES_ASPECT_NAME,
-      DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).collect(Collectors.toSet());
+  public static final Set SUPPORTED_TEMPLATES =
+      Stream.of(
+              DATASET_PROPERTIES_ASPECT_NAME,
+              EDITABLE_SCHEMA_METADATA_ASPECT_NAME,
+              GLOBAL_TAGS_ASPECT_NAME,
+              GLOSSARY_TERMS_ASPECT_NAME,
+              OWNERSHIP_ASPECT_NAME,
+              UPSTREAM_LINEAGE_ASPECT_NAME,
+              DATA_FLOW_INFO_ASPECT_NAME,
+              DATA_JOB_INFO_ASPECT_NAME,
+              DATA_PRODUCT_PROPERTIES_ASPECT_NAME,
+              DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)
+          .collect(Collectors.toSet());
   private final Map> _aspectTemplateMap;
@@ -45,11 +47,14 @@ public AspectTemplateEngine(Map> aspe
   @Nullable
   public RecordTemplate getDefaultTemplate(String aspectSpecName) {
-    return _aspectTemplateMap.containsKey(aspectSpecName) ? _aspectTemplateMap.get(aspectSpecName).getDefault() : null;
+    return _aspectTemplateMap.containsKey(aspectSpecName)
+        ? _aspectTemplateMap.get(aspectSpecName).getDefault()
+        : null;
   }
   /**
    * Applies a json patch to a record, optionally merging array fields as necessary
+   *
   * @param recordTemplate original template to be updated
   * @param jsonPatch patch to apply
   * @param aspectSpec aspectSpec of the template
   * @return {@link RecordTemplate} with patch applied
   * @throws JsonProcessingException if there is an issue converting the input to JSON
   * @throws JsonPatchException if there is an issue with applying the json patch
   */
   @Nonnull
-  public RecordTemplate applyPatch(RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec)
+  public RecordTemplate applyPatch(
+      RecordTemplate recordTemplate, Patch jsonPatch, AspectSpec aspectSpec)
       throws JsonProcessingException, JsonPatchException {
     Template template = getTemplate(aspectSpec);
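Since applyPatch is the engine's entry point, a rough usage sketch may help. It assumes an AspectTemplateEngine and an ownership AspectSpec obtained elsewhere from an entity registry; the aspect name "ownership", the owner urn, and the wiring are illustrative, not an exact API walkthrough:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonpatch.JsonPatch;
import com.linkedin.data.template.RecordTemplate;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.registry.template.AspectTemplateEngine;

public class ApplyPatchSketch {
  // engine and ownershipSpec are assumed to come from an entity registry.
  // OwnershipTemplate (below) keys owners on (owner, type), which is why the
  // patch path carries two key segments after /owners.
  static RecordTemplate addOwner(AspectTemplateEngine engine, AspectSpec ownershipSpec)
      throws Exception {
    JsonPatch patch =
        JsonPatch.fromJson(
            new ObjectMapper()
                .readTree(
                    "[{\"op\":\"add\","
                        + "\"path\":\"/owners/urn:li:corpuser:jdoe/TECHNICAL_OWNER\","
                        + "\"value\":{\"owner\":\"urn:li:corpuser:jdoe\","
                        + "\"type\":\"TECHNICAL_OWNER\"}}]"));
    // Start from the template default when no prior aspect exists ("ownership"
    // assumed to be the value of OWNERSHIP_ASPECT_NAME).
    RecordTemplate base = engine.getDefaultTemplate("ownership");
    return engine.applyPatch(base, patch, ownershipSpec);
  }
}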
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
index cf2f5552fbb73..44090b3a6d05b 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/CompoundKeyTemplate.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.models.registry.template;
+import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
+import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -9,14 +12,13 @@
 import com.linkedin.data.template.RecordTemplate;
 import java.util.List;
-import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*;
-import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
-
-
-public abstract class CompoundKeyTemplate implements ArrayMergingTemplate {
+public abstract class CompoundKeyTemplate
+    implements ArrayMergingTemplate {
   /**
-   * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent paths to be specified
+   * Necessary step for templates with compound keys due to JsonPatch not allowing non-existent
+   * paths to be specified
+   *
   * @param transformedNode transformed node to have keys populated
   * @return transformed node that has top level keys populated
   */
@@ -25,7 +27,8 @@
   public JsonNode populateTopLevelKeys(JsonNode transformedNode, Patch jsonPatch) {
     List paths = getPaths(jsonPatch);
     for (String path : paths) {
       String[] keys = path.split("/");
-      // Skip first as it will always be blank due to path starting with /, skip last key as we only need to populate top level
+      // Skip the first key, as it is always blank because the path starts with /; skip the last
+      // key, as we only need to populate the top level
       JsonNode parent = transformedNodeClone;
       for (int i = 1; i < keys.length - 1; i++) {
         if (parent.get(keys[i]) == null) {
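The need for this pre-population step follows from RFC 6902 semantics: an "add" operation requires its parent location to already exist. A small sketch against the ownership shape, with an illustrative urn:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonpatch.JsonPatch;

public class PopulateTopLevelKeysSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    JsonPatch patch =
        JsonPatch.fromJson(
            mapper.readTree(
                "[{\"op\":\"add\",\"path\":\"/owners/urn:li:corpuser:jdoe/TECHNICAL_OWNER\","
                    + "\"value\":{\"owner\":\"urn:li:corpuser:jdoe\"}}]"));
    // Applying directly to {"owners":{}} fails: the parent
    // /owners/urn:li:corpuser:jdoe does not exist yet.
    JsonNode prepared = mapper.readTree("{\"owners\":{\"urn:li:corpuser:jdoe\":{}}}");
    // With the top-level key populated first, the add succeeds.
    System.out.println(patch.apply(prepared));
  }
}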
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java
index 4310c84ded0e2..0793cacce780f 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/Template.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.models.registry.template;
+import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -8,26 +10,23 @@
 import com.linkedin.data.template.RecordTemplate;
 import javax.annotation.Nonnull;
-import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*;
-
-
 public interface Template {
   /**
    * Cast method to get subtype of {@link RecordTemplate} for applying templating methods
+   *
   * @param recordTemplate generic record
   * @return specific type for this template
   * @throws {@link ClassCastException} when recordTemplate is not the correct type for the template
   */
   T getSubtype(RecordTemplate recordTemplate) throws ClassCastException;
-  /**
-   * Get the template clas type
-   */
+  /** Get the template class type */
   Class getTemplateType();
   /**
    * Get a template aspect with defaults set
+   *
   * @return subtype of {@link RecordTemplate} that lines up with a predefined AspectSpec
   */
   @Nonnull
@@ -35,6 +34,7 @@ public interface Template {
   /**
    * Applies a specified {@link Patch} to an aspect
+   *
   * @param recordTemplate original {@link RecordTemplate} to be patched
   * @param jsonPatch patch to apply
   * @return patched value
@@ -50,20 +50,24 @@ default T applyPatch(RecordTemplate recordTemplate, Patch jsonPatch)
   }
   /**
-   * Returns a json representation of the template, modified for template based operations to be compatible with patch
-   * semantics.
+   * Returns a json representation of the template, modified for template based operations to be
+   * compatible with patch semantics.
+   *
   * @param recordTemplate template to be transformed into json
   * @return a {@link JsonNode} representation of the template
   * @throws JsonProcessingException if there is an issue converting the input to JSON
   */
-  default JsonNode preprocessTemplate(RecordTemplate recordTemplate) throws JsonProcessingException {
+  default JsonNode preprocessTemplate(RecordTemplate recordTemplate)
+      throws JsonProcessingException {
     T subtype = getSubtype(recordTemplate);
     JsonNode baseNode = OBJECT_MAPPER.readTree(RecordUtils.toJsonString(subtype));
     return transformFields(baseNode);
   }
   /**
-   * Transforms fields from base json representation of RecordTemplate to definition specific to aspect per patch semantics
+   * Transforms fields from base json representation of RecordTemplate to definition specific to
+   * aspect per patch semantics
+   *
   * @param baseNode the base node to be transformed
   * @return transformed {@link JsonNode}
   */
@@ -72,12 +76,10 @@ default JsonNode preprocessTemplate(RecordTemplate recordTemplate) throws JsonPr
   /**
    * Reserializes the patched {@link JsonNode} to the base {@link RecordTemplate} definition
+   *
   * @param patched the deserialized patched json in custom format per aspect spec
   * @return A {@link JsonNode} that has been retranslated from patch semantics
   */
   @Nonnull
   JsonNode rebaseFields(JsonNode patched);
-
-
-
 }
diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java
index 0cd9a52c8fe60..a98e60c739749 100644
--- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java
+++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlobalTagsTemplate.java
@@ -8,7 +8,6 @@
 import java.util.Collections;
 import javax.annotation.Nonnull;
-
 public
class GlobalTagsTemplate implements ArrayMergingTemplate { private static final String TAGS_FIELD_NAME = "tags"; @@ -45,6 +44,7 @@ public JsonNode transformFields(JsonNode baseNode) { @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, TAGS_FIELD_NAME, Collections.singletonList(TAG_FIELD_NAME)); + return transformedMapToArray( + patched, TAGS_FIELD_NAME, Collections.singletonList(TAG_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java index e905404824022..7ce59916f2073 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/GlossaryTermsTemplate.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry.template.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.AuditStamp; @@ -11,10 +14,6 @@ import java.util.Collections; import javax.annotation.Nonnull; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class GlossaryTermsTemplate implements ArrayMergingTemplate { private static final String TERMS_FIELD_NAME = "terms"; @@ -40,8 +39,12 @@ public Class getTemplateType() { @Override public GlossaryTerms getDefault() { GlossaryTerms glossaryTerms = new GlossaryTerms(); - glossaryTerms.setTerms(new GlossaryTermAssociationArray()) - .setAuditStamp(new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + glossaryTerms + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp( + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); return glossaryTerms; } @@ -52,8 +55,7 @@ public JsonNode transformFields(JsonNode baseNode) { // Set required deprecated field if (baseNode.get(AUDIT_STAMP_FIELD) == null) { ObjectNode auditStampNode = instance.objectNode(); - auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR) - .put(TIME_FIELD, System.currentTimeMillis()); + auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR).put(TIME_FIELD, System.currentTimeMillis()); ((ObjectNode) baseNode).set(AUDIT_STAMP_FIELD, auditStampNode); } return arrayFieldToMap(baseNode, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); @@ -65,10 +67,10 @@ public JsonNode rebaseFields(JsonNode patched) { // Set required deprecated field if (patched.get(AUDIT_STAMP_FIELD) == null) { ObjectNode auditStampNode = instance.objectNode(); - auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR) - .put(TIME_FIELD, System.currentTimeMillis()); + auditStampNode.put(ACTOR_FIELD, SYSTEM_ACTOR).put(TIME_FIELD, System.currentTimeMillis()); ((ObjectNode) patched).set(AUDIT_STAMP_FIELD, auditStampNode); } - return transformedMapToArray(patched, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); + return transformedMapToArray( + patched, TERMS_FIELD_NAME, Collections.singletonList(URN_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java 
b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java index 0a2cff4395b54..b850ae830b98c 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/common/OwnershipTemplate.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.common; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.linkedin.common.AuditStamp; import com.linkedin.common.OwnerArray; @@ -10,9 +12,6 @@ import java.util.Arrays; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipTemplate extends CompoundKeyTemplate { private static final String OWNERS_FIELD_NAME = "owners"; @@ -37,9 +36,10 @@ public Class getTemplateType() { public Ownership getDefault() { Ownership ownership = new Ownership(); ownership.setOwners(new OwnerArray()); - ownership.setLastModified(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))); + ownership.setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))); return ownership; } @@ -47,12 +47,14 @@ public Ownership getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap(baseNode, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); + return arrayFieldToMap( + baseNode, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); + return transformedMapToArray( + patched, OWNERS_FIELD_NAME, Arrays.asList(OWNER_FIELD_NAME, TYPE_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java index 5997bd8e7910d..73e837f368f0b 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataflow/DataFlowInfoTemplate.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.models.registry.template.Template; import javax.annotation.Nonnull; - public class DataFlowInfoTemplate implements Template { @Override diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java index 9d25fa71286d3..bdb306c2d32e4 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInfoTemplate.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.models.registry.template.Template; import javax.annotation.Nonnull; - public class DataJobInfoTemplate implements Template { @Override diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java 
b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java index b4ddb4523c9a5..889297734e977 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/datajob/DataJobInputOutputTemplate.java @@ -12,7 +12,6 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class DataJobInputOutputTemplate implements ArrayMergingTemplate { private static final String INPUT_DATA_JOB_EDGES_FIELD_NAME = "inputDatajobEdges"; @@ -23,6 +22,7 @@ public class DataJobInputOutputTemplate implements ArrayMergingTemplate { private static final String ASSETS_FIELD_NAME = "assets"; @@ -44,6 +43,7 @@ public JsonNode transformFields(JsonNode baseNode) { @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, ASSETS_FIELD_NAME, Collections.singletonList(KEY_FIELD_NAME)); + return transformedMapToArray( + patched, ASSETS_FIELD_NAME, Collections.singletonList(KEY_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java index 3c1be1f7ecaad..991f7f3d4053a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/DatasetPropertiesTemplate.java @@ -9,7 +9,6 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class DatasetPropertiesTemplate implements ArrayMergingTemplate { private static final String TAGS_FIELD_NAME = "tags"; diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java index 62888d117b3de..9712a9081d33a 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/EditableSchemaMetadataTemplate.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.dataset; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.AuditStamp; @@ -13,9 +15,6 @@ import java.util.Collections; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class EditableSchemaMetadataTemplate extends CompoundKeyTemplate { private static final String EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME = "editableSchemaFieldInfo"; @@ -24,7 +23,8 @@ public class EditableSchemaMetadataTemplate extends CompoundKeyTemplate getTemplateType() { @Nonnull @Override public EditableSchemaMetadata getDefault() { - AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); return new EditableSchemaMetadata() .setCreated(auditStamp) .setLastModified(auditStamp) @@ -49,47 +52,70 @@ 
public EditableSchemaMetadata getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - JsonNode transformedNode = arrayFieldToMap(baseNode, EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, - Collections.singletonList(FIELDPATH_FIELD_NAME)); + JsonNode transformedNode = + arrayFieldToMap( + baseNode, + EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, + Collections.singletonList(FIELDPATH_FIELD_NAME)); // Create temporary templates for array subfields GlobalTagsTemplate globalTagsTemplate = new GlobalTagsTemplate(); GlossaryTermsTemplate glossaryTermsTemplate = new GlossaryTermsTemplate(); // Apply template transforms to array subfields - transformedNode.get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME).elements().forEachRemaining(node -> { - JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); - JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); - if (globalTags != null) { - ((ObjectNode) node).set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.transformFields(node.get(GLOBAL_TAGS_FIELD_NAME))); - } - if (glossaryTerms != null) { - ((ObjectNode) node).set(GLOSSARY_TERMS_FIELD_NAME, glossaryTermsTemplate.transformFields(node.get(GLOSSARY_TERMS_FIELD_NAME))); - } - }); + transformedNode + .get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME) + .elements() + .forEachRemaining( + node -> { + JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); + JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); + if (globalTags != null) { + ((ObjectNode) node) + .set( + GLOBAL_TAGS_FIELD_NAME, + globalTagsTemplate.transformFields(node.get(GLOBAL_TAGS_FIELD_NAME))); + } + if (glossaryTerms != null) { + ((ObjectNode) node) + .set( + GLOSSARY_TERMS_FIELD_NAME, + glossaryTermsTemplate.transformFields(node.get(GLOSSARY_TERMS_FIELD_NAME))); + } + }); return transformedNode; } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - JsonNode rebasedNode = transformedMapToArray(patched, EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, - Collections.singletonList(FIELDPATH_FIELD_NAME)); + JsonNode rebasedNode = + transformedMapToArray( + patched, + EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME, + Collections.singletonList(FIELDPATH_FIELD_NAME)); // Create temporary templates for array subfields GlobalTagsTemplate globalTagsTemplate = new GlobalTagsTemplate(); GlossaryTermsTemplate glossaryTermsTemplate = new GlossaryTermsTemplate(); // Apply template rebases to array subfields - rebasedNode.get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME).elements().forEachRemaining(node -> { - JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); - JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); - if (globalTags != null) { - ((ObjectNode) node).set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.rebaseFields(globalTags)); - } - if (glossaryTerms != null) { - ((ObjectNode) node).set(GLOSSARY_TERMS_FIELD_NAME, glossaryTermsTemplate.rebaseFields(glossaryTerms)); - } - }); - + rebasedNode + .get(EDITABLE_SCHEMA_FIELD_INFO_FIELD_NAME) + .elements() + .forEachRemaining( + node -> { + JsonNode globalTags = node.get(GLOBAL_TAGS_FIELD_NAME); + JsonNode glossaryTerms = node.get(GLOSSARY_TERMS_FIELD_NAME); + if (globalTags != null) { + ((ObjectNode) node) + .set(GLOBAL_TAGS_FIELD_NAME, globalTagsTemplate.rebaseFields(globalTags)); + } + if (glossaryTerms != null) { + ((ObjectNode) node) + .set( + GLOSSARY_TERMS_FIELD_NAME, + glossaryTermsTemplate.rebaseFields(glossaryTerms)); + } + }); return rebasedNode; } diff --git 
a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java index 9e87b8a385328..35816895669be 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/dataset/UpstreamLineageTemplate.java @@ -9,11 +9,11 @@ import java.util.Collections; import javax.annotation.Nonnull; - public class UpstreamLineageTemplate implements ArrayMergingTemplate { private static final String UPSTREAMS_FIELD_NAME = "upstreams"; private static final String DATASET_FIELD_NAME = "dataset"; + // TODO: Fine Grained Lineages not patchable at this time, they don't have a well established key @Override @@ -42,12 +42,14 @@ public UpstreamLineage getDefault() { @Nonnull @Override public JsonNode transformFields(JsonNode baseNode) { - return arrayFieldToMap(baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + return arrayFieldToMap( + baseNode, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); } @Nonnull @Override public JsonNode rebaseFields(JsonNode patched) { - return transformedMapToArray(patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); + return transformedMapToArray( + patched, UPSTREAMS_FIELD_NAME, Collections.singletonList(DATASET_FIELD_NAME)); } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java index 6496ac125d867..18d070ec3da45 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/template/util/TemplateUtil.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry.template.util; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -7,27 +9,31 @@ import java.util.ArrayList; import java.util.List; -import static com.linkedin.metadata.Constants.*; - - public class TemplateUtil { - private TemplateUtil() { - - } + private TemplateUtil() {} public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public static List getPaths(Patch jsonPatch) { JsonNode patchNode = OBJECT_MAPPER.valueToTree(jsonPatch); List paths = new ArrayList<>(); - patchNode.elements().forEachRemaining(node -> { - paths.add(node.get("path").asText()); - }); + patchNode + .elements() + .forEachRemaining( + node -> { + paths.add(node.get("path").asText()); + }); return paths; } } diff --git 
a/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java index 0ce066b7a3433..ad16aec7f66d2 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/DataSchemaFactoryTest.java @@ -1,20 +1,24 @@ package com.linkedin.metadata.models; +import static org.testng.Assert.*; + import com.linkedin.data.schema.DataSchema; import com.linkedin.metadata.models.registry.TestConstants; import java.nio.file.Paths; import java.util.Optional; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class DataSchemaFactoryTest { @Test public void testCustomClassLoading() throws Exception { - DataSchemaFactory dsf = DataSchemaFactory.withCustomClasspath(Paths.get( - TestConstants.BASE_DIRECTORY + "/" + TestConstants.TEST_REGISTRY + "/" - + TestConstants.TEST_VERSION.toString())); + DataSchemaFactory dsf = + DataSchemaFactory.withCustomClasspath( + Paths.get( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString())); // Assert that normally found aspects from the core model are missing Optional dataSchema = dsf.getAspectSchema("datasetProfile"); assertFalse(dataSchema.isPresent(), "datasetProfile"); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java index b95cb1085283f..e1ea80e2bcad2 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/EntitySpecBuilderTest.java @@ -1,8 +1,12 @@ package com.linkedin.metadata.models; -import com.datahub.test.TestBrowsePaths; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.test.SearchFeatures; import com.datahub.test.Snapshot; +import com.datahub.test.TestBrowsePaths; import com.datahub.test.TestEntityInfo; import com.datahub.test.TestEntityKey; import com.datahub.test.invalid.DuplicateSearchableFields; @@ -18,67 +22,76 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - - -/** - * Tests the capabilities of {@link EntitySpecBuilder} - */ +/** Tests the capabilities of {@link EntitySpecBuilder} */ public class EntitySpecBuilderTest { @Test public void testBuildAspectSpecValidationAspectMissingAnnotation() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new MissingAspectAnnotation().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new MissingAspectAnnotation().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationInvalidSearchableFieldType() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new InvalidSearchableFieldType().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new InvalidSearchableFieldType().schema(), RecordTemplate.class)); } @Test public void 
testBuildAspectSpecValidationDuplicateSearchableFields() { - AspectSpec aspectSpec = new EntitySpecBuilder() - .buildAspectSpec(new DuplicateSearchableFields().schema(), RecordTemplate.class); + AspectSpec aspectSpec = + new EntitySpecBuilder() + .buildAspectSpec(new DuplicateSearchableFields().schema(), RecordTemplate.class); - aspectSpec.getSearchableFieldSpecs().forEach(searchableFieldSpec -> { - String name = searchableFieldSpec.getSearchableAnnotation().getFieldName(); - assertTrue("textField".equals(name) || "textField2".equals(name)); - }); + aspectSpec + .getSearchableFieldSpecs() + .forEach( + searchableFieldSpec -> { + String name = searchableFieldSpec.getSearchableAnnotation().getFieldName(); + assertTrue("textField".equals(name) || "textField2".equals(name)); + }); } @Test public void testBuildAspectSpecValidationMissingRelationshipName() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new MissingRelationshipName().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new MissingRelationshipName().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationNonNumericSearchScoreField() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new NonNumericSearchScoreField().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new NonNumericSearchScoreField().schema(), RecordTemplate.class)); } @Test public void testBuildAspectSpecValidationNonSingularSearchScoreField() { - assertThrows(ModelValidationException.class, () -> - new EntitySpecBuilder().buildAspectSpec(new NonSingularSearchScoreField().schema(), RecordTemplate.class) - ); + assertThrows( + ModelValidationException.class, + () -> + new EntitySpecBuilder() + .buildAspectSpec(new NonSingularSearchScoreField().schema(), RecordTemplate.class)); } + @Test public void testBuildEntitySpecs() { // Instantiate the test Snapshot final Snapshot snapshot = new Snapshot(); - final List validEntitySpecs = new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()); + final List validEntitySpecs = + new EntitySpecBuilder().buildEntitySpecs(snapshot.schema()); // Assert single entity. 
assertEquals(1, validEntitySpecs.size()); @@ -110,116 +123,265 @@ public void testBuildEntitySpecs() { private void validateTestEntityKey(final AspectSpec keyAspectSpec) { assertEquals("testEntityKey", keyAspectSpec.getName()); - assertEquals(new TestEntityKey().schema().getFullName(), keyAspectSpec.getPegasusSchema().getFullName()); + assertEquals( + new TestEntityKey().schema().getFullName(), keyAspectSpec.getPegasusSchema().getFullName()); // Assert on Searchable Fields assertEquals(2, keyAspectSpec.getSearchableFieldSpecs().size()); // keyPart1, keyPart3 - assertEquals("keyPart1", keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart1").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart1").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("keyPart3", keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart3").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.KEYWORD, keyAspectSpec.getSearchableFieldSpecMap().get(new PathSpec("keyPart3").toString()) - .getSearchableAnnotation().getFieldType()); + assertEquals( + "keyPart1", + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart1").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart1").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "keyPart3", + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart3").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.KEYWORD, + keyAspectSpec + .getSearchableFieldSpecMap() + .get(new PathSpec("keyPart3").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Field assertEquals(1, keyAspectSpec.getRelationshipFieldSpecs().size()); - assertEquals("keyForeignKey", keyAspectSpec.getRelationshipFieldSpecMap().get(new PathSpec("keyPart2").toString()).getRelationshipName()); + assertEquals( + "keyForeignKey", + keyAspectSpec + .getRelationshipFieldSpecMap() + .get(new PathSpec("keyPart2").toString()) + .getRelationshipName()); } - private void validateBrowsePaths(final AspectSpec browsePathAspectSpec) { assertEquals("testBrowsePaths", browsePathAspectSpec.getName()); - assertEquals(new TestBrowsePaths().schema().getFullName(), browsePathAspectSpec.getPegasusSchema().getFullName()); + assertEquals( + new TestBrowsePaths().schema().getFullName(), + browsePathAspectSpec.getPegasusSchema().getFullName()); assertEquals(1, browsePathAspectSpec.getSearchableFieldSpecs().size()); - assertEquals(SearchableAnnotation.FieldType.BROWSE_PATH, browsePathAspectSpec.getSearchableFieldSpecs().get(0) - .getSearchableAnnotation().getFieldType()); + assertEquals( + SearchableAnnotation.FieldType.BROWSE_PATH, + browsePathAspectSpec + .getSearchableFieldSpecs() + .get(0) + .getSearchableAnnotation() + .getFieldType()); } private void validateTestEntityInfo(final AspectSpec testEntityInfo) { assertEquals("testEntityInfo", testEntityInfo.getName()); - assertEquals(new TestEntityInfo().schema().getFullName(), testEntityInfo.getPegasusSchema().getFullName()); + assertEquals( + new TestEntityInfo().schema().getFullName(), + testEntityInfo.getPegasusSchema().getFullName()); // Assert on Searchable Fields 
assertEquals(testEntityInfo.getSearchableFieldSpecs().size(), 11); - assertEquals("customProperties", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("customProperties").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.KEYWORD, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("customProperties").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("textFieldOverride", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("textArrayField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textArrayField", "*").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT_PARTIAL, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("textArrayField", "*").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("wordGramField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("wordGramField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.WORD_GRAM, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("wordGramField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedIntegerField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedIntegerField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.COUNT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedIntegerField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedArrayStringField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("nestedArrayArrayField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*").toString()) - .getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.TEXT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("esObjectField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("esObjectField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.OBJECT, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("esObjectField").toString()) - .getSearchableAnnotation().getFieldType()); - assertEquals("foreignKey", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(true, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getSearchableAnnotation().isQueryByDefault()); - 
assertEquals("doubleField", testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldName()); - assertEquals(SearchableAnnotation.FieldType.DOUBLE, testEntityInfo.getSearchableFieldSpecMap().get( - new PathSpec("doubleField").toString()).getSearchableAnnotation().getFieldType()); - + assertEquals( + "customProperties", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("customProperties").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.KEYWORD, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("customProperties").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "textFieldOverride", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "textArrayField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textArrayField", "*").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT_PARTIAL, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("textArrayField", "*").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "wordGramField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("wordGramField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.WORD_GRAM, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("wordGramField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedIntegerField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedIntegerField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.COUNT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedIntegerField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedArrayStringField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayStringField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "nestedArrayArrayField", + testEntityInfo + .getSearchableFieldSpecMap() + .get( + new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*") + .toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.TEXT, + testEntityInfo + .getSearchableFieldSpecMap() + .get( + new PathSpec("nestedRecordArrayField", "*", "nestedArrayArrayField", "*") + .toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "esObjectField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("esObjectField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.OBJECT, + 
testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("esObjectField").toString()) + .getSearchableAnnotation() + .getFieldType()); + assertEquals( + "foreignKey", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + true, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getSearchableAnnotation() + .isQueryByDefault()); + assertEquals( + "doubleField", + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("doubleField").toString()) + .getSearchableAnnotation() + .getFieldName()); + assertEquals( + SearchableAnnotation.FieldType.DOUBLE, + testEntityInfo + .getSearchableFieldSpecMap() + .get(new PathSpec("doubleField").toString()) + .getSearchableAnnotation() + .getFieldType()); // Assert on Relationship Fields assertEquals(4, testEntityInfo.getRelationshipFieldSpecs().size()); - assertEquals("foreignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("foreignKey").toString()).getRelationshipName()); - assertEquals("foreignKeyArray", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("foreignKeyArray", "*").toString()).getRelationshipName()); - assertEquals("nestedForeignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("nestedRecordField", "nestedForeignKey").toString()).getRelationshipName()); - assertEquals("nestedArrayForeignKey", testEntityInfo.getRelationshipFieldSpecMap().get( - new PathSpec("nestedRecordArrayField", "*", "nestedArrayForeignKey").toString()).getRelationshipName()); + assertEquals( + "foreignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("foreignKey").toString()) + .getRelationshipName()); + assertEquals( + "foreignKeyArray", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("foreignKeyArray", "*").toString()) + .getRelationshipName()); + assertEquals( + "nestedForeignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("nestedRecordField", "nestedForeignKey").toString()) + .getRelationshipName()); + assertEquals( + "nestedArrayForeignKey", + testEntityInfo + .getRelationshipFieldSpecMap() + .get(new PathSpec("nestedRecordArrayField", "*", "nestedArrayForeignKey").toString()) + .getRelationshipName()); } private void validateSearchFeatures(final AspectSpec searchFeaturesAspectSpec) { assertEquals("searchFeatures", searchFeaturesAspectSpec.getName()); - assertEquals(new SearchFeatures().schema().getFullName(), + assertEquals( + new SearchFeatures().schema().getFullName(), searchFeaturesAspectSpec.getPegasusSchema().getFullName()); assertEquals(2, searchFeaturesAspectSpec.getSearchScoreFieldSpecs().size()); - assertEquals("feature1", searchFeaturesAspectSpec.getSearchScoreFieldSpecMap() - .get(new PathSpec("feature1").toString()) - .getSearchScoreAnnotation() - .getFieldName()); - assertEquals("feature2", searchFeaturesAspectSpec.getSearchScoreFieldSpecMap() - .get(new PathSpec("feature2").toString()) - .getSearchScoreAnnotation() - .getFieldName()); + assertEquals( + "feature1", + searchFeaturesAspectSpec + .getSearchScoreFieldSpecMap() + .get(new PathSpec("feature1").toString()) + .getSearchScoreAnnotation() + .getFieldName()); + assertEquals( + "feature2", + searchFeaturesAspectSpec + .getSearchScoreFieldSpecMap() + .get(new PathSpec("feature2").toString()) + .getSearchScoreAnnotation() + .getFieldName()); } - } diff --git 
a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java index 320dfc47f21e4..852e4f19bac12 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/ConfigEntityRegistryTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry; +import static org.testng.Assert.*; + import com.datahub.test.TestEntityProfile; import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import com.linkedin.metadata.models.EntitySpec; @@ -9,21 +11,22 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class ConfigEntityRegistryTest { @BeforeTest public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @Test public void testEntityRegistry() throws FileNotFoundException { - ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + TestEntityProfile.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); Map entitySpecs = configEntityRegistry.getEntitySpecs(); Map eventSpecs = configEntityRegistry.getEventSpecs(); @@ -54,9 +57,11 @@ public void testEntityRegistry() throws FileNotFoundException { @Test public void testEntityRegistryIdentifier() throws FileNotFoundException { - ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + TestEntityProfile.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); assertEquals(configEntityRegistry.getIdentifier(), "test-registry"); } } - diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java index 05d23eb4b455f..20a64f9af25c0 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/LineageRegistryTest.java @@ -1,5 +1,11 @@ package com.linkedin.metadata.models.registry; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.RelationshipFieldSpec; @@ -11,13 +17,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - - public class LineageRegistryTest { @Test public void testRegistryWhenEmpty() { @@ -33,14 +32,16 @@ public void 
testRegistry() { Map mockEntitySpecs = new HashMap<>(); EntitySpec mockDatasetSpec = mock(EntitySpec.class); List datasetRelations = - ImmutableList.of(buildSpec("DownstreamOf", ImmutableList.of("dataset"), true, true), + ImmutableList.of( + buildSpec("DownstreamOf", ImmutableList.of("dataset"), true, true), buildSpec("AssociatedWith", ImmutableList.of("tag"), true, false), buildSpec("AssociatedWith", ImmutableList.of("glossaryTerm"), true, false)); when(mockDatasetSpec.getRelationshipFieldSpecs()).thenReturn(datasetRelations); mockEntitySpecs.put("dataset", mockDatasetSpec); EntitySpec mockJobSpec = mock(EntitySpec.class); List jobRelations = - ImmutableList.of(buildSpec("Produces", ImmutableList.of("dataset"), false, true), + ImmutableList.of( + buildSpec("Produces", ImmutableList.of("dataset"), false, true), buildSpec("Consumes", ImmutableList.of("dataset"), true, true)); when(mockJobSpec.getRelationshipFieldSpecs()).thenReturn(jobRelations); mockEntitySpecs.put("dataJob", mockJobSpec); @@ -50,22 +51,51 @@ public void testRegistry() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); LineageRegistry.LineageSpec lineageSpec = lineageRegistry.getLineageSpec("dataset"); assertEquals(lineageSpec.getUpstreamEdges().size(), 2); - assertTrue(lineageSpec.getUpstreamEdges() - .contains(new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))); - assertTrue(lineageSpec.getUpstreamEdges() - .contains(new LineageRegistry.EdgeInfo("Produces", RelationshipDirection.INCOMING, "dataJob"))); + assertTrue( + lineageSpec + .getUpstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.OUTGOING, "dataset"))); + assertTrue( + lineageSpec + .getUpstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "Produces", RelationshipDirection.INCOMING, "dataJob"))); assertEquals(lineageSpec.getDownstreamEdges().size(), 2); - assertTrue(lineageSpec.getDownstreamEdges() - .contains(new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, "dataset"))); - assertTrue(lineageSpec.getDownstreamEdges() - .contains(new LineageRegistry.EdgeInfo("Consumes", RelationshipDirection.INCOMING, "dataJob"))); + assertTrue( + lineageSpec + .getDownstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "DownstreamOf", RelationshipDirection.INCOMING, "dataset"))); + assertTrue( + lineageSpec + .getDownstreamEdges() + .contains( + new LineageRegistry.EdgeInfo( + "Consumes", RelationshipDirection.INCOMING, "dataJob"))); } - private RelationshipFieldSpec buildSpec(String relationshipType, List destinationEntityTypes, - boolean isUpstream, boolean isLineage) { + private RelationshipFieldSpec buildSpec( + String relationshipType, + List destinationEntityTypes, + boolean isUpstream, + boolean isLineage) { RelationshipFieldSpec spec = mock(RelationshipFieldSpec.class); - when(spec.getRelationshipAnnotation()).thenReturn( - new RelationshipAnnotation(relationshipType, destinationEntityTypes, isUpstream, isLineage, null, null, null, null, null)); + when(spec.getRelationshipAnnotation()) + .thenReturn( + new RelationshipAnnotation( + relationshipType, + destinationEntityTypes, + isUpstream, + isLineage, + null, + null, + null, + null, + null)); return spec; } } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java index 38664fedb1570..1652a51290597 100644 --- 
a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PatchEntityRegistryTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.models.registry; +import static org.testng.Assert.*; + import com.linkedin.metadata.models.DataSchemaFactory; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EventSpec; @@ -7,20 +9,19 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class PatchEntityRegistryTest { @Test public void testEntityRegistryLoad() throws Exception, EntityRegistryException { - PatchEntityRegistry patchEntityRegistry = new PatchEntityRegistry( - TestConstants.BASE_DIRECTORY - + "/" - + TestConstants.TEST_REGISTRY - + "/" - + TestConstants.TEST_VERSION.toString(), - TestConstants.TEST_REGISTRY, TestConstants.TEST_VERSION); + PatchEntityRegistry patchEntityRegistry = + new PatchEntityRegistry( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString(), + TestConstants.TEST_REGISTRY, + TestConstants.TEST_VERSION); Map entitySpecs = patchEntityRegistry.getEntitySpecs(); assertEquals(entitySpecs.values().size(), 1); @@ -40,21 +41,27 @@ public void testEntityRegistryLoad() throws Exception, EntityRegistryException { /** * Validate that patch entity registries can have key aspects + * * @throws Exception * @throws EntityRegistryException */ @Test public void testEntityRegistryWithKeyLoad() throws Exception, EntityRegistryException { - DataSchemaFactory dataSchemaFactory = DataSchemaFactory.withCustomClasspath( - Paths.get(TestConstants.BASE_DIRECTORY - + "/" - + TestConstants.TEST_REGISTRY - + "/" - + TestConstants.TEST_VERSION.toString())); + DataSchemaFactory dataSchemaFactory = + DataSchemaFactory.withCustomClasspath( + Paths.get( + TestConstants.BASE_DIRECTORY + + "/" + + TestConstants.TEST_REGISTRY + + "/" + + TestConstants.TEST_VERSION.toString())); - PatchEntityRegistry patchEntityRegistry = new PatchEntityRegistry( - dataSchemaFactory, Paths.get("src/test_plugins/mycompany-full-model/0.0.1/entity-registry.yaml"), - TestConstants.TEST_REGISTRY, TestConstants.TEST_VERSION); + PatchEntityRegistry patchEntityRegistry = + new PatchEntityRegistry( + dataSchemaFactory, + Paths.get("src/test_plugins/mycompany-full-model/0.0.1/entity-registry.yaml"), + TestConstants.TEST_REGISTRY, + TestConstants.TEST_VERSION); Map entitySpecs = patchEntityRegistry.getEntitySpecs(); assertEquals(entitySpecs.values().size(), 1); diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java index 06ed794ecc684..b3eb2af72708c 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/PluginEntityRegistryLoaderTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.models.registry; +import static com.linkedin.metadata.models.registry.TestConstants.*; +import static org.testng.Assert.*; + import com.linkedin.data.schema.ArrayDataSchema; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.schema.RecordDataSchema; @@ -28,122 +31,137 @@ import org.apache.maven.artifact.versioning.ComparableVersion; import org.testng.annotations.Test; -import 
static com.linkedin.metadata.models.registry.TestConstants.*; -import static org.testng.Assert.*; - - public class PluginEntityRegistryLoaderTest { @Test public void testEntityRegistry() throws FileNotFoundException, InterruptedException { - EntityRegistry baseEntityRegistry = new EntityRegistry() { - @Nonnull - @Override - public EntitySpec getEntitySpec(@Nonnull String entityName) { - return null; - } - - @Nonnull - @Override - public EventSpec getEventSpec(@Nonnull String eventName) { - return null; - } - - @Nonnull - @Override - public Map getEntitySpecs() { - return null; - } - - @Nonnull - @Override - public Map getAspectSpecs() { - return new HashMap<>(); - } - - @Nonnull - @Override - public Map getEventSpecs() { - return null; - } - - @Nonnull - @Override - public AspectTemplateEngine getAspectTemplateEngine() { - return new AspectTemplateEngine(); - } - }; + EntityRegistry baseEntityRegistry = + new EntityRegistry() { + @Nonnull + @Override + public EntitySpec getEntitySpec(@Nonnull String entityName) { + return null; + } + + @Nonnull + @Override + public EventSpec getEventSpec(@Nonnull String eventName) { + return null; + } + + @Nonnull + @Override + public Map getEntitySpecs() { + return null; + } + + @Nonnull + @Override + public Map getAspectSpecs() { + return new HashMap<>(); + } + + @Nonnull + @Override + public Map getEventSpecs() { + return null; + } + + @Nonnull + @Override + public AspectTemplateEngine getAspectTemplateEngine() { + return new AspectTemplateEngine(); + } + }; MergedEntityRegistry configEntityRegistry = new MergedEntityRegistry(baseEntityRegistry); PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(TestConstants.BASE_DIRECTORY).withBaseRegistry(configEntityRegistry).start(true); + new PluginEntityRegistryLoader(TestConstants.BASE_DIRECTORY) + .withBaseRegistry(configEntityRegistry) + .start(true); assertEquals(pluginEntityRegistryLoader.getPatchRegistries().size(), 1); EntityRegistryLoadResult loadResult = - pluginEntityRegistryLoader.getPatchRegistries().get(TestConstants.TEST_REGISTRY).get(TEST_VERSION).getSecond(); + pluginEntityRegistryLoader + .getPatchRegistries() + .get(TestConstants.TEST_REGISTRY) + .get(TEST_VERSION) + .getSecond(); assertNotNull(loadResult); assertEquals(loadResult.getLoadResult(), LoadStatus.FAILURE); } private EntityRegistry getBaseEntityRegistry() { final AspectSpec keyAspectSpec = - new AspectSpec(new AspectAnnotation("datasetKey", false, false, null), Collections.emptyList(), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), + new AspectSpec( + new AspectAnnotation("datasetKey", false, false, null), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), (RecordDataSchema) DataSchemaFactory.getInstance().getAspectSchema("datasetKey").get(), DataSchemaFactory.getInstance().getAspectClass("datasetKey").get()); final Map entitySpecMap = new HashMap<>(1); List aspectSpecList = new ArrayList<>(1); aspectSpecList.add(keyAspectSpec); - EntitySpec baseEntitySpec = new DefaultEntitySpec(aspectSpecList, new EntityAnnotation("dataset", "datasetKey"), - (RecordDataSchema) DataSchemaFactory.getInstance().getEntitySchema("dataset").get()); + EntitySpec baseEntitySpec = + new DefaultEntitySpec( + aspectSpecList, + new EntityAnnotation("dataset", "datasetKey"), + (RecordDataSchema) DataSchemaFactory.getInstance().getEntitySchema("dataset").get()); 
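        // A minimal sketch, with an illustrative variable name rather than DataHub API: the
        // plugin-loader tests in this file assume registries laid out on disk as
        // <BASE_DIRECTORY>/<registry name>/<version>/entity-registry.yaml, so the TestConstants
        // used above resolve to
        // custom-test-model/build/plugins/models/mycompany-dq-model/0.0.1/entity-registry.yaml.
        java.nio.file.Path registryYaml =
            java.nio.file.Paths.get(
                TestConstants.BASE_DIRECTORY,
                TestConstants.TEST_REGISTRY,
                TestConstants.TEST_VERSION.toString(),
                "entity-registry.yaml");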
entitySpecMap.put("dataset", baseEntitySpec); final Map eventSpecMap = new HashMap<>(1); - EventSpec baseEventSpec = new DefaultEventSpec("testEvent", new EventAnnotation("testEvent"), - (RecordDataSchema) DataSchemaFactory.getInstance().getEventSchema("testEvent").get()); + EventSpec baseEventSpec = + new DefaultEventSpec( + "testEvent", + new EventAnnotation("testEvent"), + (RecordDataSchema) DataSchemaFactory.getInstance().getEventSchema("testEvent").get()); eventSpecMap.put("testevent", baseEventSpec); - EntityRegistry baseEntityRegistry = new EntityRegistry() { - - @Nonnull - @Override - public EntitySpec getEntitySpec(@Nonnull String entityName) { - assertEquals(entityName, "dataset"); - return baseEntitySpec; - } - - @Nullable - @Override - public EventSpec getEventSpec(@Nonnull String eventName) { - assertEquals(eventName, "testEvent"); - return baseEventSpec; - } - - @Nonnull - @Override - public Map getEntitySpecs() { - return entitySpecMap; - } - - @Nonnull - @Override - public Map getAspectSpecs() { - return new HashMap<>(); - } - - @Nonnull - @Override - public Map getEventSpecs() { - return eventSpecMap; - } - - @Nonnull - @Override - public AspectTemplateEngine getAspectTemplateEngine() { - return new AspectTemplateEngine(); - } - }; + EntityRegistry baseEntityRegistry = + new EntityRegistry() { + + @Nonnull + @Override + public EntitySpec getEntitySpec(@Nonnull String entityName) { + assertEquals(entityName, "dataset"); + return baseEntitySpec; + } + + @Nullable + @Override + public EventSpec getEventSpec(@Nonnull String eventName) { + assertEquals(eventName, "testEvent"); + return baseEventSpec; + } + + @Nonnull + @Override + public Map getEntitySpecs() { + return entitySpecMap; + } + + @Nonnull + @Override + public Map getAspectSpecs() { + return new HashMap<>(); + } + + @Nonnull + @Override + public Map getEventSpecs() { + return eventSpecMap; + } + + @Nonnull + @Override + public AspectTemplateEngine getAspectTemplateEngine() { + return new AspectTemplateEngine(); + } + }; return baseEntityRegistry; } @@ -152,12 +170,21 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter MergedEntityRegistry mergedEntityRegistry = new MergedEntityRegistry(getBaseEntityRegistry()); PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(BASE_DIRECTORY).withBaseRegistry(mergedEntityRegistry).start(true); + new PluginEntityRegistryLoader(BASE_DIRECTORY) + .withBaseRegistry(mergedEntityRegistry) + .start(true); assertEquals(pluginEntityRegistryLoader.getPatchRegistries().size(), 1); EntityRegistryLoadResult loadResult = - pluginEntityRegistryLoader.getPatchRegistries().get(TEST_REGISTRY).get(TEST_VERSION).getSecond(); + pluginEntityRegistryLoader + .getPatchRegistries() + .get(TEST_REGISTRY) + .get(TEST_VERSION) + .getSecond(); assertNotNull(loadResult); - assertEquals(loadResult.getLoadResult(), LoadStatus.SUCCESS, "load failed with " + loadResult.getFailureReason()); + assertEquals( + loadResult.getLoadResult(), + LoadStatus.SUCCESS, + "load failed with " + loadResult.getFailureReason()); Map entitySpecs = mergedEntityRegistry.getEntitySpecs(); @@ -165,7 +192,8 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter assertEquals(entitySpec.getName(), "dataset"); assertEquals(entitySpec.getKeyAspectSpec().getName(), "datasetKey"); Optional dataSchema = - Optional.ofNullable(entitySpecs.get("dataset").getAspectSpec("datasetKey").getPegasusSchema()); + Optional.ofNullable( + 
entitySpecs.get("dataset").getAspectSpec("datasetKey").getPegasusSchema()); assertTrue(dataSchema.isPresent(), "datasetKey"); assertNotNull(entitySpec.getAspectSpec("testDataQualityRules")); assertEquals(entitySpecs.values().size(), 1); @@ -179,37 +207,65 @@ public void testEntityRegistryWithGoodBase() throws FileNotFoundException, Inter @Test /** - * Tests that we can load up entity registries that represent safe evolutions as well as decline to load registries that represent unsafe evolutions. - * - */ public void testEntityRegistryVersioning() throws InterruptedException { + * Tests that we can load up entity registries that represent safe evolutions as well as decline + * to load registries that represent unsafe evolutions. + */ + public void testEntityRegistryVersioning() throws InterruptedException { MergedEntityRegistry mergedEntityRegistry = new MergedEntityRegistry(getBaseEntityRegistry()); String multiversionPluginDir = "src/test_plugins/"; PluginEntityRegistryLoader pluginEntityRegistryLoader = - new PluginEntityRegistryLoader(multiversionPluginDir).withBaseRegistry(mergedEntityRegistry).start(true); - Map>> loadedRegistries = - pluginEntityRegistryLoader.getPatchRegistries(); + new PluginEntityRegistryLoader(multiversionPluginDir) + .withBaseRegistry(mergedEntityRegistry) + .start(true); + Map>> + loadedRegistries = pluginEntityRegistryLoader.getPatchRegistries(); String registryName = "mycompany-dq-model"; assertTrue(loadedRegistries.containsKey(registryName)); assertTrue(loadedRegistries.get(registryName).containsKey(new ComparableVersion("0.0.1"))); - System.out.println(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.1")).getSecond().getFailureReason()); - - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.1")).getSecond().getLoadResult(), + System.out.println( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.1")) + .getSecond() + .getFailureReason()); + + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.1")) + .getSecond() + .getLoadResult(), LoadStatus.SUCCESS); - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.2")).getSecond().getLoadResult(), + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.2")) + .getSecond() + .getLoadResult(), LoadStatus.SUCCESS); - assertEquals(loadedRegistries.get(registryName).get(new ComparableVersion("0.0.3")).getSecond().getLoadResult(), + assertEquals( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.3")) + .getSecond() + .getLoadResult(), LoadStatus.FAILURE); - assertTrue(loadedRegistries.get(registryName) - .get(new ComparableVersion("0.0.3")) - .getSecond() - .getFailureReason() - .contains("new record removed required fields type")); + assertTrue( + loadedRegistries + .get(registryName) + .get(new ComparableVersion("0.0.3")) + .getSecond() + .getFailureReason() + .contains("new record removed required fields type")); assertTrue(mergedEntityRegistry.getEntitySpec("dataset").hasAspect("dataQualityRules")); RecordDataSchema dataSchema = - mergedEntityRegistry.getEntitySpec("dataset").getAspectSpec("dataQualityRules").getPegasusSchema(); + mergedEntityRegistry + .getEntitySpec("dataset") + .getAspectSpec("dataQualityRules") + .getPegasusSchema(); ArrayDataSchema arrayDataSchema = (ArrayDataSchema) dataSchema.getField("rules").getType().getDereferencedDataSchema(); // Aspect Schema should be the same as version 0.0.2, checking to see that 
all fields exist
diff --git a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java index ae46f3796aa73..43ae86076ae8c 100644 --- a/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java +++ b/entity-registry/src/test/java/com/linkedin/metadata/models/registry/TestConstants.java @@ -2,7 +2,6 @@ import org.apache.maven.artifact.versioning.ComparableVersion; - public class TestConstants { public static final String TEST_REGISTRY = "mycompany-dq-model"; public static final String BASE_DIRECTORY = "custom-test-model/build/plugins/models"; @@ -10,6 +9,5 @@ public class TestConstants { public static final String TEST_ASPECT_NAME = "testDataQualityRules"; public static final String TEST_EVENT_NAME = "dataQualityEvent"; - private TestConstants() { - } + private TestConstants() {} }
diff --git a/gradle/checkstyle/checkstyle.xml b/gradle/checkstyle/checkstyle.xml deleted file mode 100644 index a9bffe839edad..0000000000000 --- a/gradle/checkstyle/checkstyle.xml +++ /dev/null @@ -1,198 +0,0 @@ [198 deleted lines of Checkstyle XML configuration; the XML markup was not preserved in this text rendering of the patch]
diff --git a/gradle/checkstyle/suppressions.xml b/gradle/checkstyle/suppressions.xml deleted file mode 100644 index 829689ba35611..0000000000000 --- a/gradle/checkstyle/suppressions.xml +++ /dev/null @@ -1,7 +0,0 @@ [7 deleted lines of Checkstyle suppressions XML; markup not preserved]
diff --git a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java index e71fe6266b955..02aeb047a4d3e 100644 --- a/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java +++ b/ingestion-scheduler/src/main/java/com/datahub/metadata/ingestion/IngestionScheduler.java @@ -1,7 +1,6 @@ package com.datahub.metadata.ingestion; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,6 +17,7 @@ import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.query.ListResult; import com.linkedin.metadata.utils.GenericRecordUtils; @@ -45,29 +45,31 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.scheduling.support.CronSequenceGenerator; - /** - * This class serves as a stateful scheduler of Ingestion Runs for Ingestion Sources defined - * within DataHub. It manages storing and triggering ingestion sources on a pre-defined schedule - * based on the information present in the {@link DataHubIngestionSourceInfo} aspect. As such, this class + * This class serves as a stateful scheduler of Ingestion Runs for Ingestion Sources defined within + * DataHub.
It manages storing and triggering ingestion sources on a pre-defined schedule based on + * the information present in the {@link DataHubIngestionSourceInfo} aspect. As such, this class * should never be instantiated more than once - it's a singleton. * - * When the scheduler is created, it will first batch load all "info" aspects associated with the DataHubIngestionSource entity. - * It then iterates through all the aspects and attempts to extract a Quartz-cron (* * * * *) formatted schedule string & timezone from each. - * Upon finding a schedule and timezone, the "next execution time" as a relative timestamp is computed and a task - * is scheduled at that time in the future. + *
<p>
When the scheduler is created, it will first batch load all "info" aspects associated with the + * DataHubIngestionSource entity. It then iterates through all the aspects and attempts to extract a + * Quartz-cron (* * * * *) formatted schedule string & timezone from each. Upon finding a schedule + * and timezone, the "next execution time" as a relative timestamp is computed and a task is + * scheduled at that time in the future. * - * The child task is scheduled on another thread via {@link ScheduledExecutorService} and is responsible for creating a - * new DataHubExecutionRequest entity instance using an {@link EntityClient}. The execution request includes the inputs required - * to execute an ingestion source: an Ingestion Recipe encoded as JSON. This in turn triggers the execution of a downstream - * "action" which actually executes the ingestion process and reports the status back. + *
<p>
The child task is scheduled on another thread via {@link ScheduledExecutorService} and is + * responsible for creating a new DataHubExecutionRequest entity instance using an {@link + * EntityClient}. The execution request includes the inputs required to execute an ingestion source: + * an Ingestion Recipe encoded as JSON. This in turn triggers the execution of a downstream "action" + * which actually executes the ingestion process and reports the status back. * - * After initial load, this class will continuously listen to the MetadataChangeProposal stream and update its local cache based - * on changes performed against Ingestion Source entities. Specifically, if the schedule of an Ingestion Source is changed in any way, - * the next execution time of that source will be recomputed, with previously scheduled execution clear if necessary. + *
<p>
After initial load, this class will continuously listen to the MetadataChangeProposal stream + and update its local cache based on changes performed against Ingestion Source entities. + Specifically, if the schedule of an Ingestion Source is changed in any way, the next execution + time of that source will be recomputed, with the previously scheduled execution cleared if necessary. * - * On top of that, the component can also refresh its entire cache periodically. By default, it batch loads all the latest - * schedules on a once-per-day cadence. + *
<p>
On top of that, the component can also refresh its entire cache periodically. By default, it + * batch loads all the latest schedules on a once-per-day cadence. */ @Slf4j @RequiredArgsConstructor @@ -76,32 +78,35 @@ public class IngestionScheduler { private final Authentication _systemAuthentication; private final EntityClient _entityClient; - // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the source + // Maps a DataHubIngestionSource to a future representing the "next" scheduled execution of the + // source // Visible for testing final Map> _nextIngestionSourceExecutionCache = new HashMap<>(); // Shared executor service used for executing an ingestion source on a schedule - private final ScheduledExecutorService _sharedExecutorService = Executors.newScheduledThreadPool(1); + private final ScheduledExecutorService _sharedExecutorService = + Executors.newScheduledThreadPool(1); private final IngestionConfiguration _ingestionConfiguration; private final int _batchGetDelayIntervalSeconds; private final int _batchGetRefreshIntervalSeconds; public void init() { - final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = new BatchRefreshSchedulesRunnable( - _systemAuthentication, - _entityClient, - this::scheduleNextIngestionSourceExecution, - this::unscheduleAll); + final BatchRefreshSchedulesRunnable batchRefreshSchedulesRunnable = + new BatchRefreshSchedulesRunnable( + _systemAuthentication, + _entityClient, + this::scheduleNextIngestionSourceExecution, + this::unscheduleAll); // Schedule a recurring batch-reload task. _sharedExecutorService.scheduleAtFixedRate( - batchRefreshSchedulesRunnable, _batchGetDelayIntervalSeconds, _batchGetRefreshIntervalSeconds, + batchRefreshSchedulesRunnable, + _batchGetDelayIntervalSeconds, + _batchGetRefreshIntervalSeconds, TimeUnit.SECONDS); } - /** - * Removes the next scheduled execution of a particular ingestion source, if it exists. - */ + /** Removes the next scheduled execution of a particular ingestion source, if it exists. */ public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) { log.info("Unscheduling ingestion source with urn {}", ingestionSourceUrn); // Deleting an ingestion source schedule. Un-schedule the next execution. @@ -113,20 +118,25 @@ public void unscheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn) } /** - * Un-schedule all ingestion sources that are scheduled for execution. This is performed on refresh of ingestion sources. + * Un-schedule all ingestion sources that are scheduled for execution. This is performed on + * refresh of ingestion sources. */ public void unscheduleAll() { // Deleting an ingestion source schedule. Un-schedule the next execution. - Set scheduledSources = new HashSet<>(_nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. + Set scheduledSources = + new HashSet<>( + _nextIngestionSourceExecutionCache.keySet()); // Create copy to avoid concurrent mod. for (Urn urn : scheduledSources) { unscheduleNextIngestionSourceExecution(urn); } } /** - * Computes and schedules the next execution time for a particular Ingestion Source, if it has not already been scheduled. + * Computes and schedules the next execution time for a particular Ingestion Source, if it has not + * already been scheduled. 
*/ - public void scheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn, final DataHubIngestionSourceInfo newInfo) { + public void scheduleNextIngestionSourceExecution( + final Urn ingestionSourceUrn, final DataHubIngestionSourceInfo newInfo) { // 1. Attempt to un-schedule any previous executions unscheduleNextIngestionSourceExecution(ingestionSourceUrn); @@ -137,50 +147,63 @@ public void scheduleNextIngestionSourceExecution(final Urn ingestionSourceUrn, f // 2. Schedule the next run of the ingestion source log.info( - String.format("Scheduling next execution of Ingestion Source with urn %s. Schedule: %s", - ingestionSourceUrn, - schedule.getInterval(GetMode.NULL))); + String.format( + "Scheduling next execution of Ingestion Source with urn %s. Schedule: %s", + ingestionSourceUrn, schedule.getInterval(GetMode.NULL))); // Construct the new cron expression final String modifiedCronInterval = adjustCronInterval(schedule.getInterval()); if (CronSequenceGenerator.isValidExpression(modifiedCronInterval)) { final String timezone = schedule.hasTimezone() ? schedule.getTimezone() : "UTC"; - final CronSequenceGenerator generator = new CronSequenceGenerator(modifiedCronInterval, TimeZone.getTimeZone(timezone)); + final CronSequenceGenerator generator = + new CronSequenceGenerator(modifiedCronInterval, TimeZone.getTimeZone(timezone)); final Date currentDate = new Date(); final Date nextExecDate = generator.next(currentDate); final long scheduleTime = nextExecDate.getTime() - currentDate.getTime(); // Schedule the ingestion source to run some time in the future. - final ExecutionRequestRunnable executionRequestRunnable = new ExecutionRequestRunnable( - _systemAuthentication, - _entityClient, - _ingestionConfiguration, - ingestionSourceUrn, - newInfo, - () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), - this::scheduleNextIngestionSourceExecution); + final ExecutionRequestRunnable executionRequestRunnable = + new ExecutionRequestRunnable( + _systemAuthentication, + _entityClient, + _ingestionConfiguration, + ingestionSourceUrn, + newInfo, + () -> _nextIngestionSourceExecutionCache.remove(ingestionSourceUrn), + this::scheduleNextIngestionSourceExecution); // Schedule the next ingestion run - final ScheduledFuture scheduledFuture = _sharedExecutorService.schedule(executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); + final ScheduledFuture scheduledFuture = + _sharedExecutorService.schedule( + executionRequestRunnable, scheduleTime, TimeUnit.MILLISECONDS); _nextIngestionSourceExecutionCache.put(ingestionSourceUrn, scheduledFuture); - log.info(String.format("Scheduled next execution of Ingestion Source with urn %s in %sms.", ingestionSourceUrn, scheduleTime)); + log.info( + String.format( + "Scheduled next execution of Ingestion Source with urn %s in %sms.", + ingestionSourceUrn, scheduleTime)); } else { - log.error(String.format("Found malformed Ingestion Source schedule: %s for urn: %s. Skipping scheduling.", schedule.getInterval(), ingestionSourceUrn)); + log.error( + String.format( + "Found malformed Ingestion Source schedule: %s for urn: %s. Skipping scheduling.", + schedule.getInterval(), ingestionSourceUrn)); } } else { - log.info(String.format("Ingestion source with urn %s has no configured schedule. Not scheduling.", ingestionSourceUrn)); + log.info( + String.format( + "Ingestion source with urn %s has no configured schedule. Not scheduling.", + ingestionSourceUrn)); } } /** * A {@link Runnable} used to periodically re-populate the schedules cache. 
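  // A minimal, self-contained sketch (illustrative class and method names, not DataHub API)
  // of the next-run delay computed by scheduleNextIngestionSourceExecution above. DataHub
  // stores 5-field cron expressions, while Spring's CronSequenceGenerator expects 6 fields,
  // so a "0" seconds field is prefixed first (see adjustCronInterval further down in this
  // file). Imports assumed: java.util.Date, java.util.TimeZone,
  // org.springframework.scheduling.support.CronSequenceGenerator.
  class NextRunDelaySketch {
    static long nextRunDelayMillis(String fiveFieldCron, String timezoneId) {
      final String sixFieldCron = "0 " + fiveFieldCron; // e.g. "0 0 * * 1" -> "0 0 0 * * 1"
      if (!CronSequenceGenerator.isValidExpression(sixFieldCron)) {
        throw new IllegalArgumentException("Malformed cron interval: " + fiveFieldCron);
      }
      final CronSequenceGenerator generator =
          new CronSequenceGenerator(sixFieldCron, TimeZone.getTimeZone(timezoneId));
      final Date now = new Date();
      // Millis until the next instant matching the expression, mirroring the scheduleTime
      // computation above.
      return generator.next(now).getTime() - now.getTime();
    }
  }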
* - * Currently, the refresh logic is not very smart. When the cache is invalidated, we simply re-fetch the - * entire cache using schedules stored in the backend. + *
<p>
Currently, the refresh logic is not very smart. When the cache is invalidated, we simply + * re-fetch the entire cache using schedules stored in the backend. */ @VisibleForTesting static class BatchRefreshSchedulesRunnable implements Runnable { @@ -193,11 +216,13 @@ static class BatchRefreshSchedulesRunnable implements Runnable { public BatchRefreshSchedulesRunnable( @Nonnull final Authentication systemAuthentication, @Nonnull final EntityClient entityClient, - @Nonnull final BiConsumer scheduleNextIngestionSourceExecution, + @Nonnull + final BiConsumer scheduleNextIngestionSourceExecution, @Nonnull final Runnable unscheduleAll) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _entityClient = Objects.requireNonNull(entityClient); - _scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); + _scheduleNextIngestionSourceExecution = + Objects.requireNonNull(scheduleNextIngestionSourceExecution); _unscheduleAll = unscheduleAll; } @@ -214,25 +239,31 @@ public void run() { while (start < total) { try { - log.debug(String.format("Batch fetching ingestion source schedules. start: %s, count: %s ", start, count)); + log.debug( + String.format( + "Batch fetching ingestion source schedules. start: %s, count: %s ", + start, count)); // 1. List all ingestion source urns. - final ListResult ingestionSourceUrns = _entityClient.list( - Constants.INGESTION_SOURCE_ENTITY_NAME, - Collections.emptyMap(), - start, - count, - _systemAuthentication); + final ListResult ingestionSourceUrns = + _entityClient.list( + Constants.INGESTION_SOURCE_ENTITY_NAME, + Collections.emptyMap(), + start, + count, + _systemAuthentication); // 2. Fetch all ingestion sources, specifically the "info" aspect. - final Map ingestionSources = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ingestionSourceUrns.getEntities()), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - _systemAuthentication); + final Map ingestionSources = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ingestionSourceUrns.getEntities()), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + _systemAuthentication); // 3. Reschedule ingestion sources based on the fetched schedules (inside "info") - log.debug("Received batch of Ingestion Source Info aspects. Attempting to re-schedule execution requests."); + log.debug( + "Received batch of Ingestion Source Info aspects. Attempting to re-schedule execution requests."); // Then schedule the next ingestion runs scheduleNextIngestionRuns(new ArrayList<>(ingestionSources.values())); @@ -242,29 +273,33 @@ public void run() { } catch (RemoteInvocationException e) { log.error( - String.format("Failed to retrieve ingestion sources! Skipping updating schedule cache until next refresh. start: %s, count: %s", - start, - count), + String.format( + "Failed to retrieve ingestion sources! Skipping updating schedule cache until next refresh. start: %s, count: %s", + start, count), e); return; } } log.info(String.format("Successfully fetched %s ingestion sources.", total)); } catch (Exception e) { - log.error("Caught exception while loading Ingestion Sources. Will retry on next scheduled attempt.", e); + log.error( + "Caught exception while loading Ingestion Sources. Will retry on next scheduled attempt.", + e); } } /** - * Attempts to reschedule the next ingestion source run based on a batch of {@link EntityResponse} objects - * received from the Metadata Service. 
+ * Attempts to reschedule the next ingestion source run based on a batch of {@link + * EntityResponse} objects received from the Metadata Service. */ - private void scheduleNextIngestionRuns(@Nonnull final List ingestionSourceEntities) { + private void scheduleNextIngestionRuns( + @Nonnull final List ingestionSourceEntities) { for (final EntityResponse response : ingestionSourceEntities) { final Urn entityUrn = response.getUrn(); final EnvelopedAspectMap aspects = response.getAspects(); final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); // Invoke the "scheduleNextIngestionSourceExecution" (passed from parent) _scheduleNextIngestionSourceExecution.accept(entityUrn, ingestionSourceInfo); @@ -275,7 +310,8 @@ private void scheduleNextIngestionRuns(@Nonnull final List inges /** * A {@link Runnable} used to create Ingestion Execution Requests. * - * The expectation is that there's a downstream action which is listening and executing new Execution Requests. + *
<p>
The expectation is that there's a downstream action which is listening and executing new + * Execution Requests. */ @VisibleForTesting static class ExecutionRequestRunnable implements Runnable { @@ -294,7 +330,8 @@ static class ExecutionRequestRunnable implements Runnable { private final Urn _ingestionSourceUrn; private final DataHubIngestionSourceInfo _ingestionSourceInfo; - // Used for clearing the "next execution" cache once a corresponding execution request has been created. + // Used for clearing the "next execution" cache once a corresponding execution request has been + // created. private final Runnable _deleteNextIngestionSourceExecution; // Used for re-scheduling the ingestion source once it has executed! @@ -307,27 +344,33 @@ public ExecutionRequestRunnable( @Nonnull final Urn ingestionSourceUrn, @Nonnull final DataHubIngestionSourceInfo ingestionSourceInfo, @Nonnull final Runnable deleteNextIngestionSourceExecution, - @Nonnull final BiConsumer scheduleNextIngestionSourceExecution) { + @Nonnull + final BiConsumer + scheduleNextIngestionSourceExecution) { _systemAuthentication = Objects.requireNonNull(systemAuthentication); _entityClient = Objects.requireNonNull(entityClient); _ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); _ingestionSourceUrn = Objects.requireNonNull(ingestionSourceUrn); _ingestionSourceInfo = Objects.requireNonNull(ingestionSourceInfo); - _deleteNextIngestionSourceExecution = Objects.requireNonNull(deleteNextIngestionSourceExecution); - _scheduleNextIngestionSourceExecution = Objects.requireNonNull(scheduleNextIngestionSourceExecution); + _deleteNextIngestionSourceExecution = + Objects.requireNonNull(deleteNextIngestionSourceExecution); + _scheduleNextIngestionSourceExecution = + Objects.requireNonNull(scheduleNextIngestionSourceExecution); } @Override public void run() { - // Remove the next ingestion execution as we are going to execute it now. (no retry logic currently) + // Remove the next ingestion execution as we are going to execute it now. (no retry logic + // currently) _deleteNextIngestionSourceExecution.run(); try { - log.info(String.format( - "Creating Execution Request for scheduled Ingestion Source with urn %s", - _ingestionSourceUrn)); + log.info( + String.format( + "Creating Execution Request for scheduled Ingestion Source with urn %s", + _ingestionSourceUrn)); // Create a new Execution Request Proposal final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -341,18 +384,23 @@ public void run() { // Construct arguments (arguments) of the Execution Request final ExecutionRequestInput input = new ExecutionRequestInput(); input.setTask(RUN_INGEST_TASK_NAME); - input.setSource(new ExecutionRequestSource() - .setType(EXECUTION_REQUEST_SOURCE_NAME) - .setIngestionSource(_ingestionSourceUrn)); + input.setSource( + new ExecutionRequestSource() + .setType(EXECUTION_REQUEST_SOURCE_NAME) + .setIngestionSource(_ingestionSourceUrn)); input.setExecutorId(_ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); input.setRequestedAt(System.currentTimeMillis()); Map arguments = new HashMap<>(); - String recipe = IngestionUtils.injectPipelineName(_ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); + String recipe = + IngestionUtils.injectPipelineName( + _ingestionSourceInfo.getConfig().getRecipe(), _ingestionSourceUrn.toString()); arguments.put(RECIPE_ARGUMENT_NAME, recipe); - arguments.put(VERSION_ARGUMENT_NAME, _ingestionSourceInfo.getConfig().hasVersion() - ? 
_ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion()); + arguments.put( + VERSION_ARGUMENT_NAME, + _ingestionSourceInfo.getConfig().hasVersion() + ? _ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); String debugMode = "false"; if (_ingestionSourceInfo.getConfig().hasDebugMode()) { debugMode = _ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; @@ -368,9 +416,11 @@ public void run() { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (Exception e) { // TODO: This type of thing should likely be proactively reported. - log.error(String.format( - "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", - _ingestionSourceUrn), e); + log.error( + String.format( + "Caught exception while attempting to create Execution Request for Ingestion Source with urn %s. Will retry on next scheduled attempt.", + _ingestionSourceUrn), + e); } // 2. Re-Schedule the next execution request. @@ -380,11 +430,12 @@ public void run() { private String adjustCronInterval(final String origCronInterval) { Objects.requireNonNull(origCronInterval, "origCronInterval must not be null"); - // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make an adjustment here. + // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make + // an adjustment here. final String[] originalCronParts = origCronInterval.split(" "); if (originalCronParts.length == 5) { return String.format("0 %s", origCronInterval); } return origCronInterval; } -} \ No newline at end of file +} diff --git a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java index 51b7fe85f4922..4366ff64ae384 100644 --- a/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java +++ b/ingestion-scheduler/src/test/java/com/datahub/metadata/ingestion/IngestionSchedulerTest.java @@ -1,7 +1,8 @@ package com.datahub.metadata.ingestion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.UrnArray; @@ -10,11 +11,12 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.ingestion.DataHubIngestionSourceConfig; import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.query.ListResult; import java.util.Collections; import java.util.concurrent.Future; @@ -24,8 +26,6 @@ import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class IngestionSchedulerTest { private IngestionScheduler _ingestionScheduler; @@ -36,14 +36,17 @@ public void setupTest() throws Exception { // Init mocks. 
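    // Note on the fixtures below: the zone string "America/Los Angeles" is not a valid IANA
    // zone ID (the real ID is "America/Los_Angeles"), and java.util.TimeZone.getTimeZone(...)
    // silently falls back to GMT for IDs it cannot parse, e.g.
    // TimeZone.getTimeZone("America/Los Angeles").getID() returns "GMT". Likewise, the
    // recurring "// Run every monday" comment only matches the "0 0 * * 1" interval;
    // "0 1 1 * *" fires at 01:00 on the first of each month, and "* * * * *" fires every
    // minute.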
final Urn ingestionSourceUrn1 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); final DataHubIngestionSourceInfo info1 = new DataHubIngestionSourceInfo(); - info1.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 0 * * 1").setTimezone("America/Los Angeles")); // Run every monday + info1.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 0 * * 1") + .setTimezone("America/Los Angeles")); // Run every monday info1.setType("mysql"); info1.setName("My Test Source"); - info1.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + info1.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); final EnvelopedAspect envelopedAspect1 = new EnvelopedAspect(); envelopedAspect1.setName(Constants.INGESTION_INFO_ASPECT_NAME); @@ -54,19 +57,23 @@ public void setupTest() throws Exception { final EntityResponse entityResponse1 = Mockito.mock(EntityResponse.class); Mockito.when(entityResponse1.getUrn()).thenReturn(ingestionSourceUrn1); - Mockito.when(entityResponse1.getEntityName()).thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); + Mockito.when(entityResponse1.getEntityName()) + .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse1.getAspects()).thenReturn(map1); final Urn ingestionSourceUrn2 = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:1"); final DataHubIngestionSourceInfo info2 = new DataHubIngestionSourceInfo(); - info2.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 0 * * 1 BLUE GREEN").setTimezone("America/Los Angeles")); // Run every monday + info2.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 0 * * 1 BLUE GREEN") + .setTimezone("America/Los Angeles")); // Run every monday info2.setType("invalid"); info2.setName("My Invalid Source"); - info2.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + info2.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); final EnvelopedAspect envelopedAspect2 = new EnvelopedAspect(); envelopedAspect2.setName(Constants.INGESTION_INFO_ASPECT_NAME); @@ -77,35 +84,44 @@ public void setupTest() throws Exception { final EntityResponse entityResponse2 = Mockito.mock(EntityResponse.class); Mockito.when(entityResponse2.getUrn()).thenReturn(ingestionSourceUrn2); - Mockito.when(entityResponse2.getEntityName()).thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); + Mockito.when(entityResponse2.getEntityName()) + .thenReturn(Constants.INGESTION_SOURCE_ENTITY_NAME); Mockito.when(entityResponse2.getAspects()).thenReturn(map2); JavaEntityClient mockClient = Mockito.mock(JavaEntityClient.class); // Set up mocks for ingestion source batch fetching - Mockito.when(mockClient.list( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(30), - Mockito.any() - )).thenReturn(new ListResult().setCount(30).setTotal(2).setStart(0).setEntities( - new UrnArray(ingestionSourceUrn1, ingestionSourceUrn2))); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(ingestionSourceUrn1, ingestionSourceUrn2)), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any() - )).thenReturn(ImmutableMap.of( - ingestionSourceUrn1, entityResponse1, 
- ingestionSourceUrn2, entityResponse2)); - - _ingestionScheduler = new IngestionScheduler( - Mockito.mock(Authentication.class), - mockClient, - Mockito.mock(IngestionConfiguration.class), - 1, - 1200); + Mockito.when( + mockClient.list( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(30), + Mockito.any())) + .thenReturn( + new ListResult() + .setCount(30) + .setTotal(2) + .setStart(0) + .setEntities(new UrnArray(ingestionSourceUrn1, ingestionSourceUrn2))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(ingestionSourceUrn1, ingestionSourceUrn2)), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn( + ImmutableMap.of( + ingestionSourceUrn1, entityResponse1, + ingestionSourceUrn2, entityResponse2)); + + _ingestionScheduler = + new IngestionScheduler( + Mockito.mock(Authentication.class), + mockClient, + Mockito.mock(IngestionConfiguration.class), + 1, + 1200); _ingestionScheduler.init(); Thread.sleep(2000); // Sleep so the runnable can execute. (not ideal) } @@ -115,22 +131,27 @@ public void testInvokeUpdateExistingSchedule() throws Exception { assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); Urn ingestionSourceUrn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0"); - Future beforeFuture = _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + Future beforeFuture = + _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(ingestionSourceUrn, newInfo); assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 1); - Future newFuture = _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); + Future newFuture = + _ingestionScheduler._nextIngestionSourceExecutionCache.get(ingestionSourceUrn); // Ensure that there is an overwritten future. 
Assert.assertNotSame(beforeFuture, newFuture); @@ -142,14 +163,17 @@ public void testInvokeNewSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the new source has been scheduled successfully. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -163,14 +187,17 @@ public void testInvokeInvalidSchedule() throws Exception { final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:2"); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); // Invalid schedule set. - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("NOT A SCHEDULE").setTimezone("America/Los Angeles")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("NOT A SCHEDULE") + .setTimezone("America/Los Angeles")); // Run every monday newInfo.setType("snowflake"); newInfo.setName("My Snowflake Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that no changes have been made to next execution cache. _ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo); @@ -186,11 +213,11 @@ public void testInvokeMissingSchedule() throws Exception { // No schedule set. newInfo.setType("mysql"); newInfo.setName("My Test Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); // Assert that the schedule has been removed. 
_ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo);
@@ -218,19 +245,24 @@ public void testSchedule() throws Exception {
final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0");
final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo();
- newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("* * * * *").setTimezone("UTC")); // Run every monday
+ newInfo.setSchedule(
+ new DataHubIngestionSourceSchedule()
+ .setInterval("* * * * *")
+ .setTimezone("UTC")); // Runs every minute
newInfo.setType("redshift");
newInfo.setName("My Redshift Source");
- newInfo.setConfig(new DataHubIngestionSourceConfig()
- .setExecutorId("default")
- .setRecipe("{ type }")
- .setVersion("0.8.18")
- );
+ newInfo.setConfig(
+ new DataHubIngestionSourceConfig()
+ .setExecutorId("default")
+ .setRecipe("{ type }")
+ .setVersion("0.8.18"));
_ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo);
ScheduledFuture<?> future = _ingestionScheduler._nextIngestionSourceExecutionCache.get(urn);
- Assert.assertTrue(future.getDelay(TimeUnit.SECONDS) < 60); // Next execution must always be less than a minute away.
+ Assert.assertTrue(
+ future.getDelay(TimeUnit.SECONDS)
+ < 60); // Next execution must always be less than a minute away.
}
@Test
@@ -239,14 +271,17 @@ public void testUnscheduleAll() throws Exception {
final Urn urn = Urn.createFromString("urn:li:dataHubIngestionSourceUrn:3");
final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo();
- newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("* * * * *").setTimezone("UTC")); // Run every monday
+ newInfo.setSchedule(
+ new DataHubIngestionSourceSchedule()
+ .setInterval("* * * * *")
+ .setTimezone("UTC")); // Runs every minute
newInfo.setType("redshift");
newInfo.setName("My Redshift Source 2");
- newInfo.setConfig(new DataHubIngestionSourceConfig()
- .setExecutorId("default")
- .setRecipe("{ type }")
- .setVersion("0.8.18")
- );
+ newInfo.setConfig(
+ new DataHubIngestionSourceConfig()
+ .setExecutorId("default")
+ .setRecipe("{ type }")
+ .setVersion("0.8.18"));
_ingestionScheduler.scheduleNextIngestionSourceExecution(urn, newInfo);
assertEquals(_ingestionScheduler._nextIngestionSourceExecutionCache.size(), 2);
diff --git a/li-utils/src/main/java/com/datahub/util/ModelUtils.java b/li-utils/src/main/java/com/datahub/util/ModelUtils.java
index 65379d353de86..538a0d2dfdeb0 100644
--- a/li-utils/src/main/java/com/datahub/util/ModelUtils.java
+++ b/li-utils/src/main/java/com/datahub/util/ModelUtils.java
@@ -29,7 +29,6 @@
import javax.annotation.Nonnull;
import org.reflections.Reflections;
-
public class ModelUtils {
private static final ClassLoader CLASS_LOADER = DummySnapshot.class.getClassLoader();
@@ -69,13 +68,15 @@ public static Class<? extends RecordTemplate> getAspectClass(@Nonnull String asp
* @return a set of supported aspects
*/
@Nonnull
- public static <ASPECT_UNION extends UnionTemplate> Set<Class<? extends RecordTemplate>> getValidAspectTypes(
- @Nonnull Class<ASPECT_UNION> aspectUnionClass) {
+ public static <ASPECT_UNION extends UnionTemplate>
+ Set<Class<? extends RecordTemplate>> getValidAspectTypes(
+ @Nonnull Class<ASPECT_UNION> aspectUnionClass) {
AspectValidator.validateAspectUnionSchema(aspectUnionClass);
Set<Class<? extends RecordTemplate>> validTypes = new HashSet<>();
- for (UnionDataSchema.Member member : ValidationUtils.getUnionSchema(aspectUnionClass).getMembers()) {
+ for (UnionDataSchema.Member member :
+ ValidationUtils.getUnionSchema(aspectUnionClass).getMembers()) {
if (member.getType().getType() == DataSchema.Type.RECORD) {
String fqcn = ((RecordDataSchema) member.getType()).getBindingName();
try {
@@ -89,11 +90,10 @@ public static Set Class
getClassFromName(@Nonnull String className, @Nonnull Class parentClass) { + public static Class getClassFromName( + @Nonnull String className, @Nonnull Class parentClass) { try { return CLASS_LOADER.loadClass(className).asSubclass(parentClass); } catch (ClassNotFoundException e) { @@ -108,8 +108,10 @@ public static Class getClassFromName(@Nonnull String className, * @return snapshot class that extends {@link RecordTemplate}, associated with className */ @Nonnull - public static Class getMetadataSnapshotClassFromName(@Nonnull String className) { - Class snapshotClass = getClassFromName(className, RecordTemplate.class); + public static Class getMetadataSnapshotClassFromName( + @Nonnull String className) { + Class snapshotClass = + getClassFromName(className, RecordTemplate.class); SnapshotValidator.validateSnapshotSchema(snapshotClass); return snapshotClass; } @@ -122,13 +124,16 @@ public static Class getMetadataSnapshotClassFromName(@ * @return the extracted {@link Urn} */ @Nonnull - public static Urn getUrnFromSnapshot(@Nonnull SNAPSHOT snapshot) { + public static Urn getUrnFromSnapshot( + @Nonnull SNAPSHOT snapshot) { SnapshotValidator.validateSnapshotSchema(snapshot.getClass()); - return RecordUtils.getRecordTemplateField(snapshot, "urn", urnClassForSnapshot(snapshot.getClass())); + return RecordUtils.getRecordTemplateField( + snapshot, "urn", urnClassForSnapshot(snapshot.getClass())); } /** - * Similar to {@link #getUrnFromSnapshot(RecordTemplate)} but extracts from a Snapshot union instead. + * Similar to {@link #getUrnFromSnapshot(RecordTemplate)} but extracts from a Snapshot union + * instead. */ @Nonnull public static Urn getUrnFromSnapshotUnion(@Nonnull UnionTemplate snapshotUnion) { @@ -164,9 +169,11 @@ public static Urn getUrnFromDeltaUnion(@Nonnull UnionTemplate deltaUnion) { * @return the extracted {@link Urn} */ @Nonnull - public static Urn getUrnFromDocument(@Nonnull DOCUMENT document) { + public static Urn getUrnFromDocument( + @Nonnull DOCUMENT document) { DocumentValidator.validateDocumentSchema(document.getClass()); - return RecordUtils.getRecordTemplateField(document, "urn", urnClassForDocument(document.getClass())); + return RecordUtils.getRecordTemplateField( + document, "urn", urnClassForDocument(document.getClass())); } /** @@ -179,37 +186,35 @@ public static Urn getUrnFromDocument(@Nonnull @Nonnull public static Urn getUrnFromEntity(@Nonnull ENTITY entity) { EntityValidator.validateEntitySchema(entity.getClass()); - return RecordUtils.getRecordTemplateField(entity, "urn", urnClassForDocument(entity.getClass())); + return RecordUtils.getRecordTemplateField( + entity, "urn", urnClassForDocument(entity.getClass())); } /** * Extracts the fields with type urn from a relationship. 
* * @param relationship the relationship to extract urn from - * @param must be a valid relationship model defined in com.linkedin.metadata.relationship + * @param must be a valid relationship model defined in + * com.linkedin.metadata.relationship * @param fieldName name of the field with type urn * @return the extracted {@link Urn} */ @Nonnull - private static Urn getUrnFromRelationship(@Nonnull RELATIONSHIP relationship, - @Nonnull String fieldName) { + private static Urn getUrnFromRelationship( + @Nonnull RELATIONSHIP relationship, @Nonnull String fieldName) { RelationshipValidator.validateRelationshipSchema(relationship.getClass()); - return RecordUtils.getRecordTemplateField(relationship, fieldName, - urnClassForRelationship(relationship.getClass(), fieldName)); + return RecordUtils.getRecordTemplateField( + relationship, fieldName, urnClassForRelationship(relationship.getClass(), fieldName)); } - /** - * Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. - */ + /** Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. */ @Nonnull public static Urn getSourceUrnFromRelationship( @Nonnull RELATIONSHIP relationship) { return getUrnFromRelationship(relationship, "source"); } - /** - * Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. - */ + /** Similar to {@link #getUrnFromRelationship} but extracts from a delta union instead. */ @Nonnull public static Urn getDestinationUrnFromRelationship( @Nonnull RELATIONSHIP relationship) { @@ -240,8 +245,9 @@ public static List getAspectsF * @return the extracted aspect */ @Nonnull - public static Optional getAspectFromSnapshot( - @Nonnull SNAPSHOT snapshot, @Nonnull Class aspectClass) { + public static + Optional getAspectFromSnapshot( + @Nonnull SNAPSHOT snapshot, @Nonnull Class aspectClass) { return getAspectsFromSnapshot(snapshot).stream() .filter(aspect -> aspect.getClass().equals(aspectClass)) @@ -250,10 +256,12 @@ public static Opt } /** - * Similar to {@link #getAspectsFromSnapshot(RecordTemplate)} but extracts from a snapshot union instead. + * Similar to {@link #getAspectsFromSnapshot(RecordTemplate)} but extracts from a snapshot union + * instead. 
*/ @Nonnull - public static List getAspectsFromSnapshotUnion(@Nonnull UnionTemplate snapshotUnion) { + public static List getAspectsFromSnapshotUnion( + @Nonnull UnionTemplate snapshotUnion) { return getAspects(RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion)); } @@ -261,10 +269,12 @@ public static List getAspectsFromSnapshotUnion(@Nonnull UnionTem private static List getAspects(@Nonnull RecordTemplate snapshot) { final Class clazz = getAspectsArrayClass(snapshot.getClass()); - WrappingArrayTemplate aspectArray = RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); + WrappingArrayTemplate aspectArray = + RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); final List aspects = new ArrayList<>(); - aspectArray.forEach(item -> aspects.add(RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item))); + aspectArray.forEach( + item -> aspects.add(RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item))); return aspects; } @@ -280,12 +290,17 @@ private static List getAspects(@Nonnull RecordTemplate snapshot) * @return the created snapshot */ @Nonnull - public static SNAPSHOT newSnapshot( - @Nonnull Class snapshotClass, @Nonnull URN urn, @Nonnull List aspects) { + public static < + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> + SNAPSHOT newSnapshot( + @Nonnull Class snapshotClass, + @Nonnull URN urn, + @Nonnull List aspects) { SnapshotValidator.validateSnapshotSchema(snapshotClass); - final Class aspectArrayClass = getAspectsArrayClass(snapshotClass); + final Class aspectArrayClass = + getAspectsArrayClass(snapshotClass); try { final SNAPSHOT snapshot = snapshotClass.newInstance(); @@ -300,11 +315,15 @@ public static Class getAspectsArrayClass( - @Nonnull Class snapshotClass) { + private static + Class getAspectsArrayClass( + @Nonnull Class snapshotClass) { try { - return snapshotClass.getMethod("getAspects").getReturnType().asSubclass(WrappingArrayTemplate.class); + return snapshotClass + .getMethod("getAspects") + .getReturnType() + .asSubclass(WrappingArrayTemplate.class); } catch (NoSuchMethodException | ClassCastException e) { throw new RuntimeException((e)); } @@ -320,8 +339,9 @@ private static Class ASPECT_UNION newAspectUnion( - @Nonnull Class aspectUnionClass, @Nonnull ASPECT aspect) { + public static + ASPECT_UNION newAspectUnion( + @Nonnull Class aspectUnionClass, @Nonnull ASPECT aspect) { AspectValidator.validateAspectUnionSchema(aspectUnionClass); @@ -334,60 +354,57 @@ public static aspectClassForSnapshot( @Nonnull Class snapshotClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); - String aspectClassName = ((TyperefDataSchema) ((ArrayDataSchema) ValidationUtils.getRecordSchema(snapshotClass) - .getField("aspects") - .getType()).getItems()).getBindingName(); + String aspectClassName = + ((TyperefDataSchema) + ((ArrayDataSchema) + ValidationUtils.getRecordSchema(snapshotClass) + .getField("aspects") + .getType()) + .getItems()) + .getBindingName(); return getClassFromName(aspectClassName, UnionTemplate.class); } - /** - * Gets the expected {@link Urn} class for a specific kind of entity. - */ + /** Gets the expected {@link Urn} class for a specific kind of entity. 
*/ @Nonnull - public static Class urnClassForEntity(@Nonnull Class entityClass) { + public static Class urnClassForEntity( + @Nonnull Class entityClass) { EntityValidator.validateEntitySchema(entityClass); return urnClassForField(entityClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of snapshot. - */ + /** Gets the expected {@link Urn} class for a specific kind of snapshot. */ @Nonnull - public static Class urnClassForSnapshot(@Nonnull Class snapshotClass) { + public static Class urnClassForSnapshot( + @Nonnull Class snapshotClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); return urnClassForField(snapshotClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of delta. - */ + /** Gets the expected {@link Urn} class for a specific kind of delta. */ @Nonnull - public static Class urnClassForDelta(@Nonnull Class deltaClass) { + public static Class urnClassForDelta( + @Nonnull Class deltaClass) { DeltaValidator.validateDeltaSchema(deltaClass); return urnClassForField(deltaClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of search document. - */ + /** Gets the expected {@link Urn} class for a specific kind of search document. */ @Nonnull - public static Class urnClassForDocument(@Nonnull Class documentClass) { + public static Class urnClassForDocument( + @Nonnull Class documentClass) { DocumentValidator.validateDocumentSchema(documentClass); return urnClassForField(documentClass, "urn"); } - /** - * Gets the expected {@link Urn} class for a specific kind of relationship. - */ + /** Gets the expected {@link Urn} class for a specific kind of relationship. */ @Nonnull private static Class urnClassForRelationship( @Nonnull Class relationshipClass, @Nonnull String fieldName) { @@ -405,7 +422,8 @@ public static Class sourceUrnClassForRelationship( } /** - * Gets the expected {@link Urn} class for the destination field of a specific kind of relationship. + * Gets the expected {@link Urn} class for the destination field of a specific kind of + * relationship. */ @Nonnull public static Class destinationUrnClassForRelationship( @@ -414,35 +432,37 @@ public static Class destinationUrnClassForRelationship( } @Nonnull - private static Class urnClassForField(@Nonnull Class recordClass, - @Nonnull String fieldName) { - String urnClassName = ((DataMap) ValidationUtils.getRecordSchema(recordClass) - .getField(fieldName) - .getType() - .getProperties() - .get("java")).getString("class"); + private static Class urnClassForField( + @Nonnull Class recordClass, @Nonnull String fieldName) { + String urnClassName = + ((DataMap) + ValidationUtils.getRecordSchema(recordClass) + .getField(fieldName) + .getType() + .getProperties() + .get("java")) + .getString("class"); return getClassFromName(urnClassName, Urn.class); } - /** - * Validates a specific snapshot-aspect combination. - */ - public static void validateSnapshotAspect( - @Nonnull Class snapshotClass, @Nonnull Class aspectUnionClass) { + /** Validates a specific snapshot-aspect combination. */ + public static + void validateSnapshotAspect( + @Nonnull Class snapshotClass, @Nonnull Class aspectUnionClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); AspectValidator.validateAspectUnionSchema(aspectUnionClass); // Make sure that SNAPSHOT's "aspects" array field contains ASPECT_UNION type. 
if (!aspectClassForSnapshot(snapshotClass).equals(aspectUnionClass)) { - throw new InvalidSchemaException(aspectUnionClass.getCanonicalName() + " is not a supported aspect class of " - + snapshotClass.getCanonicalName()); + throw new InvalidSchemaException( + aspectUnionClass.getCanonicalName() + + " is not a supported aspect class of " + + snapshotClass.getCanonicalName()); } } - /** - * Validates a specific snapshot-URN combination. - */ + /** Validates a specific snapshot-URN combination. */ public static void validateSnapshotUrn( @Nonnull Class snapshotClass, @Nonnull Class urnClass) { SnapshotValidator.validateSnapshotSchema(snapshotClass); @@ -450,7 +470,9 @@ public static void validateSn // Make sure that SNAPSHOT's "urn" field uses the correct class or subclasses if (!urnClassForSnapshot(snapshotClass).isAssignableFrom(urnClass)) { throw new InvalidSchemaException( - urnClass.getCanonicalName() + " is not a supported URN class of " + snapshotClass.getCanonicalName()); + urnClass.getCanonicalName() + + " is not a supported URN class of " + + snapshotClass.getCanonicalName()); } } @@ -459,13 +481,16 @@ public static void validateSn * * @param relationshipUnionClass the type of relationship union to create * @param relationship the relationship to set - * @param must be a valid relationship union defined in com.linkedin.metadata.relationship + * @param must be a valid relationship union defined in + * com.linkedin.metadata.relationship * @param must be a supported relationship type in ASPECT_UNION * @return the created relationship union */ @Nonnull - public static RELATIONSHIP_UNION newRelationshipUnion( - @Nonnull Class relationshipUnionClass, @Nonnull RELATIONSHIP relationship) { + public static + RELATIONSHIP_UNION newRelationshipUnion( + @Nonnull Class relationshipUnionClass, + @Nonnull RELATIONSHIP relationship) { RelationshipValidator.validateRelationshipUnionSchema(relationshipUnionClass); @@ -478,20 +503,16 @@ public static > getAllEntities() { - return new Reflections("com.linkedin.metadata.entity").getSubTypesOf(RecordTemplate.class) - .stream() - .filter(EntityValidator::isValidEntitySchema) - .collect(Collectors.toSet()); + return new Reflections("com.linkedin.metadata.entity") + .getSubTypesOf(RecordTemplate.class).stream() + .filter(EntityValidator::isValidEntitySchema) + .collect(Collectors.toSet()); } - /** - * Get entity type from urn class. - */ + /** Get entity type from urn class. */ @Nonnull public static String getEntityTypeFromUrnClass(@Nonnull Class urnClass) { try { @@ -501,13 +522,14 @@ public static String getEntityTypeFromUrnClass(@Nonnull Class urn } } - /** - * Get aspect specific kafka topic name from urn and aspect classes. - */ + /** Get aspect specific kafka topic name from urn and aspect classes. 
*/ @Nonnull - public static String getAspectSpecificMAETopicName(@Nonnull URN urn, - @Nonnull ASPECT newValue) { - return String.format("%s_%s_%s", METADATA_AUDIT_EVENT_PREFIX, urn.getEntityType().toUpperCase(), + public static + String getAspectSpecificMAETopicName(@Nonnull URN urn, @Nonnull ASPECT newValue) { + return String.format( + "%s_%s_%s", + METADATA_AUDIT_EVENT_PREFIX, + urn.getEntityType().toUpperCase(), newValue.getClass().getSimpleName().toUpperCase()); } @@ -521,8 +543,9 @@ public static String getAspectS * @return the created entity union */ @Nonnull - public static ENTITY_UNION newEntityUnion( - @Nonnull Class entityUnionClass, @Nonnull ENTITY entity) { + public static + ENTITY_UNION newEntityUnion( + @Nonnull Class entityUnionClass, @Nonnull ENTITY entity) { EntityValidator.validateEntityUnionSchema(entityUnionClass); diff --git a/li-utils/src/main/java/com/datahub/util/RecordUtils.java b/li-utils/src/main/java/com/datahub/util/RecordUtils.java index a9f8a07742491..d57875f79de61 100644 --- a/li-utils/src/main/java/com/datahub/util/RecordUtils.java +++ b/li-utils/src/main/java/com/datahub/util/RecordUtils.java @@ -33,10 +33,10 @@ import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; - public class RecordUtils { - private static final JacksonDataTemplateCodec DATA_TEMPLATE_CODEC = new JacksonDataTemplateCodec(); + private static final JacksonDataTemplateCodec DATA_TEMPLATE_CODEC = + new JacksonDataTemplateCodec(); private static final String ARRAY_WILDCARD = "*"; private static final Pattern LEADING_SPACESLASH_PATTERN = Pattern.compile("^[/ ]+"); private static final Pattern TRAILING_SPACESLASH_PATTERN = Pattern.compile("[/ ]+$"); @@ -44,10 +44,11 @@ public class RecordUtils { /** * Using in-memory hash map to store the get/is methods of the schema fields of RecordTemplate. 
- * Here map has RecordTemplate class as key, value being another map of field name with the associated get/is method + * Here map has RecordTemplate class as key, value being another map of field name with the + * associated get/is method */ - private static final ConcurrentHashMap, Map> METHOD_CACHE = - new ConcurrentHashMap<>(); + private static final ConcurrentHashMap, Map> + METHOD_CACHE = new ConcurrentHashMap<>(); private RecordUtils() { // Util class @@ -72,7 +73,8 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { try { return DATA_TEMPLATE_CODEC.mapToString(recordTemplate.data()); } catch (IOException e) { - throw new ModelConversionException("Failed to serialize RecordTemplate: " + recordTemplate.toString()); + throw new ModelConversionException( + "Failed to serialize RecordTemplate: " + recordTemplate.toString()); } } @@ -85,7 +87,8 @@ public static String toJsonString(@Nonnull RecordTemplate recordTemplate) { * @return the created {@link RecordTemplate} */ @Nonnull - public static T toRecordTemplate(@Nonnull Class type, @Nonnull String jsonString) { + public static T toRecordTemplate( + @Nonnull Class type, @Nonnull String jsonString) { DataMap dataMap; try { dataMap = DATA_TEMPLATE_CODEC.stringToMap(jsonString); @@ -105,18 +108,21 @@ public static T toRecordTemplate(@Nonnull Class ty * @return the created {@link RecordTemplate} */ @Nonnull - public static T toRecordTemplate(@Nonnull Class type, @Nonnull DataMap dataMap) { + public static T toRecordTemplate( + @Nonnull Class type, @Nonnull DataMap dataMap) { Constructor constructor; try { constructor = type.getConstructor(DataMap.class); } catch (NoSuchMethodException e) { - throw new ModelConversionException("Unable to find constructor for " + type.getCanonicalName(), e); + throw new ModelConversionException( + "Unable to find constructor for " + type.getCanonicalName(), e); } try { return constructor.newInstance(dataMap); } catch (Exception e) { - throw new ModelConversionException("Failed to invoke constructor for " + type.getCanonicalName(), e); + throw new ModelConversionException( + "Failed to invoke constructor for " + type.getCanonicalName(), e); } } @@ -128,7 +134,8 @@ public static T toRecordTemplate(@Nonnull Class ty * @return the created {@link RecordTemplate} */ @Nonnull - public static RecordTemplate toRecordTemplate(@Nonnull String className, @Nonnull DataMap dataMap) { + public static RecordTemplate toRecordTemplate( + @Nonnull String className, @Nonnull DataMap dataMap) { Class clazz; try { clazz = Class.forName(className).asSubclass(RecordTemplate.class); @@ -145,34 +152,41 @@ public static RecordTemplate toRecordTemplate(@Nonnull String className, @Nonnul * @param entity the entity value. * @param aspectClass the aspect class. * @return the aspect which is included in the entity. - * */ + */ @Nonnull - public static ASPECT extractAspectFromSingleAspectEntity( - @Nonnull ENTITY entity, @Nonnull Class aspectClass) { + public static + ASPECT extractAspectFromSingleAspectEntity( + @Nonnull ENTITY entity, @Nonnull Class aspectClass) { // Create an empty aspect to extract it's field names final Constructor constructor; try { @SuppressWarnings("rawtypes") - final Class[] constructorParamArray = new Class[]{}; + final Class[] constructorParamArray = new Class[] {}; constructor = aspectClass.getConstructor(constructorParamArray); } catch (NoSuchMethodException e) { - throw new RuntimeException("Exception occurred while trying to get the default constructor for the aspect. 
", e); + throw new RuntimeException( + "Exception occurred while trying to get the default constructor for the aspect. ", e); } final ASPECT aspect; try { aspect = constructor.newInstance(); } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException("Exception occurred while creating an instance of the aspect. ", e); + throw new RuntimeException( + "Exception occurred while creating an instance of the aspect. ", e); } final Set aspectFields = - aspect.schema().getFields().stream().map(RecordDataSchema.Field::getName).collect(Collectors.toSet()); + aspect.schema().getFields().stream() + .map(RecordDataSchema.Field::getName) + .collect(Collectors.toSet()); // Get entity's field names and only keep fields which occur in the entity and not in the aspect final Set entityFields = - entity.schema().getFields().stream().map(RecordDataSchema.Field::getName).collect(Collectors.toSet()); + entity.schema().getFields().stream() + .map(RecordDataSchema.Field::getName) + .collect(Collectors.toSet()); entityFields.removeAll(aspectFields); // remove non aspect fields from entity's cloned datamap and use it to create an aspect @@ -194,13 +208,15 @@ public static ASP * @return the field */ @Nonnull - public static RecordDataSchema.Field getRecordDataSchemaField(@Nonnull T recordTemplate, - @Nonnull String fieldName) { + public static RecordDataSchema.Field getRecordDataSchemaField( + @Nonnull T recordTemplate, @Nonnull String fieldName) { RecordDataSchema.Field field = recordTemplate.schema().getField(fieldName); if (field == null) { throw new InvalidSchemaException( - String.format("Missing expected field '%s' in %s", fieldName, recordTemplate.getClass().getCanonicalName())); + String.format( + "Missing expected field '%s' in %s", + fieldName, recordTemplate.getClass().getCanonicalName())); } return field; } @@ -212,14 +228,20 @@ public static RecordDataSchema.Field getRecordDataSch * @param fieldName the name of the field to update * @param value the value to set */ - public static void setRecordTemplatePrimitiveField(@Nonnull T recordTemplate, - @Nonnull String fieldName, @Nonnull V value) { + public static void setRecordTemplatePrimitiveField( + @Nonnull T recordTemplate, @Nonnull String fieldName, @Nonnull V value) { final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method putDirect = - getProtectedMethod(RecordTemplate.class, "putDirect", RecordDataSchema.Field.class, Class.class, Object.class, + getProtectedMethod( + RecordTemplate.class, + "putDirect", + RecordDataSchema.Field.class, + Class.class, + Object.class, SetMode.class); - invokeProtectedMethod(recordTemplate, putDirect, field, value.getClass(), value, SetMode.DISALLOW_NULL); + invokeProtectedMethod( + recordTemplate, putDirect, field, value.getClass(), value, SetMode.DISALLOW_NULL); } /** @@ -234,9 +256,15 @@ public static void setRecordTemplateComplexField( final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method putWrapped = - getProtectedMethod(RecordTemplate.class, "putWrapped", RecordDataSchema.Field.class, Class.class, - DataTemplate.class, SetMode.class); - invokeProtectedMethod(recordTemplate, putWrapped, field, value.getClass(), value, SetMode.DISALLOW_NULL); + getProtectedMethod( + RecordTemplate.class, + "putWrapped", + RecordDataSchema.Field.class, + Class.class, + DataTemplate.class, + SetMode.class); + invokeProtectedMethod( + recordTemplate, putWrapped, field, 
value.getClass(), value, SetMode.DISALLOW_NULL); } /** @@ -248,14 +276,19 @@ public static void setRecordTemplateComplexField( * @return the value for the field */ @Nonnull - public static V getRecordTemplateField(@Nonnull T recordTemplate, - @Nonnull String fieldName, @Nonnull Class valueClass) { + public static V getRecordTemplateField( + @Nonnull T recordTemplate, @Nonnull String fieldName, @Nonnull Class valueClass) { final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method obtainCustomType = - getProtectedMethod(RecordTemplate.class, "obtainCustomType", RecordDataSchema.Field.class, Class.class, + getProtectedMethod( + RecordTemplate.class, + "obtainCustomType", + RecordDataSchema.Field.class, + Class.class, GetMode.class); - return (V) invokeProtectedMethod(recordTemplate, obtainCustomType, field, valueClass, GetMode.STRICT); + return (V) + invokeProtectedMethod(recordTemplate, obtainCustomType, field, valueClass, GetMode.STRICT); } /** @@ -272,9 +305,14 @@ public static V getRecordTemp final RecordDataSchema.Field field = getRecordDataSchemaField(recordTemplate, fieldName); final Method obtainWrapped = - getProtectedMethod(RecordTemplate.class, "obtainWrapped", RecordDataSchema.Field.class, Class.class, + getProtectedMethod( + RecordTemplate.class, + "obtainWrapped", + RecordDataSchema.Field.class, + Class.class, GetMode.class); - return (V) invokeProtectedMethod(recordTemplate, obtainWrapped, field, valueClass, GetMode.STRICT); + return (V) + invokeProtectedMethod(recordTemplate, obtainWrapped, field, valueClass, GetMode.STRICT); } /** @@ -290,22 +328,33 @@ public static RecordTemplate getSelectedRecordTemplat final DataSchema dataSchema = unionTemplate.memberType(); if (!(dataSchema instanceof RecordDataSchema)) { throw new InvalidSchemaException( - "The currently selected member isn't a RecordTemplate in " + unionTemplate.getClass().getCanonicalName()); + "The currently selected member isn't a RecordTemplate in " + + unionTemplate.getClass().getCanonicalName()); } final Class clazz = - ModelUtils.getClassFromName(((RecordDataSchema) dataSchema).getBindingName(), RecordTemplate.class); + ModelUtils.getClassFromName( + ((RecordDataSchema) dataSchema).getBindingName(), RecordTemplate.class); final Method obtainWrapped = - getProtectedMethod(UnionTemplate.class, "obtainWrapped", DataSchema.class, Class.class, String.class); - final List members = ((UnionDataSchema) unionTemplate.schema()).getMembers(); + getProtectedMethod( + UnionTemplate.class, "obtainWrapped", DataSchema.class, Class.class, String.class); + final List members = + ((UnionDataSchema) unionTemplate.schema()).getMembers(); for (UnionDataSchema.Member m : members) { - if (m.hasAlias() && m.getType().getDereferencedDataSchema().getUnionMemberKey().equals(clazz.getName())) { - return (V) invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, m.getAlias()); + if (m.hasAlias() + && m.getType().getDereferencedDataSchema().getUnionMemberKey().equals(clazz.getName())) { + return (V) + invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, m.getAlias()); } } - return (V) invokeProtectedMethod(unionTemplate, obtainWrapped, dataSchema, clazz, - ((RecordDataSchema) dataSchema).getFullName()); + return (V) + invokeProtectedMethod( + unionTemplate, + obtainWrapped, + dataSchema, + clazz, + ((RecordDataSchema) dataSchema).getFullName()); } /** @@ -320,25 +369,44 @@ public static RecordTemplate setSelectedRecordTemplat @Nonnull UnionTemplate unionTemplate, 
@Nonnull RecordTemplate selectedMember) { final Method selectWrapped = - getProtectedMethod(UnionTemplate.class, "selectWrapped", DataSchema.class, Class.class, String.class, + getProtectedMethod( + UnionTemplate.class, + "selectWrapped", + DataSchema.class, + Class.class, + String.class, DataTemplate.class); - final List members = ((UnionDataSchema) unionTemplate.schema()).getMembers(); + final List members = + ((UnionDataSchema) unionTemplate.schema()).getMembers(); for (UnionDataSchema.Member m : members) { - if (m.hasAlias() && m.getType() - .getDereferencedDataSchema() - .getUnionMemberKey() - .equals(selectedMember.getClass().getName())) { - return (V) invokeProtectedMethod(unionTemplate, selectWrapped, selectedMember.schema(), - selectedMember.getClass(), m.getAlias(), selectedMember); + if (m.hasAlias() + && m.getType() + .getDereferencedDataSchema() + .getUnionMemberKey() + .equals(selectedMember.getClass().getName())) { + return (V) + invokeProtectedMethod( + unionTemplate, + selectWrapped, + selectedMember.schema(), + selectedMember.getClass(), + m.getAlias(), + selectedMember); } } - return (V) invokeProtectedMethod(unionTemplate, selectWrapped, selectedMember.schema(), selectedMember.getClass(), - selectedMember.schema().getUnionMemberKey(), selectedMember); + return (V) + invokeProtectedMethod( + unionTemplate, + selectWrapped, + selectedMember.schema(), + selectedMember.getClass(), + selectedMember.schema().getUnionMemberKey(), + selectedMember); } @Nonnull - private static Method getProtectedMethod(@Nonnull Class clazz, @Nonnull String methodName, - @Nonnull Class... parameterTypes) { + private static Method getProtectedMethod( + @Nonnull Class clazz, @Nonnull String methodName, @Nonnull Class... parameterTypes) { try { return clazz.getDeclaredMethod(methodName, parameterTypes); } catch (NoSuchMethodException e) { @@ -359,26 +427,32 @@ private static T invokeProtectedMethod(Object object, Method method, Object. } @Nonnull - private static Map getMethodsFromRecordTemplate(@Nonnull RecordTemplate recordTemplate) { + private static Map getMethodsFromRecordTemplate( + @Nonnull RecordTemplate recordTemplate) { final HashMap methodMap = new HashMap<>(); for (RecordDataSchema.Field field : recordTemplate.schema().getFields()) { final String capitalizedName = capitalizeFirst(field.getName()); final String getMethodName = - (field.getType().getType().equals(RecordDataSchema.Type.BOOLEAN) ? "is" : "get") + capitalizedName; + (field.getType().getType().equals(RecordDataSchema.Type.BOOLEAN) ? "is" : "get") + + capitalizedName; try { methodMap.put(field.getName(), recordTemplate.getClass().getMethod(getMethodName)); } catch (NoSuchMethodException e) { - throw new RuntimeException(String.format("Failed to get method [%s], for class [%s], field [%s]", getMethodName, - recordTemplate.getClass().getCanonicalName(), field.getName()), e); + throw new RuntimeException( + String.format( + "Failed to get method [%s], for class [%s], field [%s]", + getMethodName, recordTemplate.getClass().getCanonicalName(), field.getName()), + e); } } return Collections.unmodifiableMap(methodMap); } /** - * Given a {@link RecordTemplate} and field name, this will find and execute getFieldName/isFieldName and return the result - * If neither getFieldName/isFieldName has been called for any of the fields of the RecordTemplate, then the get/is method - * for all schema fields of the record will be found and subsequently cached. 
+ * Given a {@link RecordTemplate} and field name, this will find and execute
+ * getFieldName/isFieldName and return the result. If neither getFieldName/isFieldName has been
+ * called for any of the fields of the RecordTemplate, then the get/is method for all schema
+ * fields of the record will be found and subsequently cached.
*
* @param record {@link RecordTemplate} whose field has to be referenced
* @param fieldName field name of the record that has to be referenced
@@ -391,8 +465,10 @@ private static Object invokeMethod(@Nonnull RecordTemplate record, @Nonnull Stri
return METHOD_CACHE.get(record.getClass()).get(fieldName).invoke(record);
} catch (IllegalAccessException | InvocationTargetException e) {
throw new RuntimeException(
- String.format("Failed to execute method for class [%s], field [%s]", record.getClass().getCanonicalName(),
- fieldName), e);
+ String.format(
+ "Failed to execute method for class [%s], field [%s]",
+ record.getClass().getCanonicalName(), fieldName),
+ e);
}
}
@@ -402,21 +478,24 @@ private static Object getUnionMember(@Nonnull UnionTemplate union, @Nonnull Stri
return ((DataMap) union.data()).get(memberName);
}
throw new RuntimeException(
- String.format("Failed to extract member from union [%s], member [%s]", union.getClass().getCanonicalName(),
- memberName));
+ String.format(
+ "Failed to extract member from union [%s], member [%s]",
+ union.getClass().getCanonicalName(), memberName));
}
/**
- * Helper method for referencing array of RecordTemplate objects. Referencing a particular index or range of indices of an array is not supported.
+ * Helper method for referencing array of RecordTemplate objects. Referencing a particular index
+ * or range of indices of an array is not supported.
*
- * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} which needs to be referenced
+ * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate}
+ * which needs to be referenced
* @param ps {@link PathSpec} for the entire path inside the array that needs to be referenced
* @return {@link List} of objects from the array, referenced using the PathSpec
*/
@Nonnull
@SuppressWarnings("rawtypes")
- private static List getReferenceForAbstractArray(@Nonnull AbstractArrayTemplate reference,
- @Nonnull PathSpec ps) {
+ private static List getReferenceForAbstractArray(
+ @Nonnull AbstractArrayTemplate reference, @Nonnull PathSpec ps) {
if (!reference.isEmpty()) {
return Arrays.stream((reference).toArray())
.map(x -> getFieldValue(x, ps))
@@ -427,17 +506,19 @@ private static List getReferenceForAbstractArray(@Nonnull AbstractArrayT
}
/**
- * Nullable version of the method above. Allows us to get null values in a list in the correct oder.
- * Helper method for referencing array of RecordTemplate objects. Referencing a particular index or range of indices of an array is not supported.
+ * Nullable version of the method above. Allows us to get null values in a list in the correct
+ * order. Helper method for referencing array of RecordTemplate objects. Referencing a particular
+ * index or range of indices of an array is not supported.
* - * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} which needs to be referenced + * @param reference {@link AbstractArrayTemplate} corresponding to array of {@link RecordTemplate} + * which needs to be referenced * @param ps {@link PathSpec} for the entire path inside the array that needs to be referenced * @return {@link List} of objects from the array, referenced using the PathSpec */ @Nullable @SuppressWarnings("rawtypes") - private static List getNullableReferenceForAbstractArray(@Nonnull AbstractArrayTemplate reference, - @Nonnull PathSpec ps) { + private static List getNullableReferenceForAbstractArray( + @Nonnull AbstractArrayTemplate reference, @Nonnull PathSpec ps) { if (!reference.isEmpty()) { return Arrays.stream((reference).toArray()) .map(x -> getNullableFieldValue(x, ps)) @@ -447,11 +528,12 @@ private static List getNullableReferenceForAbstractArray(@Nonnull Abstra } /** - * Similar to {@link #getFieldValue(Object, PathSpec)} but takes string representation of Pegasus PathSpec as - * input. + * Similar to {@link #getFieldValue(Object, PathSpec)} but takes string representation of Pegasus + * PathSpec as input. */ @Nonnull - public static Optional getFieldValue(@Nonnull Object record, @Nonnull String pathSpecAsString) { + public static Optional getFieldValue( + @Nonnull Object record, @Nonnull String pathSpecAsString) { pathSpecAsString = LEADING_SPACESLASH_PATTERN.matcher(pathSpecAsString).replaceAll(""); pathSpecAsString = TRAILING_SPACESLASH_PATTERN.matcher(pathSpecAsString).replaceAll(""); @@ -462,13 +544,16 @@ public static Optional getFieldValue(@Nonnull Object record, @Nonnull St } /** - * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of the path from the record. - * This handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of primitive types or array of records. - * Fetching of values in a RecordTemplate where the field has a default value will return the field default value. - * Referencing field corresponding to a particular index or range of indices of an array is not supported. - * Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported. + * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of + * the path from the record. This handles only RecordTemplate, fields of which can be primitive + * types, typeRefs, arrays of primitive types or array of records. Fetching of values in a + * RecordTemplate where the field has a default value will return the field default value. + * Referencing field corresponding to a particular index or range of indices of an array is not + * supported. Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) + * FixedTemplate are currently not supported. * - * @param record {@link Object} Object to traverse the path. If record is of primitive type, and path is not empty, it will fail to traverse. + * @param record {@link Object} Object to traverse the path. If record is of primitive type, and + * path is not empty, it will fail to traverse. 
* @param ps {@link PathSpec} representing the path whose value needs to be returned * @return Referenced object of the RecordTemplate corresponding to the PathSpec */ @@ -484,7 +569,8 @@ public static Optional getFieldValue(@Nonnull Object record, @Nonnull Pa } if (StringUtils.isNumeric(part)) { throw new UnsupportedOperationException( - String.format("Array indexing is not supported for %s (%s from %s)", part, ps, reference)); + String.format( + "Array indexing is not supported for %s (%s from %s)", part, ps, reference)); } if (reference instanceof RecordTemplate) { reference = invokeMethod((RecordTemplate) reference, part); @@ -497,8 +583,10 @@ public static Optional getFieldValue(@Nonnull Object record, @Nonnull Pa return Optional.empty(); } } else if (reference instanceof AbstractArrayTemplate) { - return Optional.of(getReferenceForAbstractArray((AbstractArrayTemplate) reference, - new PathSpec(ps.getPathComponents().subList(i, pathSize)))); + return Optional.of( + getReferenceForAbstractArray( + (AbstractArrayTemplate) reference, + new PathSpec(ps.getPathComponents().subList(i, pathSize)))); } else { throw new UnsupportedOperationException( String.format("Failed at extracting %s (%s from %s)", part, ps, record)); @@ -508,16 +596,20 @@ public static Optional getFieldValue(@Nonnull Object record, @Nonnull Pa } /** - * A nullable version of the getFieldValue method above. This is used when grabbing values from aspects based on field specs - * on Relationship annotations. This allows us to get null values for fields that don't have a value for a given path spec. - * Then we can map values correctly based on list indices creating graph edges. - * Given a {@link Object} and {@link com.linkedin.data.schema.PathSpec} this will return value of the path from the record. - * This handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of primitive types or array of records. - * Fetching of values in a RecordTemplate where the field has a default value will return the field default value. - * Referencing field corresponding to a particular index or range of indices of an array is not supported. - * Fields corresponding to 1) multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported, return null. + * A nullable version of the getFieldValue method above. This is used when grabbing values from + * aspects based on field specs on Relationship annotations. This allows us to get null values for + * fields that don't have a value for a given path spec. Then we can map values correctly based on + * list indices creating graph edges. Given a {@link Object} and {@link + * com.linkedin.data.schema.PathSpec} this will return value of the path from the record. This + * handles only RecordTemplate, fields of which can be primitive types, typeRefs, arrays of + * primitive types or array of records. Fetching of values in a RecordTemplate where the field has + * a default value will return the field default value. Referencing field corresponding to a + * particular index or range of indices of an array is not supported. Fields corresponding to 1) + * multi-dimensional array 2) AbstractMapTemplate 3) FixedTemplate are currently not supported, + * return null. * - * @param record {@link Object} Object to traverse the path. If record is of primitive type, and path is not empty, it will fail to traverse. + * @param record {@link Object} Object to traverse the path. If record is of primitive type, and + * path is not empty, it will fail to traverse. 
* @param ps {@link PathSpec} representing the path whose value needs to be returned * @return Referenced object of the RecordTemplate corresponding to the PathSpec */ @@ -533,14 +625,16 @@ public static Object getNullableFieldValue(@Nonnull Object record, @Nonnull Path } if (StringUtils.isNumeric(part)) { throw new UnsupportedOperationException( - String.format("Array indexing is not supported for %s (%s from %s)", part, ps, reference)); + String.format( + "Array indexing is not supported for %s (%s from %s)", part, ps, reference)); } if (reference instanceof RecordTemplate) { reference = invokeMethod((RecordTemplate) reference, part); } else if (reference instanceof UnionTemplate) { reference = getUnionMember((UnionTemplate) reference, part); } else if (reference instanceof AbstractArrayTemplate) { - return getNullableReferenceForAbstractArray((AbstractArrayTemplate) reference, + return getNullableReferenceForAbstractArray( + (AbstractArrayTemplate) reference, new PathSpec(ps.getPathComponents().subList(i, pathSize))); } else { return null; @@ -548,5 +642,4 @@ public static Object getNullableFieldValue(@Nonnull Object record, @Nonnull Path } return reference; } - } diff --git a/li-utils/src/main/java/com/datahub/util/Statement.java b/li-utils/src/main/java/com/datahub/util/Statement.java index c30a5e9b70c76..f2c56a409312c 100644 --- a/li-utils/src/main/java/com/datahub/util/Statement.java +++ b/li-utils/src/main/java/com/datahub/util/Statement.java @@ -5,7 +5,6 @@ import lombok.NonNull; import lombok.Value; - @Value public class Statement { diff --git a/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java b/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java index 3df693e59adf7..56b97a3fb8233 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/ESQueryException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * An exception to be thrown when elastic search query fails. - */ +/** An exception to be thrown when elastic search query fails. */ public class ESQueryException extends RuntimeException { public ESQueryException(String message) { diff --git a/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java b/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java index 019e6896eb006..b7e182df527bf 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/InvalidSchemaException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * Thrown when a schema didn't match the expectation. - */ +/** Thrown when a schema didn't match the expectation. */ public class InvalidSchemaException extends RuntimeException { public InvalidSchemaException(String message) { diff --git a/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java b/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java index bab319812bed9..2a1784f6d7197 100644 --- a/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java +++ b/li-utils/src/main/java/com/datahub/util/exception/ModelConversionException.java @@ -1,8 +1,6 @@ package com.datahub.util.exception; -/** - * An exception to be thrown when Model Conversion fails. - */ +/** An exception to be thrown when Model Conversion fails. 
*/ public class ModelConversionException extends RuntimeException { public ModelConversionException(String message) { diff --git a/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java b/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java index e0533cb2d2502..fc082abf22771 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/AspectValidator.java @@ -8,14 +8,12 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - -/** - * Utility class to validate aspects are part of the union schemas. - */ +/** Utility class to validate aspects are part of the union schemas. */ public final class AspectValidator { // A cache of validated classes - private static final Set> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set> VALIDATED = + ConcurrentHashMap.newKeySet(); private AspectValidator() { // Util class @@ -26,15 +24,18 @@ private AspectValidator() { * * @param schema schema for the model */ - public static void validateAspectUnionSchema(@Nonnull UnionDataSchema schema, @Nonnull String aspectClassName) { + public static void validateAspectUnionSchema( + @Nonnull UnionDataSchema schema, @Nonnull String aspectClassName) { if (!ValidationUtils.isUnionWithOnlyComplexMembers(schema)) { - ValidationUtils.invalidSchema("Aspect '%s' must be a union containing only record type members", aspectClassName); + ValidationUtils.invalidSchema( + "Aspect '%s' must be a union containing only record type members", aspectClassName); } } /** - * Similar to {@link #validateAspectUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateAspectUnionSchema(UnionDataSchema, String)} but take a {@link Class} + * instead and caches results. */ public static void validateAspectUnionSchema(@Nonnull Class clazz) { if (VALIDATED.contains(clazz)) { @@ -46,8 +47,9 @@ public static void validateAspectUnionSchema(@Nonnull Class> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set> VALIDATED = + ConcurrentHashMap.newKeySet(); private DeltaValidator() { // Util class @@ -30,17 +28,19 @@ public static void validateDeltaSchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Delta '%s' must contain an non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Delta '%s' must contain an non-optional 'urn' field of URN type", className); } if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, DeltaValidator::isValidDeltaField)) { - ValidationUtils.invalidSchema("Delta '%s' must contain an non-optional 'delta' field of UNION type", - className); + ValidationUtils.invalidSchema( + "Delta '%s' must contain an non-optional 'delta' field of UNION type", className); } } /** - * Similar to {@link #validateDeltaSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateDeltaSchema(RecordDataSchema)} but take a {@link Class} instead and + * caches results. 
*/ public static void validateDeltaSchema(@Nonnull Class clazz) { if (VALIDATED.contains(clazz)) { @@ -52,7 +52,8 @@ public static void validateDeltaSchema(@Nonnull Class } private static boolean isValidDeltaField(@Nonnull RecordDataSchema.Field field) { - return field.getName().equals("delta") && !field.getOptional() + return field.getName().equals("delta") + && !field.getOptional() && field.getType().getType() == DataSchema.Type.UNION; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java b/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java index 31898c01f4233..c8741d2ccea83 100644 --- a/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java +++ b/li-utils/src/main/java/com/datahub/util/validator/DocumentValidator.java @@ -8,21 +8,21 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nonnull; - -/** - * Utility class to validate search document schemas. - */ +/** Utility class to validate search document schemas. */ public final class DocumentValidator { // Allowed non-optional fields. All other fields must be optional. - private static final Set NON_OPTIONAL_FIELDS = Collections.unmodifiableSet(new HashSet() { - { - add("urn"); - } - }); + private static final Set NON_OPTIONAL_FIELDS = + Collections.unmodifiableSet( + new HashSet() { + { + add("urn"); + } + }); // A cache of validated classes - private static final Set> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set> VALIDATED = + ConcurrentHashMap.newKeySet(); private DocumentValidator() { // Util class @@ -38,21 +38,29 @@ public static void validateDocumentSchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Document '%s' must contain an non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Document '%s' must contain an non-optional 'urn' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Document '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Document '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); - ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS).forEach(field -> { - ValidationUtils.invalidSchema("Document '%s' must contain an optional '%s' field", className, field.getName()); - }); + ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Document '%s' must contain an optional '%s' field", className, field.getName()); + }); } /** - * Similar to {@link #validateDocumentSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateDocumentSchema(RecordDataSchema)} but take a {@link Class} instead + * and caches results. 
*/ public static void validateDocumentSchema(@Nonnull Class clazz) { if (VALIDATED.contains(clazz)) { @@ -62,4 +70,4 @@ public static void validateDocumentSchema(@Nonnull Class NON_OPTIONAL_FIELDS = Collections.unmodifiableSet(new HashSet() { - { - add("urn"); - } - }); + private static final Set NON_OPTIONAL_FIELDS = + Collections.unmodifiableSet( + new HashSet() { + { + add("urn"); + } + }); // A cache of validated classes - private static final Set> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set> VALIDATED = + ConcurrentHashMap.newKeySet(); // A cache of validated classes - private static final Set> UNION_VALIDATED = ConcurrentHashMap.newKeySet(); - + private static final Set> UNION_VALIDATED = + ConcurrentHashMap.newKeySet(); private EntityValidator() { // Util class @@ -45,21 +45,29 @@ public static void validateEntitySchema(@Nonnull RecordDataSchema schema) { final String className = schema.getBindingName(); if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) { - ValidationUtils.invalidSchema("Entity '%s' must contain a non-optional 'urn' field of URN type", className); + ValidationUtils.invalidSchema( + "Entity '%s' must contain a non-optional 'urn' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Entity '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); - - ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS).forEach(field -> { - ValidationUtils.invalidSchema("Entity '%s' must contain an optional '%s' field", className, field.getName()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Entity '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); + + ValidationUtils.nonOptionalFields(schema, NON_OPTIONAL_FIELDS) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Entity '%s' must contain an optional '%s' field", className, field.getName()); + }); } /** - * Similar to {@link #validateEntitySchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateEntitySchema(RecordDataSchema)} but take a {@link Class} instead and + * caches results. */ public static void validateEntitySchema(@Nonnull Class clazz) { if (VALIDATED.contains(clazz)) { @@ -71,8 +79,8 @@ public static void validateEntitySchema(@Nonnull Class } /** - * Similar to {@link #validateEntityUnionSchema(UnionDataSchema, String)} but take a {@link Class} instead and caches - * results. + * Similar to {@link #validateEntityUnionSchema(UnionDataSchema, String)} but take a {@link Class} + * instead and caches results. 
*/ public static void validateEntityUnionSchema(@Nonnull Class clazz) { if (UNION_VALIDATED.contains(clazz)) { @@ -88,16 +96,16 @@ public static void validateEntityUnionSchema(@Nonnull Class clazz) { if (!VALIDATED.contains(clazz)) { try { @@ -109,4 +117,4 @@ public static boolean isValidEntitySchema(@Nonnull Class> VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set> VALIDATED = + ConcurrentHashMap.newKeySet(); // A cache of validated classes - private static final Set> UNION_VALIDATED = ConcurrentHashMap.newKeySet(); + private static final Set> UNION_VALIDATED = + ConcurrentHashMap.newKeySet(); @Value private static class Pair { @@ -42,29 +43,33 @@ public static void validateRelationshipSchema(@Nonnull RecordDataSchema schema) final String className = schema.getBindingName(); - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, - field -> ValidationUtils.isValidUrnField(field, "source"))) { - ValidationUtils.invalidSchema("Relationship '%s' must contain an non-optional 'source' field of URN type", - className); + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, field -> ValidationUtils.isValidUrnField(field, "source"))) { + ValidationUtils.invalidSchema( + "Relationship '%s' must contain an non-optional 'source' field of URN type", className); } - if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, - field -> ValidationUtils.isValidUrnField(field, "destination"))) { - ValidationUtils.invalidSchema("Relationship '%s' must contain an non-optional 'destination' field of URN type", + if (!ValidationUtils.schemaHasExactlyOneSuchField( + schema, field -> ValidationUtils.isValidUrnField(field, "destination"))) { + ValidationUtils.invalidSchema( + "Relationship '%s' must contain an non-optional 'destination' field of URN type", className); } - ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES).forEach(field -> { - ValidationUtils.invalidSchema("Relationship '%s' contains a field '%s' that makes use of a disallowed type '%s'.", - className, field.getName(), field.getType().getType()); - }); + ValidationUtils.fieldsUsingInvalidType(schema, ValidationUtils.PRIMITIVE_TYPES) + .forEach( + field -> { + ValidationUtils.invalidSchema( + "Relationship '%s' contains a field '%s' that makes use of a disallowed type '%s'.", + className, field.getName(), field.getType().getType()); + }); validatePairings(schema); } - /** - * Similar to {@link #validateRelationshipSchema(RecordDataSchema)} but take a {@link Class} instead and caches results. + * Similar to {@link #validateRelationshipSchema(RecordDataSchema)} but take a {@link Class} + * instead and caches results. 
    */
  public static void validateRelationshipSchema(@Nonnull Class<? extends RecordTemplate> clazz) {
    if (VALIDATED.contains(clazz)) {
@@ -76,14 +81,17 @@ public static void validateRelationshipSchema(@Nonnull Class<? extends RecordTemplate> clazz) {
-  public static void validateRelationshipUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) {
+  public static void validateRelationshipUnionSchema(
+      @Nonnull Class<? extends UnionTemplate> clazz) {
     if (UNION_VALIDATED.contains(clazz)) {
       return;
     }
-    validateRelationshipUnionSchema(ValidationUtils.getUnionSchema(clazz), clazz.getCanonicalName());
+    validateRelationshipUnionSchema(
+        ValidationUtils.getUnionSchema(clazz), clazz.getCanonicalName());
     UNION_VALIDATED.add(clazz);
   }

@@ -92,10 +100,13 @@ public static void validateRelationshipUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) {
   private static void validatePairings(@Nonnull RecordDataSchema schema) {
     final String className = schema.getBindingName();

     final Map<String, Object> properties = schema.getProperties();
     if (!properties.containsKey("pairings")) {
-      ValidationUtils.invalidSchema("Relationship '%s' must contain a 'pairings' property", className);
+      ValidationUtils.invalidSchema(
+          "Relationship '%s' must contain a 'pairings' property", className);
     }

     DataList pairings = (DataList) properties.get("pairings");
     Set<Pair> registeredPairs = new HashSet<>();
-    pairings.stream().forEach(obj -> {
-      DataMap map = (DataMap) obj;
-      if (!map.containsKey("source") || !map.containsKey("destination")) {
-        ValidationUtils.invalidSchema("Relationship '%s' contains an invalid 'pairings' item. "
-            + "Each item must contain a 'source' and 'destination' properties.", className);
-      }
-
-      String sourceUrn = map.getString("source");
-      if (!isValidUrnClass(sourceUrn)) {
-        ValidationUtils.invalidSchema(
-            "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", className,
-            sourceUrn);
-      }
-
-      String destinationUrn = map.getString("destination");
-      if (!isValidUrnClass(destinationUrn)) {
-        ValidationUtils.invalidSchema(
-            "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.", className,
-            destinationUrn);
-      }
-
-      Pair pair = new Pair(sourceUrn, destinationUrn);
-      if (registeredPairs.contains(pair)) {
-        ValidationUtils.invalidSchema("Relationship '%s' contains a repeated 'pairings' item (%s, %s)", className,
-            sourceUrn, destinationUrn);
-      }
-      registeredPairs.add(pair);
-    });
+    pairings.stream()
+        .forEach(
+            obj -> {
+              DataMap map = (DataMap) obj;
+              if (!map.containsKey("source") || !map.containsKey("destination")) {
+                ValidationUtils.invalidSchema(
+                    "Relationship '%s' contains an invalid 'pairings' item. "
+                        + "Each item must contain a 'source' and 'destination' properties.",
+                    className);
+              }
+
+              String sourceUrn = map.getString("source");
+              if (!isValidUrnClass(sourceUrn)) {
+                ValidationUtils.invalidSchema(
+                    "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.",
+                    className, sourceUrn);
+              }
+
+              String destinationUrn = map.getString("destination");
+              if (!isValidUrnClass(destinationUrn)) {
+                ValidationUtils.invalidSchema(
+                    "Relationship '%s' contains an invalid item in 'pairings'. %s is not a valid URN class name.",
+                    className, destinationUrn);
+              }
+
+              Pair pair = new Pair(sourceUrn, destinationUrn);
+              if (registeredPairs.contains(pair)) {
+                ValidationUtils.invalidSchema(
+                    "Relationship '%s' contains a repeated 'pairings' item (%s, %s)",
+                    className, sourceUrn, destinationUrn);
+              }
+              registeredPairs.add(pair);
+            });
   }

   private static boolean isValidUrnClass(String className) {
@@ -147,4 +164,4 @@ private static boolean isValidUrnClass(String className) {
       throw new RuntimeException(e);
     }
   }
-}
\ No newline at end of file
+}
diff --git a/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java b/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java
index 988fabe0411c8..08f349b146db6 100644
--- a/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java
+++ b/li-utils/src/main/java/com/datahub/util/validator/SnapshotValidator.java
@@ -13,11 +13,11 @@
 import java.util.concurrent.ConcurrentHashMap;
 import javax.annotation.Nonnull;

-
 public class SnapshotValidator {

   // A cache of validated classes
-  private static final Set<Class<? extends RecordTemplate>> VALIDATED = ConcurrentHashMap.newKeySet();
+  private static final Set<Class<? extends RecordTemplate>> VALIDATED =
+      ConcurrentHashMap.newKeySet();

   private SnapshotValidator() {
     // Util class
@@ -33,19 +33,22 @@ public static void validateSnapshotSchema(@Nonnull RecordDataSchema schema) {
     final String className = schema.getBindingName();

     if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, ValidationUtils::isValidUrnField)) {
-      ValidationUtils.invalidSchema("Snapshot '%s' must contain an non-optional 'urn' field of URN type", className);
+      ValidationUtils.invalidSchema(
+          "Snapshot '%s' must contain an non-optional 'urn' field of URN type", className);
     }

-    if (!ValidationUtils.schemaHasExactlyOneSuchField(schema, SnapshotValidator::isValidAspectsField)) {
-      ValidationUtils.invalidSchema("Snapshot '%s' must contain an non-optional 'aspects' field of ARRAY type",
-          className);
+    if (!ValidationUtils.schemaHasExactlyOneSuchField(
+        schema, SnapshotValidator::isValidAspectsField)) {
+      ValidationUtils.invalidSchema(
+          "Snapshot '%s' must contain an non-optional 'aspects' field of ARRAY type", className);
     }

     validateAspectsItemType(schema.getField("aspects"), className);
   }

   /**
-   * Similar to {@link #validateSnapshotSchema(RecordDataSchema)} but take a {@link Class} instead and caches results.
+   * Similar to {@link #validateSnapshotSchema(RecordDataSchema)} but take a {@link Class} instead
+   * and caches results.
    */
  public static void validateSnapshotSchema(@Nonnull Class<? extends RecordTemplate> clazz) {
    if (VALIDATED.contains(clazz)) {
@@ -61,38 +64,47 @@ public static void validateSnapshotSchema(@Nonnull Class<? extends RecordTemplate> clazz) {
-  public static void validateUniqueUrn(@Nonnull Collection<Class<? extends RecordTemplate>> snapshotClasses) {
+  public static void validateUniqueUrn(
+      @Nonnull Collection<Class<? extends RecordTemplate>> snapshotClasses) {

     final Set<Class<? extends Urn>> urnClasses = new HashSet<>();
-    snapshotClasses.forEach(snapshotClass -> {
-      final Class<? extends Urn> urnClass =
-          ValidationUtils.getUrnClass(ValidationUtils.getRecordSchema(snapshotClass).getField("urn"));
-      if (urnClasses.contains(urnClass)) {
-        ValidationUtils.invalidSchema("URN class %s in %s has already been claimed by another snapshot.", urnClass,
-            snapshotClass);
-      }
-      urnClasses.add(urnClass);
-    });
+    snapshotClasses.forEach(
+        snapshotClass -> {
+          final Class<? extends Urn> urnClass =
+              ValidationUtils.getUrnClass(
+                  ValidationUtils.getRecordSchema(snapshotClass).getField("urn"));
+          if (urnClasses.contains(urnClass)) {
+            ValidationUtils.invalidSchema(
+                "URN class %s in %s has already been claimed by another snapshot.",
+                urnClass, snapshotClass);
+          }
+          urnClasses.add(urnClass);
+        });
   }

   private static boolean isValidAspectsField(@Nonnull RecordDataSchema.Field field) {
-    return field.getName().equals("aspects") && !field.getOptional()
+    return field.getName().equals("aspects")
+        && !field.getOptional()
         && field.getType().getType() == DataSchema.Type.ARRAY;
   }

-  private static void validateAspectsItemType(@Nonnull RecordDataSchema.Field aspectsField, @Nonnull String className) {
+  private static void validateAspectsItemType(
+      @Nonnull RecordDataSchema.Field aspectsField, @Nonnull String className) {
     DataSchema itemSchema = ((ArrayDataSchema) aspectsField.getType()).getItems();

     if (itemSchema.getType() != DataSchema.Type.TYPEREF) {
-      ValidationUtils.invalidSchema("Snapshot %s' 'aspects' field must be an array of aspect typeref", className);
+      ValidationUtils.invalidSchema(
+          "Snapshot %s' 'aspects' field must be an array of aspect typeref", className);
     }

     TyperefDataSchema typerefSchema = (TyperefDataSchema) itemSchema;
     DataSchema unionSchema = typerefSchema.getDereferencedDataSchema();

     if (unionSchema.getType() != DataSchema.Type.UNION) {
-      ValidationUtils.invalidSchema("Snapshot '%s' 'aspects' field must be an array of union typeref", className);
+      ValidationUtils.invalidSchema(
+          "Snapshot '%s' 'aspects' field must be an array of union typeref", className);
     }

-    AspectValidator.validateAspectUnionSchema((UnionDataSchema) unionSchema, typerefSchema.getBindingName());
+    AspectValidator.validateAspectUnionSchema(
+        (UnionDataSchema) unionSchema, typerefSchema.getBindingName());
   }
 }
diff --git a/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java b/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java
index 1af6de8ff3940..5b38ff21e4b81 100644
--- a/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java
+++ b/li-utils/src/main/java/com/datahub/util/validator/ValidationUtils.java
@@ -1,7 +1,7 @@
 package com.datahub.util.validator;

-import com.linkedin.common.urn.Urn;
 import com.datahub.util.exception.InvalidSchemaException;
+import com.linkedin.common.urn.Urn;
 import com.linkedin.data.DataMap;
 import com.linkedin.data.schema.ArrayDataSchema;
 import com.linkedin.data.schema.DataSchema;
@@ -18,24 +18,22 @@
 import java.util.stream.Collectors;
 import javax.annotation.Nonnull;

-
-/**
- * Utility class for schema validation classes.
- */
+/** Utility class for schema validation classes. */
 public final class ValidationUtils {

   public static final Set<DataSchema.Type> PRIMITIVE_TYPES =
-      Collections.unmodifiableSet(new HashSet<DataSchema.Type>() {
-        {
-          add(DataSchema.Type.BOOLEAN);
-          add(DataSchema.Type.INT);
-          add(DataSchema.Type.LONG);
-          add(DataSchema.Type.FLOAT);
-          add(DataSchema.Type.DOUBLE);
-          add(DataSchema.Type.STRING);
-          add(DataSchema.Type.ENUM);
-        }
-      });
+      Collections.unmodifiableSet(
+          new HashSet<DataSchema.Type>() {
+            {
+              add(DataSchema.Type.BOOLEAN);
+              add(DataSchema.Type.INT);
+              add(DataSchema.Type.LONG);
+              add(DataSchema.Type.FLOAT);
+              add(DataSchema.Type.DOUBLE);
+              add(DataSchema.Type.STRING);
+              add(DataSchema.Type.ENUM);
+            }
+          });

   private ValidationUtils() {
     // Util class
@@ -45,9 +43,7 @@ public static void invalidSchema(@Nonnull String format, Object... args) {
     throw new InvalidSchemaException(String.format(format, args));
   }

-  /**
-   * Gets the {@link RecordDataSchema} of a {@link RecordTemplate} via reflection.
-   */
+  /** Gets the {@link RecordDataSchema} of a {@link RecordTemplate} via reflection. */
   @Nonnull
   public static RecordDataSchema getRecordSchema(@Nonnull Class<? extends RecordTemplate> clazz) {
     try {
@@ -61,9 +57,7 @@ public static RecordDataSchema getRecordSchema(@Nonnull Class<? extends RecordTemplate> clazz) {
     try {
@@ -77,30 +71,29 @@ public static UnionDataSchema getUnionSchema(@Nonnull Class<? extends UnionTemplate> clazz) {
-  /**
-   * Returns true if the supply schema has exactly one field matching the predicate.
-   */
-  public static boolean schemaHasExactlyOneSuchField(@Nonnull RecordDataSchema schema,
-      @Nonnull Predicate<RecordDataSchema.Field> predicate) {
+  /** Returns true if the supply schema has exactly one field matching the predicate. */
+  public static boolean schemaHasExactlyOneSuchField(
+      @Nonnull RecordDataSchema schema, @Nonnull Predicate<RecordDataSchema.Field> predicate) {
     return schema.getFields().stream().filter(predicate).count() == 1;
   }

-  /**
-   * Returns true if the non-optional field matches the field name and has a URN type.
-   */
-  public static boolean isValidUrnField(@Nonnull RecordDataSchema.Field field, @Nonnull String fieldName) {
-    return field.getName().equals(fieldName) && !field.getOptional()
-        && field.getType().getType() == DataSchema.Type.TYPEREF && Urn.class.isAssignableFrom(getUrnClass(field));
+  /** Returns true if the non-optional field matches the field name and has a URN type. */
+  public static boolean isValidUrnField(
+      @Nonnull RecordDataSchema.Field field, @Nonnull String fieldName) {
+    return field.getName().equals(fieldName)
+        && !field.getOptional()
+        && field.getType().getType() == DataSchema.Type.TYPEREF
+        && Urn.class.isAssignableFrom(getUrnClass(field));
   }

-  /**
-   * Returns the Java class for an URN typeref field.
-   */
+  /** Returns the Java class for an URN typeref field. */
   public static Class<? extends Urn> getUrnClass(@Nonnull RecordDataSchema.Field field) {
     try {
       @SuppressWarnings("unchecked")
       final Class<? extends Urn> clazz =
-          (Class<? extends Urn>) Class.forName(((DataMap) field.getType().getProperties().get("java")).getString("class"));
+          (Class<? extends Urn>)
+              Class.forName(
+                  ((DataMap) field.getType().getProperties().get("java")).getString("class"));
       return clazz;
     } catch (ClassNotFoundException e) {
       throw new RuntimeException(e);
@@ -108,52 +101,55 @@ public static Class<? extends Urn> getUrnClass(@Nonnull RecordDataSchema.Field field) {
   }

   /**
-   * Similar to {@link #isValidUrnField(RecordDataSchema.Field, String)} but with a fixed field "urn".
+   * Similar to {@link #isValidUrnField(RecordDataSchema.Field, String)} but with a fixed field
+   * "urn".
    */
   public static boolean isValidUrnField(@Nonnull RecordDataSchema.Field field) {
     return isValidUrnField(field, "urn");
   }

-  /**
-   * Returns all the non-whitelisted, non-optional fields in a {@link RecordDataSchema}.
-   */
+  /** Returns all the non-whitelisted, non-optional fields in a {@link RecordDataSchema}. */
   @Nonnull
-  public static List<RecordDataSchema.Field> nonOptionalFields(@Nonnull RecordDataSchema schema,
-      @Nonnull Set<String> whitelistedFields) {
-    return schema.getFields().stream().filter(field -> {
-      if (!whitelistedFields.contains(field.getName())) {
-        if (!field.getOptional()) {
-          return true;
-        }
-      }
-      return false;
-    }).collect(Collectors.toList());
+  public static List<RecordDataSchema.Field> nonOptionalFields(
+      @Nonnull RecordDataSchema schema, @Nonnull Set<String> whitelistedFields) {
+    return schema.getFields().stream()
+        .filter(
+            field -> {
+              if (!whitelistedFields.contains(field.getName())) {
+                if (!field.getOptional()) {
+                  return true;
+                }
+              }
+              return false;
+            })
+        .collect(Collectors.toList());
   }

-  /**
-   * Returns all the non-whitelisted, optional fields in a {@link RecordDataSchema}.
-   */
+  /** Returns all the non-whitelisted, optional fields in a {@link RecordDataSchema}. */
   @Nonnull
-  public static List<RecordDataSchema.Field> optionalFields(@Nonnull RecordDataSchema schema,
-      @Nonnull Set<String> whitelistedFields) {
-    return schema.getFields().stream().filter(field -> {
-      if (!whitelistedFields.contains(field.getName())) {
-        if (field.getOptional()) {
-          return true;
-        }
-      }
-      return false;
-    }).collect(Collectors.toList());
+  public static List<RecordDataSchema.Field> optionalFields(
+      @Nonnull RecordDataSchema schema, @Nonnull Set<String> whitelistedFields) {
+    return schema.getFields().stream()
+        .filter(
+            field -> {
+              if (!whitelistedFields.contains(field.getName())) {
+                if (field.getOptional()) {
+                  return true;
+                }
+              }
+              return false;
+            })
+        .collect(Collectors.toList());
   }

   /**
-   * Return all the fields in a {@link RecordDataSchema} that are not using one of the allowed types.
+   * Return all the fields in a {@link RecordDataSchema} that are not using one of the allowed
+   * types.
    */
   @Nonnull
-  public static List<RecordDataSchema.Field> fieldsUsingInvalidType(@Nonnull RecordDataSchema schema,
-      @Nonnull Set<DataSchema.Type> allowedTypes) {
-    return schema.getFields()
-        .stream()
+  public static List<RecordDataSchema.Field> fieldsUsingInvalidType(
+      @Nonnull RecordDataSchema schema, @Nonnull Set<DataSchema.Type> allowedTypes) {
+    return schema.getFields().stream()
         .filter(field -> !allowedTypes.contains(getFieldOrArrayItemType(field)))
         .collect(Collectors.toList());
   }
@@ -164,8 +160,10 @@ public static boolean isUnionWithOnlyComplexMembers(UnionDataSchema unionDataSchema) {
   @Nonnull
   private static DataSchema.Type getFieldOrArrayItemType(@Nonnull RecordDataSchema.Field field) {
-    DataSchema type = field.getType().getType() == DataSchema.Type.ARRAY
-        ? ((ArrayDataSchema) field.getType()).getItems() : field.getType();
+    DataSchema type =
+        field.getType().getType() == DataSchema.Type.ARRAY
+            ? ((ArrayDataSchema) field.getType()).getItems()
+            : field.getType();
     if (type.getType() == DataSchema.Type.TYPEREF) {
       return type.getDereferencedType();
     }
diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java
index 972f52b8824ce..f5a3c9c12ff70 100644
--- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java
+++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java
@@ -2,42 +2,42 @@

 import com.linkedin.common.urn.Urn;

-
-/**
- * Static class containing commonly-used constants across DataHub services.
- */
+/** Static class containing commonly-used constants across DataHub services. */
 public class Constants {
   public static final String INTERNAL_DELEGATED_FOR_ACTOR_HEADER_NAME = "X-DataHub-Delegated-For";
   public static final String INTERNAL_DELEGATED_FOR_ACTOR_TYPE = "X-DataHub-Delegated-For-";

   public static final String DATAHUB_ACTOR = "urn:li:corpuser:datahub"; // Super user.
- public static final String SYSTEM_ACTOR = "urn:li:corpuser:__datahub_system"; // DataHub internal service principal. + public static final String SYSTEM_ACTOR = + "urn:li:corpuser:__datahub_system"; // DataHub internal service principal. public static final String UNKNOWN_ACTOR = "urn:li:corpuser:UNKNOWN"; // Unknown principal. public static final Long ASPECT_LATEST_VERSION = 0L; public static final String UNKNOWN_DATA_PLATFORM = "urn:li:dataPlatform:unknown"; // !!!!!!! IMPORTANT !!!!!!! - // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. Without this the limit is + // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. + // Without this the limit is // whatever Jackson is defaulting to (5 MB currently). public static final String MAX_JACKSON_STRING_SIZE = "16000000"; - public static final String INGESTION_MAX_SERIALIZED_STRING_LENGTH = "INGESTION_MAX_SERIALIZED_STRING_LENGTH"; + public static final String INGESTION_MAX_SERIALIZED_STRING_LENGTH = + "INGESTION_MAX_SERIALIZED_STRING_LENGTH"; - /** - * System Metadata - */ + /** System Metadata */ public static final String DEFAULT_RUN_ID = "no-run-id-provided"; - // Forces indexing for no-ops, enabled for restore indices calls. Only considered in the no-op case + + // Forces indexing for no-ops, enabled for restore indices calls. Only considered in the no-op + // case public static final String FORCE_INDEXING_KEY = "forceIndexing"; - // Indicates an event source from an application with hooks that have already been processed and should not be reprocessed + // Indicates an event source from an application with hooks that have already been processed and + // should not be reprocessed public static final String APP_SOURCE = "appSource"; // App sources public static final String UI_SOURCE = "ui"; - /** - * Entities - */ + /** Entities */ public static final String CORP_USER_ENTITY_NAME = "corpuser"; + public static final String CORP_GROUP_ENTITY_NAME = "corpGroup"; public static final String DATASET_ENTITY_NAME = "dataset"; public static final String CHART_ENTITY_NAME = "chart"; @@ -74,11 +74,10 @@ public class Constants { public static final String DATA_PRODUCT_ENTITY_NAME = "dataProduct"; public static final String OWNERSHIP_TYPE_ENTITY_NAME = "ownershipType"; - /** - * Aspects - */ + /** Aspects */ // Common public static final String OWNERSHIP_ASPECT_NAME = "ownership"; + public static final String INSTITUTIONAL_MEMORY_ASPECT_NAME = "institutionalMemory"; public static final String DATA_PLATFORM_INSTANCE_ASPECT_NAME = "dataPlatformInstance"; public static final String BROWSE_PATHS_ASPECT_NAME = "browsePaths"; @@ -136,19 +135,22 @@ public class Constants { // Dashboard public static final String DASHBOARD_KEY_ASPECT_NAME = "dashboardKey"; public static final String DASHBOARD_INFO_ASPECT_NAME = "dashboardInfo"; - public static final String EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME = "editableDashboardProperties"; + public static final String EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME = + "editableDashboardProperties"; public static final String DASHBOARD_USAGE_STATISTICS_ASPECT_NAME = "dashboardUsageStatistics"; // Notebook public static final String NOTEBOOK_KEY_ASPECT_NAME = "notebookKey"; public static final String NOTEBOOK_INFO_ASPECT_NAME = "notebookInfo"; public static final String NOTEBOOK_CONTENT_ASPECT_NAME = "notebookContent"; - public static final String EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME = "editableNotebookProperties"; + public static final 
String EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME = + "editableNotebookProperties"; // DataFlow public static final String DATA_FLOW_KEY_ASPECT_NAME = "dataFlowKey"; public static final String DATA_FLOW_INFO_ASPECT_NAME = "dataFlowInfo"; - public static final String EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME = "editableDataFlowProperties"; + public static final String EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME = + "editableDataFlowProperties"; // DataJob public static final String DATA_JOB_KEY_ASPECT_NAME = "dataJobKey"; @@ -162,19 +164,22 @@ public class Constants { // DataPlatformInstance public static final String DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME = "dataPlatformInstanceKey"; - public static final String DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME = "dataPlatformInstanceProperties"; + public static final String DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME = + "dataPlatformInstanceProperties"; // ML Feature public static final String ML_FEATURE_KEY_ASPECT_NAME = "mlFeatureKey"; public static final String ML_FEATURE_PROPERTIES_ASPECT_NAME = "mlFeatureProperties"; - public static final String ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlFeatureProperties"; + public static final String ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlFeatureProperties"; // ML Feature Table public static final String ML_FEATURE_TABLE_KEY_ASPECT_NAME = "mlFeatureTableKey"; public static final String ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME = "mlFeatureTableProperties"; - public static final String ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlFeatureTableProperties"; + public static final String ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlFeatureTableProperties"; - //ML Model + // ML Model public static final String ML_MODEL_KEY_ASPECT_NAME = "mlModelKey"; public static final String ML_MODEL_PROPERTIES_ASPECT_NAME = "mlModelProperties"; public static final String ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlModelProperties"; @@ -192,12 +197,14 @@ public class Constants { // ML Model Group public static final String ML_MODEL_GROUP_KEY_ASPECT_NAME = "mlModelGroupKey"; public static final String ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME = "mlModelGroupProperties"; - public static final String ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlModelGroupProperties"; + public static final String ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlModelGroupProperties"; // ML Primary Key public static final String ML_PRIMARY_KEY_KEY_ASPECT_NAME = "mlPrimaryKeyKey"; public static final String ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME = "mlPrimaryKeyProperties"; - public static final String ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME = "editableMlPrimaryKeyProperties"; + public static final String ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableMlPrimaryKeyProperties"; // Policy public static final String DATAHUB_POLICY_INFO_ASPECT_NAME = "dataHubPolicyInfo"; @@ -212,15 +219,16 @@ public class Constants { // Container public static final String CONTAINER_KEY_ASPECT_NAME = "containerKey"; public static final String CONTAINER_PROPERTIES_ASPECT_NAME = "containerProperties"; - public static final String CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME = "editableContainerProperties"; + public static final String CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME = + "editableContainerProperties"; public static final String CONTAINER_ASPECT_NAME = "container"; // parent container - // Glossary term + // Glossary term public static final String 
GLOSSARY_TERM_KEY_ASPECT_NAME = "glossaryTermKey"; public static final String GLOSSARY_TERM_INFO_ASPECT_NAME = "glossaryTermInfo"; public static final String GLOSSARY_RELATED_TERM_ASPECT_NAME = "glossaryRelatedTerms"; - // Glossary node + // Glossary node public static final String GLOSSARY_NODE_KEY_ASPECT_NAME = "glossaryNodeKey"; public static final String GLOSSARY_NODE_INFO_ASPECT_NAME = "glossaryNodeInfo"; @@ -304,24 +312,24 @@ public class Constants { public static final String CHANGE_EVENT_PLATFORM_EVENT_NAME = "entityChangeEvent"; - /** - * Retention - */ + /** Retention */ public static final String DATAHUB_RETENTION_ENTITY = "dataHubRetention"; + public static final String DATAHUB_RETENTION_ASPECT = "dataHubRetentionConfig"; public static final String DATAHUB_RETENTION_KEY_ASPECT = "dataHubRetentionKey"; - /** - * User Status - */ + + /** User Status */ public static final String CORP_USER_STATUS_ACTIVE = "ACTIVE"; - /** - * Task Runs - */ + /** Task Runs */ public static final String DATA_PROCESS_INSTANCE_ENTITY_NAME = "dataProcessInstance"; - public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = "dataProcessInstanceProperties"; - public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = "dataProcessInstanceRunEvent"; - public static final String DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME = "dataProcessInstanceRelationships"; + + public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = + "dataProcessInstanceProperties"; + public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = + "dataProcessInstanceRunEvent"; + public static final String DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME = + "dataProcessInstanceRelationships"; // Posts public static final String POST_INFO_ASPECT_NAME = "postInfo"; @@ -332,8 +340,8 @@ public class Constants { public static final String CLIENT_ID_ASPECT = "telemetryClientId"; // Step - public static final String DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME = "dataHubStepStateProperties"; - + public static final String DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME = + "dataHubStepStateProperties"; // Authorization public static final String REST_API_AUTHORIZATION_ENABLED_ENV = "REST_API_AUTHORIZATION_ENABLED"; @@ -357,6 +365,5 @@ public class Constants { // DAO public static final long LATEST_VERSION = 0; - private Constants() { - } + private Constants() {} } diff --git a/li-utils/src/main/java/com/linkedin/util/Configuration.java b/li-utils/src/main/java/com/linkedin/util/Configuration.java index cf2085839aefa..e0a1f181b48aa 100644 --- a/li-utils/src/main/java/com/linkedin/util/Configuration.java +++ b/li-utils/src/main/java/com/linkedin/util/Configuration.java @@ -1,34 +1,34 @@ package com.linkedin.util; -import javax.annotation.Nonnull; import java.io.IOException; import java.io.InputStream; import java.util.Optional; import java.util.Properties; +import javax.annotation.Nonnull; public class Configuration { - private Configuration() { - } + private Configuration() {} - @Nonnull - public static Properties loadProperties(@Nonnull String configFile) { - Properties configuration = new Properties(); - try (InputStream inputStream = Configuration.class.getClassLoader().getResourceAsStream(configFile)) { - configuration.load(inputStream); - } catch (IOException e) { - throw new RuntimeException("Can't read file: " + configFile); - } - return configuration; + @Nonnull + public static Properties loadProperties(@Nonnull String configFile) { + Properties configuration = new Properties(); + try 
(InputStream inputStream = + Configuration.class.getClassLoader().getResourceAsStream(configFile)) { + configuration.load(inputStream); + } catch (IOException e) { + throw new RuntimeException("Can't read file: " + configFile); } + return configuration; + } - @Nonnull - public static String getEnvironmentVariable(@Nonnull String envVar) { - return System.getenv(envVar); - } + @Nonnull + public static String getEnvironmentVariable(@Nonnull String envVar) { + return System.getenv(envVar); + } - @Nonnull - public static String getEnvironmentVariable(@Nonnull String envVar, @Nonnull String defaultVal) { - return Optional.ofNullable(System.getenv(envVar)).orElse(defaultVal); - } + @Nonnull + public static String getEnvironmentVariable(@Nonnull String envVar, @Nonnull String defaultVal) { + return Optional.ofNullable(System.getenv(envVar)).orElse(defaultVal); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java index ab90b3e054a3b..22d8065844a8c 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/Uri.java @@ -4,35 +4,35 @@ import java.net.URISyntaxException; public class Uri { - private final String _uri; + private final String _uri; - public Uri(String url) { - if (url == null) { - throw new NullPointerException("URL must be non-null"); - } - _uri = url; + public Uri(String url) { + if (url == null) { + throw new NullPointerException("URL must be non-null"); } + _uri = url; + } - @Override - public String toString() { - return _uri; - } + @Override + public String toString() { + return _uri; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Uri)) { - return false; - } else { - return _uri.equals(((Uri) obj)._uri); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Uri)) { + return false; + } else { + return _uri.equals(((Uri) obj)._uri); } + } - @Override - public int hashCode() { - return _uri.hashCode(); - } + @Override + public int hashCode() { + return _uri.hashCode(); + } - public URI toURI() throws URISyntaxException { - return new URI(_uri); - } + public URI toURI() throws URISyntaxException { + return new URI(_uri); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java index a23d2b08752d1..6a30bb22a73a3 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/uri/UriCoercer.java @@ -5,15 +5,16 @@ import com.linkedin.data.template.TemplateOutputCastException; public class UriCoercer implements DirectCoercer { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new UriCoercer(), Uri.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new UriCoercer(), Uri.class); - @Override - public Object coerceInput(Uri object) throws ClassCastException { - return object.toString(); - } + @Override + public Object coerceInput(Uri object) throws ClassCastException { + return object.toString(); + } - @Override - public Uri coerceOutput(Object object) throws TemplateOutputCastException { - return new Uri((String) object); - } + @Override + public Uri coerceOutput(Object object) throws TemplateOutputCastException { + return new Uri((String) object); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java 
b/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java index 3e1950160cca2..17abf09361e36 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/url/Url.java @@ -4,35 +4,35 @@ import java.net.URISyntaxException; public class Url { - private final String _url; + private final String _url; - public Url(String url) { - if (url == null) { - throw new NullPointerException("URL must be non-null"); - } - _url = url; + public Url(String url) { + if (url == null) { + throw new NullPointerException("URL must be non-null"); } + _url = url; + } - @Override - public String toString() { - return _url; - } + @Override + public String toString() { + return _url; + } - @Override - public boolean equals(Object obj) { - if (!(obj instanceof Url)) { - return false; - } else { - return _url.equals(((Url) obj)._url); - } + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Url)) { + return false; + } else { + return _url.equals(((Url) obj)._url); } + } - @Override - public int hashCode() { - return _url.hashCode(); - } + @Override + public int hashCode() { + return _url.hashCode(); + } - public URI toURI() throws URISyntaxException { - return new URI(_url); - } + public URI toURI() throws URISyntaxException { + return new URI(_url); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java index 9424fffdd2f68..3bae43ee0ca6a 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/url/UrlCoercer.java @@ -5,15 +5,16 @@ import com.linkedin.data.template.TemplateOutputCastException; public class UrlCoercer implements DirectCoercer { - private static final boolean REGISTER_COERCER = Custom.registerCoercer(new UrlCoercer(), Url.class); + private static final boolean REGISTER_COERCER = + Custom.registerCoercer(new UrlCoercer(), Url.class); - @Override - public Object coerceInput(Url object) throws ClassCastException { - return object.toString(); - } + @Override + public Object coerceInput(Url object) throws ClassCastException { + return object.toString(); + } - @Override - public Url coerceOutput(Object object) throws TemplateOutputCastException { - return new Url((String) object); - } + @Override + public Url coerceOutput(Object object) throws TemplateOutputCastException { + return new Url((String) object); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java index feb7cacd7a48a..0110471c9cdfd 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanFlowUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class AzkabanFlowUrn extends Urn { public static final String ENTITY_TYPE = "azkabanFlow"; @@ -48,7 +47,9 @@ public static AzkabanFlowUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new AzkabanFlowUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new AzkabanFlowUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (String) key.getAs(2, String.class)); } catch (Exception 
e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -62,18 +63,20 @@ public static AzkabanFlowUrn deserialize(String rawUrn) throws URISyntaxExceptio } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(AzkabanFlowUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(AzkabanFlowUrn object) throws ClassCastException { + return object.toString(); + } - public AzkabanFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return AzkabanFlowUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, AzkabanFlowUrn.class); + public AzkabanFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return AzkabanFlowUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + AzkabanFlowUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java index 662c89b12139f..f264bccbc5056 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/AzkabanJobUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class AzkabanJobUrn extends Urn { public static final String ENTITY_TYPE = "azkabanJob"; @@ -42,7 +41,8 @@ public static AzkabanJobUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new AzkabanJobUrn((AzkabanFlowUrn) key.getAs(0, AzkabanFlowUrn.class), + return new AzkabanJobUrn( + (AzkabanFlowUrn) key.getAs(0, AzkabanFlowUrn.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -57,18 +57,20 @@ public static AzkabanJobUrn deserialize(String rawUrn) throws URISyntaxException static { Custom.initializeCustomClass(AzkabanFlowUrn.class); - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(AzkabanJobUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(AzkabanJobUrn object) throws ClassCastException { + return object.toString(); + } - public AzkabanJobUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return AzkabanJobUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, AzkabanJobUrn.class); + public AzkabanJobUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return AzkabanJobUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + AzkabanJobUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java index 4840a2bf7b1e8..8193bd05b527a 100644 --- 
a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/ChartUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class ChartUrn extends Urn { public static final String ENTITY_TYPE = "chart"; @@ -42,7 +41,8 @@ public static ChartUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new ChartUrn((String)key.getAs(0, String.class), (String)key.getAs(1, String.class)); + return new ChartUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -55,18 +55,20 @@ public static ChartUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(ChartUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(ChartUrn object) throws ClassCastException { + return object.toString(); + } - public ChartUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return ChartUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, ChartUrn.class); + public ChartUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return ChartUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + ChartUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java index da33ed2a625f1..0ed5b3514e786 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpGroupUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class CorpGroupUrn extends Urn { public static final String ENTITY_TYPE = "corpGroup"; @@ -31,7 +30,7 @@ public static CorpGroupUrn createFromString(String rawUrn) throws URISyntaxExcep } private static CorpGroupUrn decodeUrn(String groupName) throws Exception { - return new CorpGroupUrn(TupleKey.create(new Object[]{groupName}), groupName); + return new CorpGroupUrn(TupleKey.create(new Object[] {groupName}), groupName); } public static CorpGroupUrn createFromUrn(Urn urn) throws URISyntaxException { @@ -45,9 +44,10 @@ public static CorpGroupUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return decodeUrn((String)key.getAs(0, String.class)); + return decodeUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -58,18 +58,20 @@ public static CorpGroupUrn deserialize(String rawUrn) throws URISyntaxException } static { - Custom.registerCoercer(new DirectCoercer() { - public Object 
coerceInput(CorpGroupUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(CorpGroupUrn object) throws ClassCastException { + return object.toString(); + } - public CorpGroupUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return CorpGroupUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, CorpGroupUrn.class); + public CorpGroupUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return CorpGroupUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + CorpGroupUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java index da527254bbe2c..701e18a015753 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/CorpuserUrn.java @@ -1,12 +1,9 @@ package com.linkedin.common.urn; -import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import java.util.regex.Pattern; - public final class CorpuserUrn extends Urn { @@ -40,7 +37,8 @@ public static CorpuserUrn createFromUrn(Urn urn) throws URISyntaxException { try { return new CorpuserUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -51,19 +49,20 @@ public static CorpuserUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(CorpuserUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(CorpuserUrn object) throws ClassCastException { + return object.toString(); + } - public CorpuserUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return CorpuserUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, CorpuserUrn.class); + public CorpuserUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return CorpuserUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + CorpuserUrn.class); } - } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java index ed4b38fe2f2be..ceb06986989b5 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DashboardUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DashboardUrn extends Urn { public static final 
String ENTITY_TYPE = "dashboard"; @@ -42,7 +41,8 @@ public static DashboardUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DashboardUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new DashboardUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -55,18 +55,20 @@ public static DashboardUrn deserialize(String rawUrn) throws URISyntaxException } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(DashboardUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(DashboardUrn object) throws ClassCastException { + return object.toString(); + } - public DashboardUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DashboardUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DashboardUrn.class); + public DashboardUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DashboardUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DashboardUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java index 40e6d796d1882..2df70eed13343 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataFlowUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DataFlowUrn extends Urn { public static final String ENTITY_TYPE = "dataFlow"; @@ -48,7 +47,9 @@ public static DataFlowUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataFlowUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new DataFlowUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (String) key.getAs(2, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); @@ -62,18 +63,20 @@ public static DataFlowUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(DataFlowUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(DataFlowUrn object) throws ClassCastException { + return object.toString(); + } - public DataFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataFlowUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataFlowUrn.class); + public DataFlowUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return 
DataFlowUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataFlowUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java index 46579a40897a3..6d0f37d1796b8 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataJobUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class DataJobUrn extends Urn { public static final String ENTITY_TYPE = "dataJob"; @@ -42,8 +41,8 @@ public static DataJobUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataJobUrn((DataFlowUrn) key.getAs(0, DataFlowUrn.class), - (String) key.getAs(1, String.class)); + return new DataJobUrn( + (DataFlowUrn) key.getAs(0, DataFlowUrn.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -57,18 +56,20 @@ public static DataJobUrn deserialize(String rawUrn) throws URISyntaxException { static { Custom.initializeCustomClass(DataFlowUrn.class); - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(DataJobUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(DataJobUrn object) throws ClassCastException { + return object.toString(); + } - public DataJobUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataJobUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataJobUrn.class); + public DataJobUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataJobUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataJobUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java index 25d219ef2c39e..910e6b9c98e96 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataPlatformUrn.java @@ -3,10 +3,8 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class DataPlatformUrn extends Urn { public static final String ENTITY_TYPE = "dataPlatform"; @@ -50,18 +48,20 @@ public static DataPlatformUrn deserialize(String rawUrn) throws URISyntaxExcepti } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(DataPlatformUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(DataPlatformUrn object) throws ClassCastException { + return object.toString(); + } - public DataPlatformUrn coerceOutput(Object object) throws 
TemplateOutputCastException { - try { - return DataPlatformUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataPlatformUrn.class); + public DataPlatformUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataPlatformUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataPlatformUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java index 2edfdae251b01..513ffa6d8cf44 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DataProcessUrn.java @@ -4,12 +4,8 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; -import static com.linkedin.common.urn.UrnUtils.toFabricType; - - public class DataProcessUrn extends Urn { public static final String ENTITY_TYPE = "dataProcess"; @@ -55,10 +51,13 @@ public static DataProcessUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DataProcessUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class), + return new DataProcessUrn( + (String) key.getAs(0, String.class), + (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -67,18 +66,20 @@ public static DataProcessUrn createFromUrn(Urn urn) throws URISyntaxException { static { Custom.initializeCustomClass(DataProcessUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(DataProcessUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(DataProcessUrn object) throws ClassCastException { + return object.toString(); + } - public DataProcessUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DataProcessUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DataProcessUrn.class); + public DataProcessUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DataProcessUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DataProcessUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java index 3d4b7d71566be..14cbfaf02fbae 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetFieldUrn.java @@ -5,53 +5,49 @@ import 
com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Standardized dataset field information identifier - */ +/** Standardized dataset field information identifier */ public class DatasetFieldUrn extends Urn { // uniquely identifies urn's key type public static final String ENTITY_TYPE = "datasetField"; - /** - * Dataset urn of the datasetFieldUrn - */ + /** Dataset urn of the datasetFieldUrn */ private final DatasetUrn _dataset; - /** - * Field of datasetFieldUrn - */ + /** Field of datasetFieldUrn */ private final String _fieldPath; static { Custom.initializeCustomClass(DatasetUrn.class); - Custom.registerCoercer(new DirectCoercer() { + Custom.registerCoercer( + new DirectCoercer() { - @Override - public String coerceInput(DatasetFieldUrn object) throws ClassCastException { - return object.toString(); - } + @Override + public String coerceInput(DatasetFieldUrn object) throws ClassCastException { + return object.toString(); + } - @Override - public DatasetFieldUrn coerceOutput(Object object) throws TemplateOutputCastException { - if (object instanceof String) { - try { - return DatasetFieldUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + @Override + public DatasetFieldUrn coerceOutput(Object object) throws TemplateOutputCastException { + if (object instanceof String) { + try { + return DatasetFieldUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + throw new TemplateOutputCastException( + (("Output '" + object) + + ("' is not a String, and cannot be coerced to " + + DatasetFieldUrn.class.getName()))); } - } - throw new TemplateOutputCastException((("Output '" + object) + ("' is not a String, and cannot be coerced to " - + DatasetFieldUrn.class.getName()))); - } - }, DatasetFieldUrn.class); + }, + DatasetFieldUrn.class); } - public DatasetFieldUrn(String dataPlatform, String datasetName, FabricType fabricType, String fieldPath) { + public DatasetFieldUrn( + String dataPlatform, String datasetName, FabricType fabricType, String fieldPath) { this(new DatasetUrn(new DataPlatformUrn(dataPlatform), datasetName, fabricType), fieldPath); } @@ -86,9 +82,11 @@ public static DatasetFieldUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DatasetFieldUrn((DatasetUrn) key.getAs(0, DatasetUrn.class), (String) key.getAs(1, String.class)); + return new DatasetFieldUrn( + (DatasetUrn) key.getAs(0, DatasetUrn.class), (String) key.getAs(1, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java index 3be084d1daff9..5f18ce5f1abe7 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/DatasetUrn.java @@ -6,7 +6,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import 
java.net.URISyntaxException; - public final class DatasetUrn extends Urn { public static final String ENTITY_TYPE = "dataset"; @@ -49,10 +48,13 @@ public static DatasetUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new DatasetUrn((DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), - (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); + return new DatasetUrn( + (DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), + (String) key.getAs(1, String.class), + (FabricType) key.getAs(2, FabricType.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -66,18 +68,20 @@ public static DatasetUrn deserialize(String rawUrn) throws URISyntaxException { Custom.initializeCustomClass(DataPlatformUrn.class); Custom.initializeCustomClass(DatasetUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(DatasetUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(DatasetUrn object) throws ClassCastException { + return object.toString(); + } - public DatasetUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return DatasetUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, DatasetUrn.class); + public DatasetUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return DatasetUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + DatasetUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java index 597ae3386fec1..24fd7f26bf977 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/FabricUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class FabricUrn extends Urn { public static final String ENTITY_TYPE = "fabric"; @@ -45,18 +44,20 @@ public static FabricUrn createFromUrn(Urn urn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(FabricUrn object) throws ClassCastException { - return object.toString(); - } - - public FabricUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return FabricUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, FabricUrn.class); + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(FabricUrn object) throws ClassCastException { + return object.toString(); + } + + public FabricUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return FabricUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " 
+ e.getMessage(), e); + } + } + }, + FabricUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java index 29ff1aa5fcdb3..7820eac21755d 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryNodeUrn.java @@ -3,66 +3,66 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class GlossaryNodeUrn extends Urn { - public static final String ENTITY_TYPE = "glossaryNode"; + public static final String ENTITY_TYPE = "glossaryNode"; - private final String _name; + private final String _name; - public GlossaryNodeUrn(String name) { - super(ENTITY_TYPE, TupleKey.create(name)); - this._name = name; - } + public GlossaryNodeUrn(String name) { + super(ENTITY_TYPE, TupleKey.create(name)); + this._name = name; + } - public String getNameEntity() { - return _name; - } + public String getNameEntity() { + return _name; + } - public static GlossaryNodeUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static GlossaryNodeUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static GlossaryNodeUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be 'glossaryNode'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 1) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys."); - } else { - try { - return new GlossaryNodeUrn((String) key.getAs(0, String.class)); - } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); - } - } + public static GlossaryNodeUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException(urn.toString(), "Urn entity type should be 'glossaryNode'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 1) { + throw new URISyntaxException(urn.toString(), "Invalid number of keys."); + } else { + try { + return new GlossaryNodeUrn((String) key.getAs(0, String.class)); + } catch (Exception var3) { + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } + } } + } - public static GlossaryNodeUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static GlossaryNodeUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(GlossaryNodeUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(GlossaryNodeUrn object) throws ClassCastException { + return object.toString(); + } - 
public GlossaryNodeUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return GlossaryNodeUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public GlossaryNodeUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return GlossaryNodeUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, GlossaryNodeUrn.class); - } - + } + }, + GlossaryNodeUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java index bf8ec131d410e..f7e3496fbc582 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/GlossaryTermUrn.java @@ -1,14 +1,9 @@ package com.linkedin.common.urn; -import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - -import java.lang.reflect.Array; import java.net.URISyntaxException; -import java.util.regex.Pattern; - public final class GlossaryTermUrn extends Urn { @@ -42,7 +37,8 @@ public static GlossaryTermUrn createFromUrn(Urn urn) throws URISyntaxException { try { return new GlossaryTermUrn((String) key.getAs(0, String.class)); } catch (Exception var3) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); + throw new URISyntaxException( + urn.toString(), "Invalid URN Parameter: '" + var3.getMessage()); } } } @@ -53,19 +49,20 @@ public static GlossaryTermUrn deserialize(String rawUrn) throws URISyntaxExcepti } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(GlossaryTermUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(GlossaryTermUrn object) throws ClassCastException { + return object.toString(); + } - public GlossaryTermUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return GlossaryTermUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, GlossaryTermUrn.class); + public GlossaryTermUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return GlossaryTermUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + GlossaryTermUrn.class); } - } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java index 8774ba36d07b2..5c05b74cb0038 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLFeatureUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public final class MLFeatureUrn extends Urn { public static final String ENTITY_TYPE = "mlFeature"; @@ -43,7 +42,8 @@ public static MLFeatureUrn createFromUrn(Urn urn) throws URISyntaxException { throw new 
URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new MLFeatureUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new MLFeatureUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -52,18 +52,20 @@ public static MLFeatureUrn createFromUrn(Urn urn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(MLFeatureUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(MLFeatureUrn object) throws ClassCastException { + return object.toString(); + } - public MLFeatureUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return MLFeatureUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, MLFeatureUrn.class); + public MLFeatureUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return MLFeatureUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + MLFeatureUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java index ded7f90dcc112..85680f5a3922f 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/MLModelUrn.java @@ -1,15 +1,11 @@ package com.linkedin.common.urn; +import com.linkedin.common.FabricType; import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; -import com.linkedin.common.FabricType; - -import static com.linkedin.common.urn.UrnUtils.toFabricType; - - public final class MLModelUrn extends Urn { public static final String ENTITY_TYPE = "mlModel"; @@ -52,8 +48,10 @@ public static MLModelUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new MLModelUrn((DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), - (String) key.getAs(1, String.class), (FabricType) key.getAs(2, FabricType.class)); + return new MLModelUrn( + (DataPlatformUrn) key.getAs(0, DataPlatformUrn.class), + (String) key.getAs(1, String.class), + (FabricType) key.getAs(2, FabricType.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -68,18 +66,20 @@ public static MLModelUrn deserialize(String rawUrn) throws URISyntaxException { static { Custom.initializeCustomClass(DataPlatformUrn.class); Custom.initializeCustomClass(FabricType.class); - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(MLModelUrn object) throws ClassCastException { - return object.toString(); - } - - public MLModelUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return MLModelUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - 
}, MLModelUrn.class); + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(MLModelUrn object) throws ClassCastException { + return object.toString(); + } + + public MLModelUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return MLModelUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + MLModelUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java index c9d6c203d2ed8..00a0660bbf49d 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/NotebookUrn.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.TemplateOutputCastException; import java.net.URISyntaxException; - public class NotebookUrn extends Urn { public static final String ENTITY_TYPE = "notebook"; @@ -41,7 +40,8 @@ public static NotebookUrn createFromUrn(Urn urn) throws URISyntaxException { throw new URISyntaxException(urn.toString(), "Invalid number of keys."); } else { try { - return new NotebookUrn((String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); + return new NotebookUrn( + (String) key.getAs(0, String.class), (String) key.getAs(1, String.class)); } catch (Exception e) { throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } @@ -54,18 +54,20 @@ public static NotebookUrn deserialize(String rawUrn) throws URISyntaxException { } static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(NotebookUrn object) throws ClassCastException { - return object.toString(); - } + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(NotebookUrn object) throws ClassCastException { + return object.toString(); + } - public NotebookUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return NotebookUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } - } - }, NotebookUrn.class); + public NotebookUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return NotebookUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); + } + } + }, + NotebookUrn.class); } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java index 1375cf345b084..60cf2d4e16819 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TagUrn.java @@ -3,65 +3,67 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class TagUrn extends Urn { - public static final String ENTITY_TYPE = "tag"; + public static final String ENTITY_TYPE = "tag"; - private final String _name; + private final String _name; - public TagUrn(String name) { - super(ENTITY_TYPE, TupleKey.create(name)); - this._name = name; - } + public TagUrn(String name) { + super(ENTITY_TYPE, TupleKey.create(name)); + this._name = name; + } - public String 
getName() { - return _name; - } + public String getName() { + return _name; + } - public static TagUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static TagUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static TagUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be '" + urn.getEntityType() + "'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 1) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys: found " + key.size() + " expected 1."); - } else { - try { - return new TagUrn((String) key.getAs(0, String.class)); - } catch (Exception e) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); - } - } + public static TagUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException( + urn.toString(), "Urn entity type should be '" + urn.getEntityType() + "'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 1) { + throw new URISyntaxException( + urn.toString(), "Invalid number of keys: found " + key.size() + " expected 1."); + } else { + try { + return new TagUrn((String) key.getAs(0, String.class)); + } catch (Exception e) { + throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } + } } + } - public static TagUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static TagUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(TagUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(TagUrn object) throws ClassCastException { + return object.toString(); + } - public TagUrn coerceOutput(Object object) throws TemplateOutputCastException { - try { - return TagUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public TagUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return TagUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, TagUrn.class); - } + } + }, + TagUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java index 5b348b7d9b1a9..ecdd4f754c4ea 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TestEntityUrn.java @@ -3,68 +3,69 @@ import com.linkedin.data.template.Custom; import com.linkedin.data.template.DirectCoercer; import 
com.linkedin.data.template.TemplateOutputCastException; - import java.net.URISyntaxException; - public final class TestEntityUrn extends Urn { - public static final String ENTITY_TYPE = "testEntity"; + public static final String ENTITY_TYPE = "testEntity"; - private final String _keyPart1; - private final String _keyPart2; - private final String _keyPart3; + private final String _keyPart1; + private final String _keyPart2; + private final String _keyPart3; - public TestEntityUrn(String keyPart1, String keyPart2, String keyPart3) { - super(ENTITY_TYPE, TupleKey.create(keyPart1, keyPart2, keyPart3)); - this._keyPart1 = keyPart1; - this._keyPart2 = keyPart2; - this._keyPart3 = keyPart3; - } + public TestEntityUrn(String keyPart1, String keyPart2, String keyPart3) { + super(ENTITY_TYPE, TupleKey.create(keyPart1, keyPart2, keyPart3)); + this._keyPart1 = keyPart1; + this._keyPart2 = keyPart2; + this._keyPart3 = keyPart3; + } - public static TestEntityUrn createFromString(String rawUrn) throws URISyntaxException { - return createFromUrn(Urn.createFromString(rawUrn)); - } + public static TestEntityUrn createFromString(String rawUrn) throws URISyntaxException { + return createFromUrn(Urn.createFromString(rawUrn)); + } - public static TestEntityUrn createFromUrn(Urn urn) throws URISyntaxException { - if (!"li".equals(urn.getNamespace())) { - throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); - } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { - throw new URISyntaxException(urn.toString(), "Urn entity type should be '" + ENTITY_TYPE + " got " + urn.getEntityType() + "'."); - } else { - TupleKey key = urn.getEntityKey(); - if (key.size() != 3) { - throw new URISyntaxException(urn.toString(), "Invalid number of keys: found " + key.size() + " expected 3."); - } else { - try { - return new TestEntityUrn( - key.getAs(0, String.class), - key.getAs(1, String.class), - key.getAs(2, String.class)); - } catch (Exception e) { - throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); - } - } + public static TestEntityUrn createFromUrn(Urn urn) throws URISyntaxException { + if (!"li".equals(urn.getNamespace())) { + throw new URISyntaxException(urn.toString(), "Urn namespace type should be 'li'."); + } else if (!ENTITY_TYPE.equals(urn.getEntityType())) { + throw new URISyntaxException( + urn.toString(), + "Urn entity type should be '" + ENTITY_TYPE + " got " + urn.getEntityType() + "'."); + } else { + TupleKey key = urn.getEntityKey(); + if (key.size() != 3) { + throw new URISyntaxException( + urn.toString(), "Invalid number of keys: found " + key.size() + " expected 3."); + } else { + try { + return new TestEntityUrn( + key.getAs(0, String.class), key.getAs(1, String.class), key.getAs(2, String.class)); + } catch (Exception e) { + throw new URISyntaxException(urn.toString(), "Invalid URN Parameter: '" + e.getMessage()); } + } } + } - public static TestEntityUrn deserialize(String rawUrn) throws URISyntaxException { - return createFromString(rawUrn); - } + public static TestEntityUrn deserialize(String rawUrn) throws URISyntaxException { + return createFromString(rawUrn); + } - static { - Custom.registerCoercer(new DirectCoercer() { - public Object coerceInput(TestEntityUrn object) throws ClassCastException { - return object.toString(); - } + static { + Custom.registerCoercer( + new DirectCoercer() { + public Object coerceInput(TestEntityUrn object) throws ClassCastException { + return object.toString(); + } - public TestEntityUrn 
coerceOutput(Object object) throws TemplateOutputCastException { - try { - return TestEntityUrn.createFromString((String) object); - } catch (URISyntaxException e) { - throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); - } + public TestEntityUrn coerceOutput(Object object) throws TemplateOutputCastException { + try { + return TestEntityUrn.createFromString((String) object); + } catch (URISyntaxException e) { + throw new TemplateOutputCastException("Invalid URN syntax: " + e.getMessage(), e); } - }, TestEntityUrn.class); - } + } + }, + TestEntityUrn.class); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java index c26e0d2571b33..f847252e28836 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/TupleKey.java @@ -8,11 +8,10 @@ import java.util.Collections; import java.util.List; - /** - * Represents the entity key portion of a Urn, encoded as a tuple of Strings. - * A single-element tuple is encoded simply as the value of that element. A tuple with multiple - * elements is encoded as a parenthesized list of strings, comma-delimited. + * Represents the entity key portion of a Urn, encoded as a tuple of Strings. A single-element tuple + * is encoded simply as the value of that element. A tuple with multiple elements is encoded as a + * parenthesized list of strings, comma-delimited. */ public class TupleKey { public static final char START_TUPLE = '('; @@ -31,27 +30,26 @@ public TupleKey(List tuple) { /** * Constructs a {@code TupleKey} given a list of tuple parts. - *

- * When {@code calledFromExternal} is {@code false}, it means the constructor
- * was called from within this class, where we can ensure our implementation
- * satisfies some constraints and skip some work.
- * <p>
- * The work we skip is checking that no tuple parts are null and wrapping the
- * list with an unmodifiable view.
- * <p>
- * For context, an earlier performance optimization introduced from Guava the
- * {@code ImmutableList}, which gives both of that for free. Since then, we
- * have encountered complications with Guava (specifically, Hadoop at the time
- * of this writing requires using Guava 11 -- see LIHADOOP-44200). In order to
- * resolve that with minimal effect, we copy this behavior here.
- * <p>
- * Whether this optimization is meaningful can be examined later, if time is
- * permitting, or {@code List#copyOf} from JDK 10 can be used to recover the
- * benefits more elegantly when it is available for us to use.
+ *
+ * <p>When {@code calledFromExternal} is {@code false}, it means the constructor was called from
+ * within this class, where we can ensure our implementation satisfies some constraints and skip
+ * some work.
+ *
+ * <p>The work we skip is checking that no tuple parts are null and wrapping the list with an
+ * unmodifiable view.
+ *
+ * <p>For context, an earlier performance optimization introduced from Guava the {@code
+ * ImmutableList}, which gives both of that for free. Since then, we have encountered
+ * complications with Guava (specifically, Hadoop at the time of this writing requires using Guava
+ * 11 -- see LIHADOOP-44200). In order to resolve that with minimal effect, we copy this behavior
+ * here.
+ *
+ * <p>
Whether this optimization is meaningful can be examined later, if time is permitting, or + * {@code List#copyOf} from JDK 10 can be used to recover the benefits more elegantly when it is + * available for us to use. * * @param tuple tuple parts - * @param calledFromExternal whether the constructions is invoked from outside - * of this class + * @param calledFromExternal whether the constructions is invoked from outside of this class */ private TupleKey(List tuple, boolean calledFromExternal) { _tuple = calledFromExternal ? Collections.unmodifiableList(checkStringsNotNull(tuple)) : tuple; @@ -74,9 +72,8 @@ public static TupleKey createWithOneKeyPart(String input) { } /** - * Create a tuple key from a sequence of Objects. The resulting tuple - * consists of the sequence of String values resulting from calling .toString() on each - * object in the input sequence + * Create a tuple key from a sequence of Objects. The resulting tuple consists of the sequence of + * String values resulting from calling .toString() on each object in the input sequence * * @param tuple - a sequence of Objects to be represented in the tuple * @return - a TupleKey representation of the object sequence @@ -99,9 +96,8 @@ public static TupleKey create(Object... tuple) { } /** - * Create a tuple key from a sequence of Objects. The resulting tuple - * consists of the sequence of String values resulting from calling .toString() on each - * object in the input sequence + * Create a tuple key from a sequence of Objects. The resulting tuple consists of the sequence of + * String values resulting from calling .toString() on each object in the input sequence * * @param tuple - a sequence of Objects to be represented in the tuple * @return - a TupleKey representation of the object sequence @@ -130,7 +126,8 @@ public String get(int index) { * Return a tuple element coerced to a specific type * * @param index - the index of the tuple element to be returned - * @param clazz - the Class object for the return type. Must be String, Short, Boolean, Integer, Long, or an Enum subclass + * @param clazz - the Class object for the return type. Must be String, Short, Boolean, Integer, + * Long, or an Enum subclass * @param - the desired type for the returned object. * @return The specified element of the tuple, coerced to the specified type T. */ @@ -166,9 +163,7 @@ public T getAs(int index, Class clazz) { return rv; } - /** - * Helper method to capture E. - */ + /** Helper method to capture E. */ private > Enum getEnumValue(Class clazz, String value) { @SuppressWarnings("unchecked") final Class enumClazz = (Class) clazz.asSubclass(Enum.class); @@ -228,6 +223,7 @@ public static TupleKey fromString(String s) throws URISyntaxException { /** * Create a tuple key from a string starting at the given index. + * * @param s raw urn string or urn type specific string. * @param startIndex index where urn type specific string starts. * @return entity tuple key. 
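The encoding rules spelled out in the TupleKey javadoc above are easiest to see end to end. Below is a minimal, self-contained sketch: the TupleKeyExample wrapper class is illustrative only, while the TupleKey calls and the getAs coercion contract are the ones visible in this diff, and the expected outputs assume that toString() renders the encoded form, as the class javadoc describes.

import com.linkedin.common.urn.TupleKey;
import java.net.URISyntaxException;

public class TupleKeyExample {
  public static void main(String[] args) throws URISyntaxException {
    // A tuple with multiple elements encodes as a parenthesized, comma-delimited list.
    TupleKey multi = TupleKey.create(1, 2, 3);
    System.out.println(multi); // expected: (1,2,3)

    // A single-element tuple encodes simply as the value of that element.
    TupleKey single = TupleKey.createWithOneKeyPart("foo");
    System.out.println(single); // expected: foo

    // fromString parses the encoded form back into its parts,
    TupleKey parsed = TupleKey.fromString("(1,2,3)");
    System.out.println(parsed.size()); // expected: 3

    // and getAs coerces a part to one of the supported types
    // (String, Short, Boolean, Integer, Long, or an Enum subclass).
    Integer first = parsed.getAs(0, Integer.class);
    System.out.println(first); // expected: 1
  }
}

This mirrors how a multi-part Urn key such as urn:li:example:(1,2,3) is built and parsed by the Urn class changed later in this patch.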
@@ -237,7 +233,8 @@ public static TupleKey fromString(String s, int startIndex) throws URISyntaxExce return new TupleKey(parseKeyParts(s, startIndex), false); } - private static List parseKeyParts(String input, int startIndex) throws URISyntaxException { + private static List parseKeyParts(String input, int startIndex) + throws URISyntaxException { if (startIndex >= input.length()) { return Collections.emptyList(); } @@ -270,7 +267,7 @@ private static List parseKeyParts(String input, int startIndex) throws U List parts = new ArrayList<>(3); int numStartedParenPairs = 1; // We know we have at least one starting paren - int partStart = startIndex + 1; // +1 to skip opening paren + int partStart = startIndex + 1; // +1 to skip opening paren for (int i = startIndex + 1; i < input.length(); i++) { char c = input.charAt(i); if (c == START_TUPLE) { @@ -302,7 +299,8 @@ private static List parseKeyParts(String input, int startIndex) throws U throw new URISyntaxException(input, "mismatched paren nesting"); } - int lastPartEnd = input.charAt(input.length() - 1) == END_TUPLE ? input.length() - 1 : input.length(); + int lastPartEnd = + input.charAt(input.length() - 1) == END_TUPLE ? input.length() - 1 : input.length(); if (lastPartEnd - partStart <= 0) { throw new URISyntaxException(input, "empty part disallowed"); @@ -347,4 +345,4 @@ private static List checkStringsNotNull(List list) { } return list; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java index 84231fdf3be4a..e7ae51b57671f 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/Urn.java @@ -8,34 +8,30 @@ import java.util.concurrent.ConcurrentHashMap; import javax.annotation.Nullable; - /** - * Represents a URN (Uniform Resource Name) for a Linkedin entity, in the spirit of RFC 2141. - * Our default URN format uses the non-standard namespace identifier "li", and hence default URNs - * begin with "urn:li:". Note that the namespace according to - * RFC 2141 [Section 2.1] is case-insensitive and + * Represents a URN (Uniform Resource Name) for a Linkedin entity, in the spirit of RFC 2141. Our + * default URN format uses the non-standard namespace identifier "li", and hence default URNs begin + * with "urn:li:". Note that the namespace according to RFC 2141 [Section 2.1] is case-insensitive and * for safety we only allow lower-case letters in our implementation. * - *

- * <p>Our URNs all consist of an "entity type", which denotes an internal namespace for the resource,
- * as well as an entity key, formatted as a tuple of parts. The full format of a URN is:
+ * <p>Our URNs all consist of an "entity type", which denotes an internal namespace for the
+ * resource, as well as an entity key, formatted as a tuple of parts. The full format of a URN is:
 *
 * <p><URN> ::= urn:<namespace>:<entityType>:<entityKey>
 *
- * <p>The entity key is represented as a tuple of strings. If the tuple is of length 1, the
- * key is encoded directly. If the tuple has multiple parts, the parts are enclosed in
- * parenthesizes and comma-delimited, e.g., a URN whose key is the tuple [1, 2, 3] would be
- * encoded as:
+ * <p>The entity key is represented as a tuple of strings. If the tuple is of length 1, the key is
+ * encoded directly. If the tuple has multiple parts, the parts are enclosed in parenthesizes and
+ * comma-delimited, e.g., a URN whose key is the tuple [1, 2, 3] would be encoded as:
 *
 * <p>
urn:li:example:(1,2,3) */ public class Urn { /** - * - * @deprecated Don't create the Urn string manually, use Typed Urns or {@link #create(String entityType, Object... - * tupleParts)} + * @deprecated Don't create the Urn string manually, use Typed Urns or {@link #create(String + * entityType, Object... tupleParts)} */ - @Deprecated - public static final String URN_PREFIX = "urn:li:"; + @Deprecated public static final String URN_PREFIX = "urn:li:"; private static final String URN_START = "urn:"; private static final String DEFAULT_NAMESPACE = "li"; @@ -46,29 +42,28 @@ public class Urn { // Used to speed up toString() in the common case where the Urn is built up // from parsing an input string. - @Nullable - private String _cachedStringUrn; + @Nullable private String _cachedStringUrn; static { Custom.registerCoercer(new UrnCoercer(), Urn.class); } /** - * Customized interner for all strings that may be used for _entityType. - * Urn._entityType is by nature a pretty small set of values, such as "member", - * "company" etc. Due to this fact, when an app creates and keeps in memory a - * large number of Urn's, it may end up with a very big number of identical strings. - * Thus it's worth saving memory by interning _entityType when an Urn is instantiated. - * String.intern() would be a natural choice, but it takes a few microseconds, and - * thus may become too expensive when many (temporary) Urns are generated in very - * quick succession. Thus we use a faster CHM below. Compared to the internal table - * used by String.intern() it has a bigger memory overhead per each interned string, - * but for a small set of canonical strings it doesn't matter. + * Customized interner for all strings that may be used for _entityType. Urn._entityType is by + * nature a pretty small set of values, such as "member", "company" etc. Due to this fact, when an + * app creates and keeps in memory a large number of Urn's, it may end up with a very big number + * of identical strings. Thus it's worth saving memory by interning _entityType when an Urn is + * instantiated. String.intern() would be a natural choice, but it takes a few microseconds, and + * thus may become too expensive when many (temporary) Urns are generated in very quick + * succession. Thus we use a faster CHM below. Compared to the internal table used by + * String.intern() it has a bigger memory overhead per each interned string, but for a small set + * of canonical strings it doesn't matter. */ private static final Map ENTITY_TYPE_INTERNER = new ConcurrentHashMap<>(); /** * Create a Urn given its raw String representation. + * * @param rawUrn - the String representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -77,10 +72,7 @@ public Urn(String rawUrn) throws URISyntaxException { _cachedStringUrn = rawUrn; if (!rawUrn.startsWith(URN_START)) { - throw new URISyntaxException( - rawUrn, - "Urn doesn't start with 'urn:'. Urn: " + rawUrn, - 0); + throw new URISyntaxException(rawUrn, "Urn doesn't start with 'urn:'. Urn: " + rawUrn, 0); } int secondColonIndex = rawUrn.indexOf(':', URN_START.length() + 1); @@ -89,9 +81,7 @@ public Urn(String rawUrn) throws URISyntaxException { // First char of entityType must be [a-z] if (!charIsLowerCaseAlphabet(rawUrn, secondColonIndex + 1)) { throw new URISyntaxException( - rawUrn, - "First char of entityType must be [a-z]! Urn: " + rawUrn, - secondColonIndex + 1); + rawUrn, "First char of entityType must be [a-z]! 
Urn: " + rawUrn, secondColonIndex + 1); } int thirdColonIndex = rawUrn.indexOf(':', secondColonIndex + 2); @@ -101,8 +91,7 @@ public Urn(String rawUrn) throws URISyntaxException { _entityType = rawUrn.substring(secondColonIndex + 1); if (!charsAreWordClass(_entityType)) { throw new URISyntaxException( - rawUrn, - "entityType must have only [a-zA-Z0-9] chars. Urn: " + rawUrn); + rawUrn, "entityType must have only [a-zA-Z0-9] chars. Urn: " + rawUrn); } _entityKey = new TupleKey(); return; @@ -111,15 +100,13 @@ public Urn(String rawUrn) throws URISyntaxException { String entityType = rawUrn.substring(secondColonIndex + 1, thirdColonIndex); if (!charsAreWordClass(entityType)) { throw new URISyntaxException( - rawUrn, - "entityType must have only [a-zA-Z_0-9] chars. Urn: " + rawUrn); + rawUrn, "entityType must have only [a-zA-Z_0-9] chars. Urn: " + rawUrn); } int numEntityKeyChars = rawUrn.length() - (thirdColonIndex + 1); if (numEntityKeyChars <= 0) { throw new URISyntaxException( - rawUrn, - "Urns with empty entityKey are not allowed. Urn: " + rawUrn); + rawUrn, "Urns with empty entityKey are not allowed. Urn: " + rawUrn); } _entityType = internEntityType(entityType); @@ -135,8 +122,8 @@ public Urn(String rawUrn) throws URISyntaxException { } /** - * Create a Urn from an entity type and an encoded String key. The key is converted to a - * Tuple by parsing using @see TupleKey#fromString + * Create a Urn from an entity type and an encoded String key. The key is converted to a Tuple by + * parsing using @see TupleKey#fromString * * @param entityType - the entity type for the Urn * @param typeSpecificString - the encoded string representation of a TupleKey @@ -158,9 +145,8 @@ public Urn(String namespace, String entityType, TupleKey entityKey) { } /** - * DEPRECATED - use {@link #createFromTuple(String, Object...)} - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * DEPRECATED - use {@link #createFromTuple(String, Object...)} Create a Urn from an entity type + * and a sequence of key parts. The key parts are converted to a tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -172,9 +158,9 @@ public static Urn create(String entityType, Object... tupleParts) { } /** - * DEPRECATED - use {@link #createFromTuple(String, java.util.Collection)} - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * DEPRECATED - use {@link #createFromTuple(String, java.util.Collection)} Create a Urn from an + * entity type and a sequence of key parts. The key parts are converted to a tuple using @see + * TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -186,8 +172,8 @@ public static Urn create(String entityType, Collection tupleParts) { } /** - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an entity type and a sequence of key parts. The key parts are converted to a + * tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -198,21 +184,22 @@ public static Urn createFromTuple(String entityType, Object... 
tupleParts) { } /** - * Create a Urn from an namespace, entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an namespace, entity type and a sequence of key parts. The key parts are + * converted to a tuple using @see TupleKey#create * * @param namespace - The namespace of this urn. * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn * @return - a new Urn object */ - public static Urn createFromTupleWithNamespace(String namespace, String entityType, Object... tupleParts) { + public static Urn createFromTupleWithNamespace( + String namespace, String entityType, Object... tupleParts) { return new Urn(namespace, entityType, TupleKey.create(tupleParts)); } /** - * Create a Urn from an entity type and a sequence of key parts. The key parts are converted - * to a tuple using @see TupleKey#create + * Create a Urn from an entity type and a sequence of key parts. The key parts are converted to a + * tuple using @see TupleKey#create * * @param entityType - the entity type for the Urn * @param tupleParts - a sequence of objects representing the key of the Urn @@ -224,6 +211,7 @@ public static Urn createFromTuple(String entityType, Collection tupleParts) { /** * Create a Urn given its raw String representation. + * * @param rawUrn - the String representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -233,6 +221,7 @@ public static Urn createFromString(String rawUrn) throws URISyntaxException { /** * Create a Urn given its raw CharSequence representation. + * * @param rawUrn - the Char Sequence representation of a Urn. * @throws URISyntaxException - if the String is not a valid Urn. */ @@ -242,8 +231,8 @@ public static Urn createFromCharSequence(CharSequence rawUrn) throws URISyntaxEx } /** - * Create a Urn from an entity type and an encoded String key. The key is converted to a - * Tuple by parsing using @see TupleKey#fromString + * Create a Urn from an entity type and an encoded String key. The key is converted to a Tuple by + * parsing using @see TupleKey#fromString * * @param entityType - the entity type for the Urn * @param typeSpecificString - the encoded string representation of a TupleKey @@ -298,8 +287,8 @@ public Urn getIdAsUrn() { } /** - * Return the namespace-specific string portion of this URN, i.e., - * everything following the "urn:<namespace>:" prefix. + * Return the namespace-specific string portion of this URN, i.e., everything following the + * "urn:<namespace>:" prefix. * * @return The namespace-specific string portion of this URN */ @@ -344,28 +333,21 @@ public int hashCode() { return result; } - private static String validateAndExtractNamespace(String rawUrn, - int secondColonIndex) + private static String validateAndExtractNamespace(String rawUrn, int secondColonIndex) throws URISyntaxException { if (!charIsLowerCaseAlphabet(rawUrn, URN_START.length())) { throw new URISyntaxException( - rawUrn, - "First char of Urn namespace must be [a-z]! Urn: " + rawUrn, - URN_START.length()); + rawUrn, "First char of Urn namespace must be [a-z]! Urn: " + rawUrn, URN_START.length()); } if (secondColonIndex == -1) { - throw new URISyntaxException( - rawUrn, - "Missing second ':' char. Urn: " + rawUrn); + throw new URISyntaxException(rawUrn, "Missing second ':' char. 
Urn: " + rawUrn); } int namespaceLen = secondColonIndex - URN_START.length(); if (namespaceLen > 32) { throw new URISyntaxException( - rawUrn, - "Namespace length > 32 chars. Urn: " + rawUrn, - secondColonIndex); + rawUrn, "Namespace length > 32 chars. Urn: " + rawUrn, secondColonIndex); } if (namespaceLen == 2 @@ -377,9 +359,7 @@ private static String validateAndExtractNamespace(String rawUrn, String namespace = rawUrn.substring(URN_START.length(), secondColonIndex); if (!charsAreValidNamespace(namespace)) { - throw new URISyntaxException( - rawUrn, - "Chars in namespace must be [a-z0-9-]!. Urn: " + rawUrn); + throw new URISyntaxException(rawUrn, "Chars in namespace must be [a-z0-9-]!. Urn: " + rawUrn); } return namespace; } @@ -414,17 +394,17 @@ private static boolean charsAreWordClass(String input) { char c = input.charAt(index); // Not using Character.isLowerCase etc on purpose because that is // unicode-aware and we only need ASCII. Handling only ASCII is faster. - if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') - || (c >= '0' && c <= '9') || c == '_')) { + if (!((c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z') + || (c >= '0' && c <= '9') + || c == '_')) { return false; } } return true; } - /** - * Intern a string to be assigned to the _entityType field. - */ + /** Intern a string to be assigned to the _entityType field. */ private static String internEntityType(String et) { // Most of the times this method is called, the canonical string is already // in the table, so let's do a quick get() first. @@ -436,4 +416,4 @@ private static String internEntityType(String et) { canonicalET = ENTITY_TYPE_INTERNER.putIfAbsent(et, et); return canonicalET != null ? canonicalET : et; } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java index a1bd54a995d65..e04796690db77 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnCoercer.java @@ -10,22 +10,21 @@ import java.net.URISyntaxException; public class UrnCoercer implements DirectCoercer { - public UrnCoercer() { - } + public UrnCoercer() {} - public Object coerceInput(Urn object) throws ClassCastException { - return object.toString(); - } + public Object coerceInput(Urn object) throws ClassCastException { + return object.toString(); + } - public Urn coerceOutput(Object object) throws TemplateOutputCastException { - if (object.getClass() != String.class) { - throw new TemplateOutputCastException("Urn not backed by String"); - } else { - try { - return Urn.createFromString((String)object); - } catch (URISyntaxException use) { - throw new TemplateOutputCastException("Invalid URN syntax: " + use.getMessage(), use); - } - } + public Urn coerceOutput(Object object) throws TemplateOutputCastException { + if (object.getClass() != String.class) { + throw new TemplateOutputCastException("Urn not backed by String"); + } else { + try { + return Urn.createFromString((String) object); + } catch (URISyntaxException use) { + throw new TemplateOutputCastException("Invalid URN syntax: " + use.getMessage(), use); + } } + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java index b68e429a5202c..25cb5475d7299 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java +++ 
b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnUtils.java @@ -2,75 +2,75 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.FabricType; - import java.net.URISyntaxException; import javax.annotation.Nonnull; import javax.annotation.Nullable; public class UrnUtils { - private static final CorpuserUrn UNKNOWN_ACTOR_URN = new CorpuserUrn("unknown"); + private static final CorpuserUrn UNKNOWN_ACTOR_URN = new CorpuserUrn("unknown"); - private UrnUtils() { - } + private UrnUtils() {} - /** - * Convert platform + dataset + origin into DatasetUrn - * @param platformName String, e.g. hdfs, oracle - * @param datasetName String, e.g. /jobs/xxx, ABOOK.ADDRESS - * @param origin PROD, CORP, EI, DEV - * @return DatasetUrn - */ - @Nonnull - public static DatasetUrn toDatasetUrn(@Nonnull String platformName, @Nonnull String datasetName, - @Nonnull String origin) { - return new DatasetUrn(new DataPlatformUrn(platformName), datasetName, toFabricType(origin)); - } + /** + * Convert platform + dataset + origin into DatasetUrn + * + * @param platformName String, e.g. hdfs, oracle + * @param datasetName String, e.g. /jobs/xxx, ABOOK.ADDRESS + * @param origin PROD, CORP, EI, DEV + * @return DatasetUrn + */ + @Nonnull + public static DatasetUrn toDatasetUrn( + @Nonnull String platformName, @Nonnull String datasetName, @Nonnull String origin) { + return new DatasetUrn(new DataPlatformUrn(platformName), datasetName, toFabricType(origin)); + } - /** - * Convert fabric String to FabricType - * @param fabric PROD, CORP, EI, DEV, LIT, PRIME - * @return FabricType - */ - @Nonnull - public static FabricType toFabricType(@Nonnull String fabric) { - switch (fabric.toUpperCase()) { - case "PROD": - return FabricType.PROD; - case "CORP": - return FabricType.CORP; - case "EI": - return FabricType.EI; - case "DEV": - return FabricType.DEV; - default: - throw new IllegalArgumentException("Unsupported Fabric Type: " + fabric); - } + /** + * Convert fabric String to FabricType + * + * @param fabric PROD, CORP, EI, DEV, LIT, PRIME + * @return FabricType + */ + @Nonnull + public static FabricType toFabricType(@Nonnull String fabric) { + switch (fabric.toUpperCase()) { + case "PROD": + return FabricType.PROD; + case "CORP": + return FabricType.CORP; + case "EI": + return FabricType.EI; + case "DEV": + return FabricType.DEV; + default: + throw new IllegalArgumentException("Unsupported Fabric Type: " + fabric); } + } - public static Urn getUrn(String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve entity with urn %s, invalid urn", urnStr)); - } + public static Urn getUrn(String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve entity with urn %s, invalid urn", urnStr)); } + } - /** - * Get audit stamp without time. If actor is null, set as Unknown Application URN. - * @param actor Urn - * @return AuditStamp - */ - @Nonnull - public static AuditStamp getAuditStamp(@Nullable Urn actor) { - return new AuditStamp().setActor(getActorOrDefault(actor)); - } + /** + * Get audit stamp without time. If actor is null, set as Unknown Application URN. 
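+ * <p>For example (an illustrative call, not shown elsewhere in this file): {@code
+ * getAuditStamp(null).getActor()} resolves to the fallback {@code urn:li:corpuser:unknown}
+ * held by {@code UNKNOWN_ACTOR_URN}.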
+ * + * @param actor Urn + * @return AuditStamp + */ + @Nonnull + public static AuditStamp getAuditStamp(@Nullable Urn actor) { + return new AuditStamp().setActor(getActorOrDefault(actor)); + } - /** - * Return actor URN, if input actor is null, return Unknown Application URN. - */ - @Nonnull - public static Urn getActorOrDefault(@Nullable Urn actor) { - return actor != null ? actor : UNKNOWN_ACTOR_URN; - } + /** Return actor URN, if input actor is null, return Unknown Application URN. */ + @Nonnull + public static Urn getActorOrDefault(@Nullable Urn actor) { + return actor != null ? actor : UNKNOWN_ACTOR_URN; + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java index fb3d79964f71d..24026f0287b22 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/UrnValidator.java @@ -7,12 +7,11 @@ import com.linkedin.data.schema.validator.ValidatorContext; import java.net.URISyntaxException; - /** * Rest.li Validator responsible for ensuring that {@link Urn} objects are well-formed. * - * Note that this validator does not validate the integrity of strongly typed urns, - * or validate Urn objects against their associated key aspect. + *
+ * <p>
Note that this validator does not validate the integrity of strongly typed urns, or validate + * Urn objects against their associated key aspect. */ public class UrnValidator implements Validator { @Override @@ -22,9 +21,13 @@ public void validate(ValidatorContext context) { try { Urn.createFromString((String) context.dataElement().getValue()); } catch (URISyntaxException e) { - context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid", context.dataElement().getValue())); + context.addResult( + new Message( + context.dataElement().path(), + "\"Provided urn %s\" is invalid", + context.dataElement().getValue())); context.setHasFix(false); } } } -} \ No newline at end of file +} diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java index d5b7a7da456a9..2742d13fb4dba 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrn.java @@ -2,54 +2,53 @@ public class VersionedUrn { - private final String _urn; - private final String _versionStamp; - - public VersionedUrn(String urn, String versionStamp) { - _urn = urn; - _versionStamp = versionStamp; - } - - public String getUrn() { - return _urn; - } - - public String getVersionStamp() { - return _versionStamp; - } - - @SuppressWarnings("unchecked") - @Override - public boolean equals(Object obj) { - if (obj instanceof VersionedUrn) { - VersionedUrn other = (VersionedUrn) obj; - return equals(_urn, other._urn) && equals(_versionStamp, other._versionStamp); - } - return false; - } - - @Override - public int hashCode() { - int h1 = _urn != null ? _urn.hashCode() : 0; - int h2 = _versionStamp != null ? _versionStamp.hashCode() : 0; - return 31 * h1 + h2; - } - - @Override - public String toString() { - return "(" + _urn + " , " + _versionStamp + ")"; - } - - private static boolean equals(Object o1, Object o2) { - if (o1 != null) { - return o1.equals(o2); - } - return o2 == null; - } - - /*convenient method*/ - public static VersionedUrn of(String urn, String versionStamp) { - return new VersionedUrn(urn, versionStamp); - } - + private final String _urn; + private final String _versionStamp; + + public VersionedUrn(String urn, String versionStamp) { + _urn = urn; + _versionStamp = versionStamp; + } + + public String getUrn() { + return _urn; + } + + public String getVersionStamp() { + return _versionStamp; + } + + @SuppressWarnings("unchecked") + @Override + public boolean equals(Object obj) { + if (obj instanceof VersionedUrn) { + VersionedUrn other = (VersionedUrn) obj; + return equals(_urn, other._urn) && equals(_versionStamp, other._versionStamp); + } + return false; + } + + @Override + public int hashCode() { + int h1 = _urn != null ? _urn.hashCode() : 0; + int h2 = _versionStamp != null ? 
_versionStamp.hashCode() : 0; + return 31 * h1 + h2; + } + + @Override + public String toString() { + return "(" + _urn + " , " + _versionStamp + ")"; + } + + private static boolean equals(Object o1, Object o2) { + if (o1 != null) { + return o1.equals(o2); + } + return o2 == null; + } + + /*convenient method*/ + public static VersionedUrn of(String urn, String versionStamp) { + return new VersionedUrn(urn, versionStamp); + } } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java index f7e0b6c99e334..2bae15bd19354 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/common/urn/VersionedUrnUtils.java @@ -5,11 +5,9 @@ import java.util.SortedMap; import org.apache.commons.lang3.StringUtils; - public class VersionedUrnUtils { - private VersionedUrnUtils() { - } + private VersionedUrnUtils() {} public static Map convertVersionStamp(String versionStamp) { Map aspectVersionMap = new HashMap<>(); @@ -20,7 +18,8 @@ public static Map convertVersionStamp(String versionStamp) { for (String pair : aspectNameVersionPairs) { String[] tokens = pair.split(":"); if (tokens.length != 2) { - throw new IllegalArgumentException("Invalid version stamp cannot be parsed: " + versionStamp); + throw new IllegalArgumentException( + "Invalid version stamp cannot be parsed: " + versionStamp); } try { aspectVersionMap.put(tokens[0], Long.valueOf(tokens[1])); @@ -33,10 +32,13 @@ public static Map convertVersionStamp(String versionStamp) { } public static String constructVersionStamp(SortedMap versionStampMap) { - StringBuilder versionStamp = versionStampMap.entrySet().stream() - .collect(StringBuilder::new, (builder, entry) -> builder.append(entry.getKey()) - .append(":") - .append(entry.getValue()).append(";"), StringBuilder::append); + StringBuilder versionStamp = + versionStampMap.entrySet().stream() + .collect( + StringBuilder::new, + (builder, entry) -> + builder.append(entry.getKey()).append(":").append(entry.getValue()).append(";"), + StringBuilder::append); // trim off last ; return versionStamp.substring(0, versionStamp.length() - 1); } diff --git a/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java b/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java index 14949d9c946d9..880fcc2843333 100644 --- a/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java +++ b/li-utils/src/main/javaPegasus/com/linkedin/util/VersionedUrnCoercer.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.DirectCoercer; import com.linkedin.data.template.TemplateOutputCastException; - public class VersionedUrnCoercer implements DirectCoercer { static { Custom.registerCoercer(new VersionedUrnCoercer(), VersionedUrn.class); diff --git a/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java b/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java index c0ddbb710e2ee..ea878c41936ae 100644 --- a/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java +++ b/li-utils/src/test/java/com/linkedin/common/urn/DatasetFieldUrnTest.java @@ -5,7 +5,6 @@ import org.assertj.core.api.Assertions; import org.testng.annotations.Test; - public class DatasetFieldUrnTest { private static final String PLATFORM = "fooPlatform"; @@ -16,39 +15,45 @@ public class DatasetFieldUrnTest { @Test public void testSerialization() throws URISyntaxException { final 
String datasetFieldString = - String.format("urn:li:datasetField:(urn:li:dataset:(urn:li:dataPlatform:%s,%s,%s),%s)", PLATFORM, DATASET_NAME, - FABRIC_TYPE, FIELD_NAME); + String.format( + "urn:li:datasetField:(urn:li:dataset:(urn:li:dataPlatform:%s,%s,%s),%s)", + PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetFieldUrn datasetFieldUrn = DatasetFieldUrn.deserialize(datasetFieldString); final DatasetUrn datasetUrn = datasetFieldUrn.getDatasetEntity(); Assertions.assertThat(datasetFieldUrn.getFieldPathEntity()).isEqualTo(FIELD_NAME); Assertions.assertThat(datasetUrn.getDatasetNameEntity()).isEqualTo(DATASET_NAME); - Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()).isEqualTo(PLATFORM); + Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()) + .isEqualTo(PLATFORM); Assertions.assertThat(datasetUrn.getOriginEntity()).isEqualTo(FabricType.PROD); Assertions.assertThat(datasetFieldUrn.toString()) .isEqualTo(datasetFieldString) - .describedAs("serialization followed by deserialization should produce the same urn string"); + .describedAs( + "serialization followed by deserialization should produce the same urn string"); } @Test public void testCreateUrn() { - final DatasetFieldUrn datasetFieldUrn = new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); + final DatasetFieldUrn datasetFieldUrn = + new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetUrn datasetUrn = datasetFieldUrn.getDatasetEntity(); Assertions.assertThat(datasetFieldUrn.getFieldPathEntity()).isEqualTo(FIELD_NAME); Assertions.assertThat(datasetUrn.getDatasetNameEntity()).isEqualTo(DATASET_NAME); - Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()).isEqualTo(PLATFORM); + Assertions.assertThat(datasetUrn.getPlatformEntity().getPlatformNameEntity()) + .isEqualTo(PLATFORM); Assertions.assertThat(datasetUrn.getOriginEntity()).isEqualTo(FabricType.PROD); } @Test public void testUrnConstructors() { - final DatasetFieldUrn datasetFieldUrn1 = new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); + final DatasetFieldUrn datasetFieldUrn1 = + new DatasetFieldUrn(PLATFORM, DATASET_NAME, FABRIC_TYPE, FIELD_NAME); final DatasetUrn datasetUrn = datasetFieldUrn1.getDatasetEntity(); final DatasetFieldUrn datasetFieldUrn2 = new DatasetFieldUrn(datasetUrn, FIELD_NAME); Assertions.assertThat(datasetFieldUrn1).isEqualTo(datasetFieldUrn2); } -} \ No newline at end of file +} diff --git a/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java index 76668abf4e5ce..f2d58c80177fb 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/ModelUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.common.util; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.test.testing.AspectBar; import com.datahub.test.testing.AspectFoo; import com.datahub.test.testing.DeltaUnion; @@ -39,10 +42,6 @@ import org.testng.annotations.Test; import org.testng.collections.Lists; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class ModelUtilsTest { class ChildUrn extends Urn { @@ -71,7 +70,8 @@ public void testGetInvalidAspectClass() { @Test public void testGetValidAspectTypes() { - Set> validTypes = ModelUtils.getValidAspectTypes(EntityAspectUnion.class); + Set> validTypes = + 
ModelUtils.getValidAspectTypes(EntityAspectUnion.class); assertEquals(validTypes, ImmutableSet.of(AspectFoo.class, AspectBar.class)); } @@ -172,7 +172,8 @@ public void testGetUrnFromEntity() { public void testGetUrnFromRelationship() { FooUrn expectedSource = makeFooUrn(1); BarUrn expectedDestination = makeBarUrn(1); - RelationshipFoo relationship = new RelationshipFoo().setSource(expectedSource).setDestination(expectedDestination); + RelationshipFoo relationship = + new RelationshipFoo().setSource(expectedSource).setDestination(expectedDestination); Urn sourceUrn = ModelUtils.getSourceUrnFromRelationship(relationship); Urn destinationUrn = ModelUtils.getDestinationUrnFromRelationship(relationship); @@ -269,7 +270,8 @@ public void testNewSnapshot() { EntityAspectUnion aspectUnion = new EntityAspectUnion(); aspectUnion.setAspectFoo(foo); - EntitySnapshot snapshot = ModelUtils.newSnapshot(EntitySnapshot.class, urn, Lists.newArrayList(aspectUnion)); + EntitySnapshot snapshot = + ModelUtils.newSnapshot(EntitySnapshot.class, urn, Lists.newArrayList(aspectUnion)); assertEquals(snapshot.getUrn(), urn); assertEquals(snapshot.getAspects().size(), 1); @@ -289,7 +291,8 @@ public void testNewAspect() { public void testNewAspectAlias() { AspectFoo foo = new AspectFoo().setValue("foo"); - EntityAspectUnionAlias aspectUnion = ModelUtils.newAspectUnion(EntityAspectUnionAlias.class, foo); + EntityAspectUnionAlias aspectUnion = + ModelUtils.newAspectUnion(EntityAspectUnionAlias.class, foo); assertEquals(aspectUnion.getFoo(), foo); } @@ -337,18 +340,22 @@ public void testValidateCorrectUrnForSnapshot() { @Test public void testNewRelatioshipUnion() { - RelationshipFoo foo = new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); + RelationshipFoo foo = + new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); - RelationshipUnion relationshipUnion = ModelUtils.newRelationshipUnion(RelationshipUnion.class, foo); + RelationshipUnion relationshipUnion = + ModelUtils.newRelationshipUnion(RelationshipUnion.class, foo); assertEquals(relationshipUnion.getRelationshipFoo(), foo); } @Test public void testNewRelatioshipUnionAlias() { - RelationshipFoo foo = new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); + RelationshipFoo foo = + new RelationshipFoo().setDestination(makeFooUrn(1)).setSource(makeFooUrn(2)); - RelationshipUnionAlias relationshipUnion = ModelUtils.newRelationshipUnion(RelationshipUnionAlias.class, foo); + RelationshipUnionAlias relationshipUnion = + ModelUtils.newRelationshipUnion(RelationshipUnionAlias.class, foo); assertEquals(relationshipUnion.getFoo(), foo); } @@ -358,11 +365,14 @@ public void testGetMAETopicName() throws URISyntaxException { FooUrn urn = new FooUrn(1); AspectFoo foo = new AspectFoo().setValue("foo"); - assertEquals(ModelUtils.getAspectSpecificMAETopicName(urn, foo), "METADATA_AUDIT_EVENT_FOO_ASPECTFOO"); + assertEquals( + ModelUtils.getAspectSpecificMAETopicName(urn, foo), "METADATA_AUDIT_EVENT_FOO_ASPECTFOO"); PizzaUrn pizza = new PizzaUrn(1); AspectBar bar = new AspectBar().setValue("bar"); - assertEquals(ModelUtils.getAspectSpecificMAETopicName(pizza, bar), "METADATA_AUDIT_EVENT_PIZZA_ASPECTBAR"); + assertEquals( + ModelUtils.getAspectSpecificMAETopicName(pizza, bar), + "METADATA_AUDIT_EVENT_PIZZA_ASPECTBAR"); } @Test diff --git a/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java index 90514a498c67a..145ab2322adb0 100644 
--- a/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/RecordUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.common.util; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.test.testing.AspectBar; import com.datahub.test.testing.AspectBaz; import com.datahub.test.testing.AspectFoo; @@ -29,17 +32,16 @@ import org.apache.commons.io.IOUtils; import org.testng.annotations.Test; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class RecordUtilsTest { @Test public void testToJsonString() throws IOException { AspectFoo foo = new AspectFoo().setValue("foo"); String expected = - loadJsonFromResource("foo.json").replaceAll("\\s+", "").replaceAll("\\n", "").replaceAll("\\r", ""); + loadJsonFromResource("foo.json") + .replaceAll("\\s+", "") + .replaceAll("\\n", "") + .replaceAll("\\r", ""); String actual = RecordUtils.toJsonString(foo); @@ -55,7 +57,8 @@ public void testToRecordTemplate() throws IOException { assertEquals(actual, expected); - RecordTemplate actual2 = RecordUtils.toRecordTemplate(AspectFoo.class.getCanonicalName(), expected.data()); + RecordTemplate actual2 = + RecordUtils.toRecordTemplate(AspectFoo.class.getCanonicalName(), expected.data()); assertEquals(actual2.getClass(), AspectFoo.class); assertEquals(actual2, expected); @@ -71,7 +74,8 @@ public void testGetValidRecordDataSchemaField() { RecordDataSchema schema = ValidationUtils.getRecordSchema(AspectFoo.class); RecordDataSchema.Field expected = schema.getField("value"); - assertEquals(RecordUtils.getRecordDataSchemaField(new AspectFoo().setValue("foo"), "value"), expected); + assertEquals( + RecordUtils.getRecordDataSchemaField(new AspectFoo().setValue("foo"), "value"), expected); } @Test(expectedExceptions = InvalidSchemaException.class) @@ -112,7 +116,8 @@ public void testGetRecordTemplatePrimitiveField() throws IOException { assertTrue(RecordUtils.getRecordTemplateField(baz, "boolField", Boolean.class)); assertEquals(RecordUtils.getRecordTemplateField(baz, "stringField", String.class), "baz"); - assertEquals(RecordUtils.getRecordTemplateField(baz, "longField", Long.class), Long.valueOf(1234L)); + assertEquals( + RecordUtils.getRecordTemplateField(baz, "longField", Long.class), Long.valueOf(1234L)); } @Test @@ -127,9 +132,10 @@ public void testGetRecordTemplateUrnField() { public void testGetRecordTemplateWrappedField() throws IOException { AspectBaz baz = loadAspectBaz("baz.json"); - StringArray stringArray = RecordUtils.getRecordTemplateWrappedField(baz, "arrayField", StringArray.class); + StringArray stringArray = + RecordUtils.getRecordTemplateWrappedField(baz, "arrayField", StringArray.class); - assertEquals(stringArray.toArray(), new String[]{"1", "2", "3"}); + assertEquals(stringArray.toArray(), new String[] {"1", "2", "3"}); } @Test @@ -241,7 +247,10 @@ public void testGetFieldValueRecordType() { MixedRecord mixedRecord1 = new MixedRecord().setRecordField(foo1); PathSpec ps1f1 = MixedRecord.fields().recordField().value(); PathSpec ps1f2 = - MixedRecord.fields().nestedRecordField().foo().value(); // referencing a nullable record template field + MixedRecord.fields() + .nestedRecordField() + .foo() + .value(); // referencing a nullable record template field Optional o1f1 = RecordUtils.getFieldValue(mixedRecord1, ps1f1); Optional o1f2 = RecordUtils.getFieldValue(mixedRecord1, ps1f2); @@ -253,7 +262,8 @@ public void testGetFieldValueRecordType() { // 
case 2: referencing a field inside a RecordTemplate, two levels deep i.e. nested field AspectFoo foo2 = new AspectFoo().setValue("fooVal2"); - com.datahub.test.testing.EntityValue entityValue = new com.datahub.test.testing.EntityValue().setFoo(foo2); + com.datahub.test.testing.EntityValue entityValue = + new com.datahub.test.testing.EntityValue().setFoo(foo2); MixedRecord mixedRecord2 = new MixedRecord().setNestedRecordField(entityValue); PathSpec ps2 = MixedRecord.fields().nestedRecordField().foo().value(); @@ -268,7 +278,8 @@ public void testGetFieldValueArray() { // case 1: array of strings final MixedRecord mixedRecord1 = - new MixedRecord().setStringArray(new StringArray(Arrays.asList("val1", "val2", "val3", "val4"))); + new MixedRecord() + .setStringArray(new StringArray(Arrays.asList("val1", "val2", "val3", "val4"))); PathSpec ps1 = MixedRecord.fields().stringArray(); Object o1 = RecordUtils.getFieldValue(mixedRecord1, ps1).get(); @@ -293,20 +304,25 @@ public void testGetFieldValueArray() { // case 3: array of records is empty final MixedRecord mixedRecord3 = new MixedRecord().setRecordArray(new AspectFooArray()); - Object o3 = RecordUtils.getFieldValue(mixedRecord3, MixedRecord.fields().recordArray().items().value()).get(); + Object o3 = + RecordUtils.getFieldValue(mixedRecord3, MixedRecord.fields().recordArray().items().value()) + .get(); assertEquals(o3, new StringArray()); // case 4: referencing an index of array is not supported final MixedRecord mixedRecord4 = new MixedRecord().setRecordArray(aspectFooArray); - assertThrows(UnsupportedOperationException.class, + assertThrows( + UnsupportedOperationException.class, () -> RecordUtils.getFieldValue(mixedRecord4, "/recordArray/0/value")); // case 5: referencing nested field inside array of records, field being 2 levels deep AspectFoo f1 = new AspectFoo().setValue("val1"); AspectFoo f2 = new AspectFoo().setValue("val2"); - com.datahub.test.testing.EntityValue val1 = new com.datahub.test.testing.EntityValue().setFoo(f1); - com.datahub.test.testing.EntityValue val2 = new com.datahub.test.testing.EntityValue().setFoo(f2); + com.datahub.test.testing.EntityValue val1 = + new com.datahub.test.testing.EntityValue().setFoo(f1); + com.datahub.test.testing.EntityValue val2 = + new com.datahub.test.testing.EntityValue().setFoo(f2); EntityValueArray entityValues = new EntityValueArray(Arrays.asList(val1, val2)); final MixedRecord mixedRecord5 = new MixedRecord().setNestedRecordArray(entityValues); @@ -333,17 +349,21 @@ public void testGetFieldValueArray() { assertFalse(o7.isPresent()); } - @Test(description = "Test getFieldValue() when RecordTemplate has field of type array of primitive unions") + @Test( + description = + "Test getFieldValue() when RecordTemplate has field of type array of primitive unions") public void testGetFieldValueArrayOfPrimitiveUnions() { // case 1: array of unions of strings final MixedRecord mixedRecord1 = - new MixedRecord().setUnionArray(new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + new MixedRecord() + .setUnionArray( + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); PathSpec ps1 = MixedRecord.fields().unionArray(); Object o1 = RecordUtils.getFieldValue(mixedRecord1, ps1).get(); @@ -351,20 +371,24 @@ public void testGetFieldValueArrayOfPrimitiveUnions() { PathSpec ps2 = 
MixedRecord.fields().unionArray().items(); Object o2 = RecordUtils.getFieldValue(mixedRecord1, ps2).get(); - assertEquals(o1, new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + assertEquals( + o1, + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); assertEquals(ps1.toString(), "/unionArray"); - assertEquals(o2, new StringUnionArray(Arrays.asList( - StringUnion.create("val1"), - StringUnion.create("val2"), - StringUnion.create("val3"), - StringUnion.create("val4") - ))); + assertEquals( + o2, + new StringUnionArray( + Arrays.asList( + StringUnion.create("val1"), + StringUnion.create("val2"), + StringUnion.create("val3"), + StringUnion.create("val4")))); assertEquals(ps2.toString(), "/unionArray/*"); } @@ -381,8 +405,9 @@ public void testCapitalizeFirst() { } private AspectBaz loadAspectBaz(String resourceName) throws IOException { - return RecordUtils.toRecordTemplate(AspectBaz.class, - IOUtils.toString(ClassLoader.getSystemResourceAsStream(resourceName), StandardCharsets.UTF_8)); + return RecordUtils.toRecordTemplate( + AspectBaz.class, + IOUtils.toString( + ClassLoader.getSystemResourceAsStream(resourceName), StandardCharsets.UTF_8)); } - } diff --git a/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java b/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java index cb5ac62d71a1d..93875b0f06706 100644 --- a/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java +++ b/li-utils/src/test/java/com/linkedin/common/util/VersionedUrnUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.common.util; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.VersionedUrnUtils; import java.util.Comparator; import java.util.Map; @@ -7,9 +9,6 @@ import java.util.TreeMap; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class VersionedUrnUtilsTest { private static final String SCHEMA_METADATA = "schemaMetadata"; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java index b8b62782309b8..2ae9ee8ab14ea 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java @@ -5,28 +5,24 @@ import lombok.AllArgsConstructor; import lombok.Getter; - /** - * Represents a unique DataHub actor (i.e. principal). Defining characteristics of all DataHub Actors includes a + * Represents a unique DataHub actor (i.e. principal). Defining characteristics of all DataHub + * Actors includes a * - * a) Actor Type: A specific type of actor, e.g. CORP_USER or SERVICE_USER. - * b) Actor Id: A unique id for the actor. + *
<p>
a) Actor Type: A specific type of actor, e.g. CORP_USER or SERVICE_USER. b) Actor Id: A unique + * id for the actor. * - * These pieces of information are in turn used to construct an Entity Urn, which can be used as a primary key to fetch and update specific information - * about the actor. + *
<p>
These pieces of information are in turn used to construct an Entity Urn, which can be used as + * a primary key to fetch and update specific information about the actor. */ @Getter @AllArgsConstructor public class Actor { - /** - * The {@link ActorType} associated with a DataHub actor. - */ + /** The {@link ActorType} associated with a DataHub actor. */ private final ActorType type; - /** - * The unique id associated with a DataHub actor. - */ + /** The unique id associated with a DataHub actor. */ private final String id; /** @@ -37,6 +33,7 @@ public String toUrnStr() { if (Objects.requireNonNull(getType()) == ActorType.USER) { return String.format("urn:li:corpuser:%s", getId()); } - throw new IllegalArgumentException(String.format("Unrecognized ActorType %s provided", getType())); + throw new IllegalArgumentException( + String.format("Unrecognized ActorType %s provided", getType())); } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java index c41a30e57b2d6..4fc175cd4815e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/ActorType.java @@ -3,12 +3,10 @@ /** * A specific type of Actor on DataHub's platform. * - * Currently the only actor type officially supported, though in the future this may evolve - * to include service users. + *
<p>
Currently the only actor type officially supported, though in the future this may evolve to + * include service users. */ public enum ActorType { - /** - * A user actor, e.g. john smith - */ + /** A user actor, e.g. john smith */ USER, } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java index 71efedda56e5c..b53d868e6e878 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Authentication.java @@ -5,21 +5,21 @@ import java.util.Objects; import javax.annotation.Nonnull; - -/** - * Class representing an authenticated actor accessing DataHub. - */ +/** Class representing an authenticated actor accessing DataHub. */ public class Authentication { private final Actor authenticatedActor; private final String credentials; private final Map claims; - public Authentication(@Nonnull final Actor authenticatedActor, @Nonnull final String credentials) { + public Authentication( + @Nonnull final Actor authenticatedActor, @Nonnull final String credentials) { this(authenticatedActor, credentials, Collections.emptyMap()); } - public Authentication(@Nonnull final Actor authenticatedActor, @Nonnull final String credentials, + public Authentication( + @Nonnull final Actor authenticatedActor, + @Nonnull final String credentials, @Nonnull final Map claims) { this.authenticatedActor = Objects.requireNonNull(authenticatedActor); this.credentials = Objects.requireNonNull(credentials); @@ -34,7 +34,8 @@ public Actor getActor() { } /** - * @return Returns the credentials associated with the current request (e.g. the value of the "Authorization" header) + * @return Returns the credentials associated with the current request (e.g. the value of the + * "Authorization" header) */ public String getCredentials() { return this.credentials; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java index e95f891b853a5..3a59b23122e25 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationContext.java @@ -19,6 +19,5 @@ public static void remove() { AUTHENTICATION.remove(); } - private AuthenticationContext() { - } + private AuthenticationContext() {} } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java index 9fbac00d3aeb5..5928b258c5f80 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationException.java @@ -2,7 +2,6 @@ import com.datahub.plugins.auth.authentication.Authenticator; - /** * An {@link Exception} thrown when an {@link Authenticator} is unable to be resolved an instance of * {@link Authentication} for the current request. 
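Taken together, the reformatted authentication classes above compose in a straightforward way. A minimal sketch follows (illustrative only, not part of this patch; it relies solely on the constructors and accessors visible in the hunks above, and the actor id and credentials strings are placeholders):

    import com.datahub.authentication.Actor;
    import com.datahub.authentication.ActorType;
    import com.datahub.authentication.Authentication;

    public class AuthExample {
      public static void main(String[] args) {
        // USER is currently the only officially supported actor type.
        Actor actor = new Actor(ActorType.USER, "jdoe");

        // toUrnStr() renders USER actors as corpuser urns.
        System.out.println(actor.toUrnStr()); // urn:li:corpuser:jdoe

        // Pair the actor with the raw credentials from the inbound request,
        // e.g. the value of the "Authorization" header.
        Authentication auth = new Authentication(actor, "Basic dXNlcjpwYXNz");
        System.out.println(auth.getActor().getId()); // jdoe
      }
    }
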
diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java index 0a8e3cba3d07b..500248d6c7b4e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationExpiredException.java @@ -2,7 +2,6 @@ import com.datahub.plugins.auth.authentication.Authenticator; - /** * An {@link Exception} thrown when an {@link Authenticator} is unable to be resolve an instance of * {@link Authentication} for the current request. diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java index 5673bac5442b2..9aaf40df5a0f6 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticationRequest.java @@ -1,18 +1,16 @@ package com.datahub.authentication; import com.datahub.plugins.auth.authentication.Authenticator; -import lombok.Getter; - import java.util.Map; import java.util.Objects; import java.util.TreeMap; import javax.annotation.Nonnull; - +import lombok.Getter; /** * Request context provided to each {@link Authenticator} to perform Authentication. * - * Currently, this class only hold the inbound request's headers, but could certainly be extended + *
<p>
Currently, this class only hold the inbound request's headers, but could certainly be extended * to contain additional information like the request parameters, body, ip, etc as needed. */ @Getter @@ -27,7 +25,10 @@ public AuthenticationRequest(@Nonnull final Map requestHeaders) this("", "", requestHeaders); } - public AuthenticationRequest(@Nonnull String servletInfo, @Nonnull String pathInfo, @Nonnull final Map requestHeaders) { + public AuthenticationRequest( + @Nonnull String servletInfo, + @Nonnull String pathInfo, + @Nonnull final Map requestHeaders) { Objects.requireNonNull(requestHeaders); caseInsensitiveHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); caseInsensitiveHeaders.putAll(requestHeaders); diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java index 4c6ee071e5ca1..06a70d55c0802 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/AuthenticatorContext.java @@ -6,10 +6,10 @@ import java.util.Objects; import javax.annotation.Nonnull; - /** - * Context class to provide Authenticator implementations with concrete objects necessary for their correct workings. - * DataHub creates {@link AuthenticatorContext} instance and provides it as an argument to init method of {@link Authenticator} + * Context class to provide Authenticator implementations with concrete objects necessary for their + * correct workings. DataHub creates {@link AuthenticatorContext} instance and provides it as an + * argument to init method of {@link Authenticator} */ public class AuthenticatorContext { private final Map contextMap; @@ -21,10 +21,9 @@ public AuthenticatorContext(@Nonnull final Map context) { } /** - * - * @return contextMap The contextMap contains below key and value - * {@link com.datahub.plugins.PluginConstant#PLUGIN_HOME PLUGIN_HOME}: Directory path where plugin is installed - * + * @return contextMap The contextMap contains below key and value {@link + * com.datahub.plugins.PluginConstant#PLUGIN_HOME PLUGIN_HOME}: Directory path where plugin is + * installed */ @Nonnull public Map data() { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java index e159993a8a243..f8d08c6adbd3a 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java @@ -5,22 +5,22 @@ import java.util.Optional; import javax.annotation.Nonnull; - public class AuthUtil { public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, @Nonnull Optional maybeResourceSpec, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : privilegeGroup.getAuthorizedPrivilegeGroups()) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { + for (ConjunctivePrivilegeGroup andPrivilegeGroup : + privilegeGroup.getAuthorizedPrivilegeGroups()) { // If any conjunctive privilege group is authorized, then the entire request is authorized. if (isAuthorized(authorizer, actor, andPrivilegeGroup, maybeResourceSpec)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not authorized. 
+ // If none of the disjunctive privilege groups were authorized, then the entire request is not + // authorized. return false; } @@ -28,15 +28,16 @@ public static boolean isAuthorizedForResources( @Nonnull Authorizer authorizer, @Nonnull String actor, @Nonnull List> resourceSpecs, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : privilegeGroup.getAuthorizedPrivilegeGroups()) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { + for (ConjunctivePrivilegeGroup andPrivilegeGroup : + privilegeGroup.getAuthorizedPrivilegeGroups()) { // If any conjunctive privilege group is authorized, then the entire request is authorized. if (isAuthorizedForResources(authorizer, actor, andPrivilegeGroup, resourceSpecs)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not authorized. + // If none of the disjunctive privilege groups were authorized, then the entire request is not + // authorized. return false; } @@ -67,7 +68,8 @@ private static boolean isAuthorizedForResources( for (final String privilege : requiredPrivileges.getRequiredPrivileges()) { // Create and evaluate an Authorization request. for (Optional resourceSpec : resourceSpecs) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege, resourceSpec); + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege, resourceSpec); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { // Short circuit. @@ -78,5 +80,5 @@ private static boolean isAuthorizedForResources( return true; } - private AuthUtil() { } -} \ No newline at end of file + private AuthUtil() {} +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java index 9e75de3cbf44d..62889a50d2d96 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationRequest.java @@ -3,23 +3,18 @@ import java.util.Optional; import lombok.Value; - -/** - * A request to authorize a user for a specific privilege. - */ +/** A request to authorize a user for a specific privilege. */ @Value public class AuthorizationRequest { - /** - * The urn of the actor (corpuser) making the request. - */ + /** The urn of the actor (corpuser) making the request. */ String actorUrn; - /** - * The privilege that the user is requesting - */ + + /** The privilege that the user is requesting */ String privilege; + /** - * The resource that the user is requesting for, if applicable. If the privilege is a platform privilege - * this optional will be empty. + * The resource that the user is requesting for, if applicable. If the privilege is a platform + * privilege this optional will be empty. 
*/ Optional resourceSpec; } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java index 17d199be583e3..a8eea06dfab27 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizationResult.java @@ -3,39 +3,24 @@ import lombok.AllArgsConstructor; import lombok.Data; - -/** - * A result returned after requesting authorization for a particular privilege. - */ +/** A result returned after requesting authorization for a particular privilege. */ @Data @AllArgsConstructor public class AuthorizationResult { - /** - * The original authorization request - */ + /** The original authorization request */ AuthorizationRequest request; - /** - * The result type. Allow or deny the authorization request for the actor. - */ + /** The result type. Allow or deny the authorization request for the actor. */ public enum Type { - /** - * Allow the request - the requested actor is privileged. - */ + /** Allow the request - the requested actor is privileged. */ ALLOW, - /** - * Deny the request - the requested actor is not privileged. - */ + /** Deny the request - the requested actor is not privileged. */ DENY } - /** - * The decision - whether to allow or deny the request. - */ + /** The decision - whether to allow or deny the request. */ public Type type; - /** - * Optional message associated with the decision. Useful for debugging. - */ + /** Optional message associated with the decision. Useful for debugging. */ String message; } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java index 5a9990552bb34..0155c49fd9da7 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizedActors.java @@ -7,7 +7,6 @@ import lombok.Builder; import lombok.Value; - @Value @AllArgsConstructor(access = AccessLevel.PUBLIC) @Builder diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java index b79a4fa20c7ea..50bc749cd9921 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthorizerContext.java @@ -7,25 +7,21 @@ import lombok.AllArgsConstructor; import lombok.Data; - /** - * Context provided to an Authorizer on initialization. - * DataHub creates {@link AuthenticatorContext} instance and provides it as an argument to init method of {@link Authenticator} + * Context provided to an Authorizer on initialization. DataHub creates {@link AuthenticatorContext} + * instance and provides it as an argument to init method of {@link Authenticator} */ @Data @AllArgsConstructor public class AuthorizerContext { private final Map contextMap; - /** - * A utility for resolving an {@link EntitySpec} to resolved entity field values. - */ + /** A utility for resolving an {@link EntitySpec} to resolved entity field values. */ private EntitySpecResolver entitySpecResolver; /** - * - * @return contextMap The contextMap contains below key and value - * PLUGIN_DIRECTORY: Directory path where plugin is installed i.e. 
PLUGIN_HOME + * @return contextMap The contextMap contains below key and value PLUGIN_DIRECTORY: Directory path + * where plugin is installed i.e. PLUGIN_HOME */ @Nonnull public Map data() { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java index d47783268f70d..bc3a3c9f385a6 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java @@ -2,12 +2,10 @@ import java.util.List; - /** - * Represents a group of privileges that must ALL be required to - * authorize a request. + * Represents a group of privileges that must ALL be required to authorize a request. * - * That is, an AND of privileges. + *
<p>
That is, an AND of privileges. */ public class ConjunctivePrivilegeGroup { private final List _requiredPrivileges; diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java index 40bb22d036f0a..350476326da9f 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java @@ -5,7 +5,7 @@ /** * Represents a group of privilege groups, any of which must be authorized to authorize a request. * - * That is, an OR of privilege groups. + *
<p>
That is, an OR of privilege groups. */ public class DisjunctivePrivilegeGroup { private final List _authorizedPrivilegeGroups; @@ -17,4 +17,4 @@ public DisjunctivePrivilegeGroup(List authorizedPrivi public List getAuthorizedPrivilegeGroups() { return _authorizedPrivilegeGroups; } -} \ No newline at end of file +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java index 1258d958f2092..6b08cdb00e9ab 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntityFieldType.java @@ -1,44 +1,32 @@ package com.datahub.authorization; -/** - * List of entity field types to fetch for a given entity - */ +/** List of entity field types to fetch for a given entity */ public enum EntityFieldType { /** * Type of the entity (e.g. dataset, chart) + * * @deprecated */ @Deprecated RESOURCE_URN, /** * Urn of the entity + * * @deprecated */ @Deprecated RESOURCE_TYPE, - /** - * Type of the entity (e.g. dataset, chart) - */ + /** Type of the entity (e.g. dataset, chart) */ TYPE, - /** - * Urn of the entity - */ + /** Urn of the entity */ URN, - /** - * Owners of the entity - */ + /** Owners of the entity */ OWNER, - /** - * Domains of the entity - */ + /** Domains of the entity */ DOMAIN, - /** - * Groups of which the entity (only applies to corpUser) is a member - */ + /** Groups of which the entity (only applies to corpUser) is a member */ GROUP_MEMBERSHIP, - /** - * Data platform instance of resource - */ + /** Data platform instance of resource */ DATA_PLATFORM_INSTANCE } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java index 656bec0f44fc2..eb412cdeff14e 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpec.java @@ -3,21 +3,19 @@ import javax.annotation.Nonnull; import lombok.Value; - /** - * Details about the entities involved in the authorization process. It models the actor and the resource being acted - * upon. Resource types currently supported can be found inside of {@link com.linkedin.metadata.authorization.PoliciesConfig} + * Details about the entities involved in the authorization process. It models the actor and the + * resource being acted upon. Resource types currently supported can be found inside of {@link + * com.linkedin.metadata.authorization.PoliciesConfig} */ @Value public class EntitySpec { + /** The entity type. (dataset, chart, dashboard, corpGroup, etc). */ + @Nonnull String type; + /** - * The entity type. (dataset, chart, dashboard, corpGroup, etc). - */ - @Nonnull - String type; - /** - * The entity identity. Most often, this corresponds to the raw entity urn. (urn:li:corpGroup:groupId) + * The entity identity. Most often, this corresponds to the raw entity urn. 
+ * (urn:li:corpGroup:groupId) */ - @Nonnull - String entity; -} \ No newline at end of file + @Nonnull String entity; +} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java index 67347fbf87a87..0d482f3816e28 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/EntitySpecResolver.java @@ -1,11 +1,10 @@ package com.datahub.authorization; /** - * An Entity Spec Resolver is responsible for resolving a {@link EntitySpec} to a {@link ResolvedEntitySpec}. + * An Entity Spec Resolver is responsible for resolving a {@link EntitySpec} to a {@link + * ResolvedEntitySpec}. */ public interface EntitySpecResolver { - /** - Resolve a {@link EntitySpec} to a resolved entity spec. - **/ + /** Resolve a {@link EntitySpec} to a resolved entity spec. */ ResolvedEntitySpec resolve(EntitySpec entitySpec); } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java index 955a06fd54cb9..3e6287c335c97 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/FieldResolver.java @@ -10,32 +10,30 @@ import lombok.RequiredArgsConstructor; import lombok.Value; - /** - * Helper class for lazy resolution of fields - * Input resolveField function that is given as input will only be called when getFieldValuesFuture is called + * Helper class for lazy resolution of fields Input resolveField function that is given as input + * will only be called when getFieldValuesFuture is called */ @RequiredArgsConstructor public class FieldResolver { private final Supplier> resolveField; + @Getter(lazy = true) private final CompletableFuture fieldValuesFuture = resolveField.get(); private static final FieldValue EMPTY = new FieldValue(Collections.emptySet()); - /** - * Helper function that returns FieldResolver for precomputed values - */ + /** Helper function that returns FieldResolver for precomputed values */ public static FieldResolver getResolverFromValues(Set values) { - return new FieldResolver(() -> CompletableFuture.completedFuture(FieldValue.builder().values(values).build())); + return new FieldResolver( + () -> CompletableFuture.completedFuture(FieldValue.builder().values(values).build())); } - /** - * Helper function that returns FieldResolver given a fetchFieldValue function - */ - public static FieldResolver getResolverFromFunction(EntitySpec entitySpec, - Function fetchFieldValue) { - return new FieldResolver(() -> CompletableFuture.supplyAsync(() -> fetchFieldValue.apply(entitySpec))); + /** Helper function that returns FieldResolver given a fetchFieldValue function */ + public static FieldResolver getResolverFromFunction( + EntitySpec entitySpec, Function fetchFieldValue) { + return new FieldResolver( + () -> CompletableFuture.supplyAsync(() -> fetchFieldValue.apply(entitySpec))); } public static FieldValue emptyFieldValue() { @@ -43,7 +41,8 @@ public static FieldValue emptyFieldValue() { } /** - * Container for storing the field value, in case we need to extend this to have more types of field values + * Container for storing the field value, in case we need to extend this to have more types of + * field values */ @Value @Builder diff --git 
a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java index 7948766df5715..0a639bed1082b 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ResolvedEntitySpec.java @@ -8,15 +8,14 @@ import lombok.RequiredArgsConstructor; import lombok.ToString; - /** - * Wrapper around authorization request with field resolvers for lazily fetching the field values for each field type + * Wrapper around authorization request with field resolvers for lazily fetching the field values + * for each field type */ @RequiredArgsConstructor @ToString public class ResolvedEntitySpec { - @Getter - private final EntitySpec spec; + @Getter private final EntitySpec spec; private final Map fieldResolvers; public Set getFieldValues(EntityFieldType entityFieldType) { @@ -28,6 +27,7 @@ public Set getFieldValues(EntityFieldType entityFieldType) { /** * Fetch the owners for an entity. + * * @return a set of owner urns, or empty set if none exist. */ public Set getOwners() { @@ -39,6 +39,7 @@ public Set getOwners() { /** * Fetch the platform instance for a Resolved Resource Spec + * * @return a Platform Instance or null if one does not exist. */ @Nullable @@ -46,7 +47,12 @@ public String getDataPlatformInstance() { if (!fieldResolvers.containsKey(EntityFieldType.DATA_PLATFORM_INSTANCE)) { return null; } - Set dataPlatformInstance = fieldResolvers.get(EntityFieldType.DATA_PLATFORM_INSTANCE).getFieldValuesFuture().join().getValues(); + Set dataPlatformInstance = + fieldResolvers + .get(EntityFieldType.DATA_PLATFORM_INSTANCE) + .getFieldValuesFuture() + .join() + .getValues(); if (dataPlatformInstance.size() > 0) { return dataPlatformInstance.stream().findFirst().get(); } @@ -55,12 +61,17 @@ public String getDataPlatformInstance() { /** * Fetch the group membership for an entity. + * * @return a set of groups urns, or empty set if none exist. 
*/ public Set getGroupMembership() { if (!fieldResolvers.containsKey(EntityFieldType.GROUP_MEMBERSHIP)) { return Collections.emptySet(); } - return fieldResolvers.get(EntityFieldType.GROUP_MEMBERSHIP).getFieldValuesFuture().join().getValues(); + return fieldResolvers + .get(EntityFieldType.GROUP_MEMBERSHIP) + .getFieldValuesFuture() + .join() + .getValues(); } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java index 474dd7363e495..3b8406ad5ed5a 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/Plugin.java @@ -1,7 +1,4 @@ package com.datahub.plugins; -/** - * A tag interface for plugin - */ -public interface Plugin { -} +/** A tag interface for plugin */ +public interface Plugin {} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java index 03afc06af7f3c..ac72fc9b8f816 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/PluginConstant.java @@ -3,6 +3,5 @@ public class PluginConstant { public static final String PLUGIN_HOME = "PLUGIN_HOME"; - private PluginConstant() { - } + private PluginConstant() {} } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java index b7cf80384564b..6485495608773 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authentication/Authenticator.java @@ -10,30 +10,36 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** - * An {@link Authenticator}'s job is to authenticate an inbound request by resolving the provided {@link AuthenticationRequest} - * to an instance of {@link Authentication}, which includes an authenticated {@link Actor} within. + * An {@link Authenticator}'s job is to authenticate an inbound request by resolving the provided + * {@link AuthenticationRequest} to an instance of {@link Authentication}, which includes an + * authenticated {@link Actor} within. * - * In the case that {@link Authentication} cannot be resolved, for example because the request is missing the required - * authentication information, an {@link AuthenticationException} may be thrown. + *
<p>
In the case that {@link Authentication} cannot be resolved, for example because the request is + * missing the required authentication information, an {@link AuthenticationException} may be + * thrown. */ public interface Authenticator extends Plugin { /** * Initialize the Authenticator. Invoked once at boot time. * - * @param authenticatorConfig config provided to the authenticator derived from the Metadata Service YAML config. This - * config comes from the "plugins[].params.configs" configuration. - * @param context nullable configuration objects that are potentially required by an Authenticator instance. + * @param authenticatorConfig config provided to the authenticator derived from the Metadata + * Service YAML config. This config comes from the "plugins[].params.configs" configuration. + * @param context nullable configuration objects that are potentially required by an Authenticator + * instance. */ - void init(@Nonnull final Map authenticatorConfig, @Nullable final AuthenticatorContext context); + void init( + @Nonnull final Map authenticatorConfig, + @Nullable final AuthenticatorContext context); /** * Authenticates an inbound request given an instance of the {@link AuthenticationRequest}. - * @param authenticationRequest authentication request {@link AuthenticationRequest} that need to be authenticated - * If the request is authenticated successfully, an instance of {@link Authentication} is returned. - * If the request cannot be authenticated, returns "null" or throws an {@link AuthenticationException}. + * + * @param authenticationRequest authentication request {@link AuthenticationRequest} that need to + * be authenticated If the request is authenticated successfully, an instance of {@link + * Authentication} is returned. If the request cannot be authenticated, returns "null" or + * throws an {@link AuthenticationException}. */ @Nullable Authentication authenticate(@Nonnull final AuthenticationRequest authenticationRequest) diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java index c731a3ec987c1..a6baf0b5b282c 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/plugins/auth/authorization/Authorizer.java @@ -10,27 +10,27 @@ import java.util.Optional; import javax.annotation.Nonnull; - /** - * An Authorizer is responsible for determining whether an actor should be granted a specific privilege. + * An Authorizer is responsible for determining whether an actor should be granted a specific + * privilege. */ public interface Authorizer extends Plugin { /** * Initialize the Authorizer. Invoked once at boot time. * - * @param authorizerConfig config provided to the authenticator derived from the Metadata Service YAML config. This - * config comes from the "authorization.authorizers.config" configuration. + * @param authorizerConfig config provided to the authenticator derived from the Metadata Service + * YAML config. This config comes from the "authorization.authorizers.config" configuration. */ - void init(@Nonnull final Map authorizerConfig, @Nonnull final AuthorizerContext ctx); + void init( + @Nonnull final Map authorizerConfig, @Nonnull final AuthorizerContext ctx); - /** - * Authorizes an action based on the actor, the resource, and required privileges. 
- */ + /** Authorizes an action based on the actor, the resource, and required privileges. */ AuthorizationResult authorize(@Nonnull final AuthorizationRequest request); /** - * Retrieves the current list of actors authorized to for a particular privilege against - * an optional resource + * Retrieves the current list of actors authorized to for a particular privilege against an + * optional resource */ - AuthorizedActors authorizedActors(final String privilege, final Optional resourceSpec); + AuthorizedActors authorizedActors( + final String privilege, final Optional resourceSpec); } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java index d8d66ddeeb648..2ac16091128a2 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/datahub/metadata/dao/producer/BaseMetadataEventProducer.java @@ -1,25 +1,25 @@ package com.datahub.metadata.dao.producer; -import com.linkedin.common.urn.Urn; import com.datahub.util.ModelUtils; +import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.UnionTemplate; import javax.annotation.Nonnull; import javax.annotation.Nullable; - /** * A base class for all metadata event producers. * - *
<p>
See http://go/gma for more details. + *
<p>
See http://go/gma for more details. */ -public abstract class BaseMetadataEventProducer { +public abstract class BaseMetadataEventProducer< + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> { protected final Class _snapshotClass; protected final Class _aspectUnionClass; - public BaseMetadataEventProducer(@Nonnull Class snapshotClass, - @Nonnull Class aspectUnionClass) { + public BaseMetadataEventProducer( + @Nonnull Class snapshotClass, @Nonnull Class aspectUnionClass) { ModelUtils.validateSnapshotAspect(snapshotClass, aspectUnionClass); _snapshotClass = snapshotClass; _aspectUnionClass = aspectUnionClass; @@ -32,8 +32,8 @@ public BaseMetadataEventProducer(@Nonnull Class snapshotClass, * @param newValue the proposed new value for the metadata * @param must be a supported aspect type in {@code ASPECT_UNION} */ - public abstract void produceSnapshotBasedMetadataChangeEvent(@Nonnull URN urn, - @Nonnull ASPECT newValue); + public abstract void produceSnapshotBasedMetadataChangeEvent( + @Nonnull URN urn, @Nonnull ASPECT newValue); /** * Produces a Metadata Audit Event (MAE) after a metadata aspect is updated for an entity. @@ -43,17 +43,17 @@ public abstract void produceSnapshotBasedMetadat * @param newValue the value after the update * @param must be a supported aspect type in {@code ASPECT_UNION} */ - public abstract void produceMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); + public abstract void produceMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); /** - * Produces an aspect specific Metadata Audit Event (MAE) after a metadata aspect is updated for an entity. + * Produces an aspect specific Metadata Audit Event (MAE) after a metadata aspect is updated for + * an entity. * * @param urn {@link Urn} of the entity * @param oldValue the value prior to the update, or null if there's none. * @param newValue the value after the update */ - public abstract void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); + public abstract void produceAspectSpecificMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue); } - diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java index 00b5bb75d901b..26b48449c1c2f 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java @@ -24,8 +24,9 @@ import org.apache.kafka.clients.producer.ProducerRecord; /** - *
<p>
The topic names that this emits to can be controlled by constructing this with a {@link TopicConvention}. - * If none is given, defaults to a {@link TopicConventionImpl} with the default delimiter of an underscore (_). + * The topic names that this emits to can be controlled by constructing this with a {@link + * TopicConvention}. If none is given, defaults to a {@link TopicConventionImpl} with the default + * delimiter of an underscore (_). */ @Slf4j public class KafkaEventProducer implements EventProducer { @@ -41,8 +42,10 @@ public class KafkaEventProducer implements EventProducer { * @param topicConvention the convention to use to get kafka topic names * @param kafkaHealthChecker The {@link Callback} to invoke when the request is completed */ - public KafkaEventProducer(@Nonnull final Producer producer, - @Nonnull final TopicConvention topicConvention, @Nonnull final KafkaHealthChecker kafkaHealthChecker) { + public KafkaEventProducer( + @Nonnull final Producer producer, + @Nonnull final TopicConvention topicConvention, + @Nonnull final KafkaHealthChecker kafkaHealthChecker) { _producer = producer; _topicConvention = topicConvention; _kafkaHealthChecker = kafkaHealthChecker; @@ -50,13 +53,16 @@ public KafkaEventProducer(@Nonnull final Producer produceMetadataChangeLog(@Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, + public Future produceMetadataChangeLog( + @Nonnull final Urn urn, + @Nonnull AspectSpec aspectSpec, @Nonnull final MetadataChangeLog metadataChangeLog) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeLog: %s", - urn, - metadataChangeLog)); + log.debug( + String.format( + "Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeLog: %s", + urn, metadataChangeLog)); record = EventUtils.pegasusToAvroMCL(metadataChangeLog); } catch (IOException e) { log.error(String.format("Failed to convert Pegasus MAE to Avro: %s", metadataChangeLog), e); @@ -67,38 +73,42 @@ record = EventUtils.pegasusToAvroMCL(metadataChangeLog); if (aspectSpec.isTimeseries()) { topic = _topicConvention.getMetadataChangeLogTimeseriesTopicName(); } - return _producer.send(new ProducerRecord(topic, urn.toString(), record), - _kafkaHealthChecker.getKafkaCallBack("MCL", urn.toString())); + return _producer.send( + new ProducerRecord(topic, urn.toString(), record), + _kafkaHealthChecker.getKafkaCallBack("MCL", urn.toString())); } @Override @WithSpan - public Future produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull final MetadataChangeProposal metadataChangeProposal) { + public Future produceMetadataChangeProposal( + @Nonnull final Urn urn, @Nonnull final MetadataChangeProposal metadataChangeProposal) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", - urn, - metadataChangeProposal)); + log.debug( + String.format( + "Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", + urn, metadataChangeProposal)); record = EventUtils.pegasusToAvroMCP(metadataChangeProposal); } catch (IOException e) { - log.error(String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); + log.error( + String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); throw new ModelConversionException("Failed to convert Pegasus MCP to Avro", e); } String topic = _topicConvention.getMetadataChangeProposalTopicName(); - return _producer.send(new ProducerRecord(topic, 
urn.toString(), record), - _kafkaHealthChecker.getKafkaCallBack("MCP", urn.toString())); + return _producer.send( + new ProducerRecord(topic, urn.toString(), record), + _kafkaHealthChecker.getKafkaCallBack("MCP", urn.toString())); } @Override - public Future producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { + public Future producePlatformEvent( + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { GenericRecord record; try { - log.debug(String.format("Converting Pegasus Event to Avro Event urn %s\nEvent: %s", - name, - event)); + log.debug( + String.format("Converting Pegasus Event to Avro Event urn %s\nEvent: %s", name, event)); record = EventUtils.pegasusToAvroPE(event); } catch (IOException e) { log.error(String.format("Failed to convert Pegasus Platform Event to Avro: %s", event), e); @@ -106,8 +116,9 @@ record = EventUtils.pegasusToAvroPE(event); } final String topic = _topicConvention.getPlatformEventTopicName(); - return _producer.send(new ProducerRecord(topic, key == null ? name : key, record), - _kafkaHealthChecker.getKafkaCallBack("Platform Event", name)); + return _producer.send( + new ProducerRecord(topic, key == null ? name : key, record), + _kafkaHealthChecker.getKafkaCallBack("Platform Event", name)); } @Override @@ -117,12 +128,17 @@ public void produceDataHubUpgradeHistoryEvent(@Nonnull DataHubUpgradeHistoryEven log.debug(String.format("Converting Pegasus Event to Avro Event\nEvent: %s", event)); record = EventUtils.pegasusToAvroDUHE(event); } catch (IOException e) { - log.error(String.format("Failed to convert Pegasus DataHub Upgrade History Event to Avro: %s", event), e); + log.error( + String.format( + "Failed to convert Pegasus DataHub Upgrade History Event to Avro: %s", event), + e); throw new ModelConversionException("Failed to convert Pegasus Platform Event to Avro", e); } final String topic = _topicConvention.getDataHubUpgradeHistoryTopicName(); - _producer.send(new ProducerRecord(topic, event.getVersion(), record), _kafkaHealthChecker - .getKafkaCallBack("History Event", "Event Version: " + event.getVersion())); + _producer.send( + new ProducerRecord(topic, event.getVersion(), record), + _kafkaHealthChecker.getKafkaCallBack( + "History Event", "Event Version: " + event.getVersion())); } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java index 8fc89a8ddd5ed..1bfd829617e09 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaHealthChecker.java @@ -2,6 +2,10 @@ import com.codahale.metrics.MetricRegistry; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.producer.Callback; @@ -10,107 +14,108 @@ import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Component; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.stream.Collectors; - @Slf4j @EnableScheduling @Component public class KafkaHealthChecker { - @Value("${kafka.producer.deliveryTimeout}") - 
private long kafkaProducerDeliveryTimeout; - - private final Set messagesInProgress = ConcurrentHashMap.newKeySet(); - - public Callback getKafkaCallBack(String eventType, String entityDesc) { - final MessageLog tracking = MessageLog.track(entityDesc, kafkaProducerDeliveryTimeout); - sendMessageStarted(tracking); - return (metadata, e) -> { - sendMessageEnded(tracking); - if (e != null) { - log.error(String.format("Failed to emit %s for entity %s", eventType, entityDesc), e); - MetricUtils.counter(this.getClass(), - MetricRegistry.name("producer_failed_count", eventType.replaceAll(" ", "_"))).inc(); - } else { - log.debug(String.format( - "Successfully emitted %s for entity %s at offset %s, partition %s, topic %s", - eventType, entityDesc, metadata.offset(), metadata.partition(), metadata.topic())); - } - }; + @Value("${kafka.producer.deliveryTimeout}") + private long kafkaProducerDeliveryTimeout; + + private final Set messagesInProgress = ConcurrentHashMap.newKeySet(); + + public Callback getKafkaCallBack(String eventType, String entityDesc) { + final MessageLog tracking = MessageLog.track(entityDesc, kafkaProducerDeliveryTimeout); + sendMessageStarted(tracking); + return (metadata, e) -> { + sendMessageEnded(tracking); + if (e != null) { + log.error(String.format("Failed to emit %s for entity %s", eventType, entityDesc), e); + MetricUtils.counter( + this.getClass(), + MetricRegistry.name("producer_failed_count", eventType.replaceAll(" ", "_"))) + .inc(); + } else { + log.debug( + String.format( + "Successfully emitted %s for entity %s at offset %s, partition %s, topic %s", + eventType, entityDesc, metadata.offset(), metadata.partition(), metadata.topic())); + } + }; + } + + private void sendMessageStarted(MessageLog messageLog) { + messagesInProgress.add(messageLog); + } + + private void sendMessageEnded(MessageLog messageLog) { + messagesInProgress.remove(messageLog); + } + + @Scheduled(cron = "0/60 * * * * ?") + private synchronized void periodicKafkaHealthChecker() { + long moment = System.currentTimeMillis(); + Set oldItems = + messagesInProgress.stream() + .filter(item -> item.expectedMilli < moment) + .collect(Collectors.toSet()); + + if (oldItems.size() > 0) { + Map itemCounts = + oldItems.stream() + .collect(Collectors.groupingBy(MessageLog::getEntityDesc, Collectors.counting())); + log.error( + String.format( + "Kafka Health Check Failed. 
Old message(s) were waiting to be sent: %s", itemCounts)); + messagesInProgress.removeAll(oldItems); } + } - private void sendMessageStarted(MessageLog messageLog) { - messagesInProgress.add(messageLog); + @Getter + static class MessageLog { + private final String entityDesc; + private final long uniqueMessageId; + private final long expectedMilli; + private static long lastMoment = 0L; + + public static MessageLog track(String entityDesc, long maxDelayMilli) { + return new MessageLog(entityDesc, maxDelayMilli); } - private void sendMessageEnded(MessageLog messageLog) { - messagesInProgress.remove(messageLog); + private MessageLog(String entityDesc, long maxDelayMilli) { + this.entityDesc = entityDesc; + this.uniqueMessageId = getNextUniqueMoment(); + this.expectedMilli = this.uniqueMessageId + maxDelayMilli; } - @Scheduled(cron = "0/60 * * * * ?") - private synchronized void periodicKafkaHealthChecker() { - long moment = System.currentTimeMillis(); - Set oldItems = messagesInProgress.stream() - .filter(item -> item.expectedMilli < moment) - .collect(Collectors.toSet()); - - if (oldItems.size() > 0) { - Map itemCounts = oldItems.stream() - .collect(Collectors.groupingBy(MessageLog::getEntityDesc, Collectors.counting())); - log.error(String.format("Kafka Health Check Failed. Old message(s) were waiting to be sent: %s", itemCounts)); - messagesInProgress.removeAll(oldItems); - } + private synchronized long getNextUniqueMoment() { + long moment = System.currentTimeMillis(); + lastMoment = moment != lastMoment ? moment : ++lastMoment; + return lastMoment; } - @Getter - static class MessageLog { - private final String entityDesc; - private final long uniqueMessageId; - private final long expectedMilli; - private static long lastMoment = 0L; - - - public static MessageLog track(String entityDesc, long maxDelayMilli) { - return new MessageLog(entityDesc, maxDelayMilli); - } - private MessageLog(String entityDesc, long maxDelayMilli) { - this.entityDesc = entityDesc; - this.uniqueMessageId = getNextUniqueMoment(); - this.expectedMilli = this.uniqueMessageId + maxDelayMilli; - } - - private synchronized long getNextUniqueMoment() { - long moment = System.currentTimeMillis(); - lastMoment = moment != lastMoment ? 
moment : ++lastMoment; - return lastMoment; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - MessageLog that = (MessageLog) o; - - if (uniqueMessageId != that.uniqueMessageId) { - return false; - } - return entityDesc.equals(that.entityDesc); - } - - @Override - public int hashCode() { - int result = entityDesc.hashCode(); - result = 31 * result + (int) (uniqueMessageId ^ (uniqueMessageId >>> 32)); - return result; - } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + MessageLog that = (MessageLog) o; + + if (uniqueMessageId != that.uniqueMessageId) { + return false; + } + return entityDesc.equals(that.entityDesc); } + @Override + public int hashCode() { + int result = entityDesc.hashCode(); + result = 31 * result + (int) (uniqueMessageId ^ (uniqueMessageId >>> 32)); + return result; + } + } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java index 8b4db36ba27ff..765ee8c0736f2 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaMetadataEventProducer.java @@ -29,15 +29,16 @@ import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; - /** * A Kafka implementation of {@link BaseMetadataEventProducer}. * - *
<p>
The topic names that this emits to can be controlled by constructing this with a {@link TopicConvention}. If - * none is given, defaults to a {@link TopicConventionImpl} with the default delimiter of an underscore (_). + *
<p>
The topic names that this emits to can be controlled by constructing this with a {@link + * TopicConvention}. If none is given, defaults to a {@link TopicConventionImpl} with the default + * delimiter of an underscore (_). */ @Slf4j -public class KafkaMetadataEventProducer +public class KafkaMetadataEventProducer< + SNAPSHOT extends RecordTemplate, ASPECT_UNION extends UnionTemplate, URN extends Urn> extends BaseMetadataEventProducer { private final Producer _producer; @@ -52,10 +53,11 @@ public class KafkaMetadataEventProducer snapshotClass, - @Nonnull Class aspectUnionClass, - @Nonnull Producer producer, - @Nonnull TopicConvention topicConvention) { + public KafkaMetadataEventProducer( + @Nonnull Class snapshotClass, + @Nonnull Class aspectUnionClass, + @Nonnull Producer producer, + @Nonnull TopicConvention topicConvention) { this(snapshotClass, aspectUnionClass, producer, topicConvention, null); } @@ -68,11 +70,12 @@ public KafkaMetadataEventProducer(@Nonnull Class snapshotClass, * @param topicConvention the convention to use to get kafka topic names * @param callback The {@link Callback} to invoke when the request is completed */ - public KafkaMetadataEventProducer(@Nonnull Class snapshotClass, - @Nonnull Class aspectUnionClass, - @Nonnull Producer producer, - @Nonnull TopicConvention topicConvention, - @Nullable Callback callback) { + public KafkaMetadataEventProducer( + @Nonnull Class snapshotClass, + @Nonnull Class aspectUnionClass, + @Nonnull Producer producer, + @Nonnull TopicConvention topicConvention, + @Nullable Callback callback) { super(snapshotClass, aspectUnionClass); _producer = producer; _callback = Optional.ofNullable(callback); @@ -80,8 +83,8 @@ public KafkaMetadataEventProducer(@Nonnull Class snapshotClass, } @Override - public void produceSnapshotBasedMetadataChangeEvent(@Nonnull URN urn, - @Nonnull ASPECT newValue) { + public void produceSnapshotBasedMetadataChangeEvent( + @Nonnull URN urn, @Nonnull ASPECT newValue) { MetadataChangeEvent metadataChangeEvent = new MetadataChangeEvent(); metadataChangeEvent.setProposedSnapshot(makeSnapshot(urn, newValue)); @@ -93,16 +96,20 @@ record = EventUtils.pegasusToAvroMCE(metadataChangeEvent); } if (_callback.isPresent()) { - _producer.send(new ProducerRecord(_topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record), + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record), _callback.get()); } else { - _producer.send(new ProducerRecord(_topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record)); + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataChangeEventTopicName(), urn.toString(), record)); } } @Override - public void produceMetadataAuditEvent(@Nonnull URN urn, @Nullable ASPECT oldValue, - @Nonnull ASPECT newValue) { + public void produceMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { MetadataAuditEvent metadataAuditEvent = new MetadataAuditEvent(); metadataAuditEvent.setNewSnapshot(makeSnapshot(urn, newValue)); @@ -118,16 +125,20 @@ record = EventUtils.pegasusToAvroMAE(metadataAuditEvent); } if (_callback.isPresent()) { - _producer.send(new ProducerRecord(_topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record), + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record), _callback.get()); } else { - _producer.send(new ProducerRecord(_topicConvention.getMetadataAuditEventTopicName(), 
urn.toString(), record)); + _producer.send( + new ProducerRecord( + _topicConvention.getMetadataAuditEventTopicName(), urn.toString(), record)); } } @Override - public void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn, - @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { + public void produceAspectSpecificMetadataAuditEvent( + @Nonnull URN urn, @Nullable ASPECT oldValue, @Nonnull ASPECT newValue) { // Aspect Specific MAE not supported. // TODO: Remove references to this class. throw new UnsupportedOperationException(); @@ -139,7 +150,8 @@ private Snapshot makeSnapshot(@Nonnull URN urn, @Nonnull RecordTemplate value) { List aspects = new ArrayList<>(); aspects.add(ModelUtils.newAspectUnion(_aspectUnionClass, value)); - RecordUtils.setSelectedRecordTemplateInUnion(snapshot, ModelUtils.newSnapshot(_snapshotClass, urn, aspects)); + RecordUtils.setSelectedRecordTemplateInUnion( + snapshot, ModelUtils.newSnapshot(_snapshotClass, urn, aspects)); return snapshot; } @@ -147,4 +159,4 @@ private Snapshot makeSnapshot(@Nonnull URN urn, @Nonnull RecordTemplate value) { static boolean isValidAspectSpecificTopic(@Nonnull String topic) { return Arrays.stream(Topics.class.getFields()).anyMatch(field -> field.getName().equals(topic)); } -} \ No newline at end of file +} diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java index 2622404d03939..ca17ed4aa12d0 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaProducerCallback.java @@ -4,7 +4,6 @@ import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.RecordMetadata; - @Slf4j public class KafkaProducerCallback implements Callback { @Override diff --git a/metadata-events/mxe-avro/build.gradle b/metadata-events/mxe-avro/build.gradle index 9d11eeb160ff0..3aebc6bb1004d 100644 --- a/metadata-events/mxe-avro/build.gradle +++ b/metadata-events/mxe-avro/build.gradle @@ -47,4 +47,4 @@ jar { clean { delete 'src' -} \ No newline at end of file +} diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java index df06d1bae28e0..5611e4356bb64 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Configs.java @@ -8,30 +8,32 @@ import java.util.Map; import org.apache.avro.Schema; - public class Configs { public static final Map FABRIC_SCHEMA_REGISTRY_MAP = - Collections.unmodifiableMap(new HashMap() { - { - put("ei", "http://1.schemaregistry.ei4.atd.int.linkedin.com:10252"); - put("corp", "http://1.schemaregistry.corp-lca1.atd.corp.linkedin.com:10252"); - } - }); + Collections.unmodifiableMap( + new HashMap() { + { + put("ei", "http://1.schemaregistry.ei4.atd.int.linkedin.com:10252"); + put("corp", "http://1.schemaregistry.corp-lca1.atd.corp.linkedin.com:10252"); + } + }); - public static final Map TOPIC_SCHEMA_MAP = Collections.unmodifiableMap(new HashMap() { - { - put(Topics.METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); - put(Topics.METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); - put(Topics.FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); + 
public static final Map TOPIC_SCHEMA_MAP = + Collections.unmodifiableMap( + new HashMap() { + { + put(Topics.METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); + put(Topics.METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); + put(Topics.FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); - put(Topics.DEV_METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); - put(Topics.DEV_METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); - put(Topics.DEV_FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); - } - }); + put(Topics.DEV_METADATA_AUDIT_EVENT, MetadataAuditEvent.SCHEMA$); + put(Topics.DEV_METADATA_CHANGE_EVENT, MetadataChangeEvent.SCHEMA$); + put(Topics.DEV_FAILED_METADATA_CHANGE_EVENT, FailedMetadataChangeEvent.SCHEMA$); + } + }); private Configs() { // Util class } -} \ No newline at end of file +} diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java index 463abfdeca845..c61330565bcbf 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConvention.java @@ -5,65 +5,55 @@ import javax.annotation.Nonnull; import org.apache.avro.specific.SpecificRecord; - /** * The convention for naming kafka topics. * - *
<p>
Different companies may have different naming conventions or styles for their kafka topics. Namely, companies - * should pick _ or . as a delimiter, but not both, as they collide in metric names. + *
<p>
Different companies may have different naming conventions or styles for their kafka topics. + * Namely, companies should pick _ or . as a delimiter, but not both, as they collide in metric + * names. */ public interface TopicConvention { /** - * The name of the metadata change event (v4) kafka topic. - * Note that MetadataChangeEvents are deprecated, replaced by {@link MetadataChangeProposal}. + * The name of the metadata change event (v4) kafka topic. Note that MetadataChangeEvents are + * deprecated, replaced by {@link MetadataChangeProposal}. */ @Nonnull @Deprecated String getMetadataChangeEventTopicName(); /** - * The name of the metadata audit event (v4) kafka topic. - * Note that MetadataAuditEvents are deprecated, replaced by {@link MetadataChangeLog}. + * The name of the metadata audit event (v4) kafka topic. Note that MetadataAuditEvents are + * deprecated, replaced by {@link MetadataChangeLog}. */ @Nonnull @Deprecated String getMetadataAuditEventTopicName(); /** - * The name of the failed metadata change event (v4) kafka topic. - * Note that FailedMetadataChangeEvents are deprecated, replaced by {@link FailedMetadataChangeProposal}. + * The name of the failed metadata change event (v4) kafka topic. Note that + * FailedMetadataChangeEvents are deprecated, replaced by {@link FailedMetadataChangeProposal}. */ @Nonnull @Deprecated String getFailedMetadataChangeEventTopicName(); - /** - * The name of the metadata change proposal kafka topic. - */ + /** The name of the metadata change proposal kafka topic. */ @Nonnull String getMetadataChangeProposalTopicName(); - /** - * The name of the metadata change log kafka topic. - */ + /** The name of the metadata change log kafka topic. */ @Nonnull String getMetadataChangeLogVersionedTopicName(); - /** - * The name of the metadata change log kafka topic with limited retention. - */ + /** The name of the metadata change log kafka topic with limited retention. */ @Nonnull String getMetadataChangeLogTimeseriesTopicName(); - /** - * The name of the failed metadata change proposal kafka topic. - */ + /** The name of the failed metadata change proposal kafka topic. */ @Nonnull String getFailedMetadataChangeProposalTopicName(); - /** - * The name of the platform event topic. - */ + /** The name of the platform event topic. */ @Nonnull String getPlatformEventTopicName(); @@ -77,9 +67,7 @@ public interface TopicConvention { @Deprecated String getMetadataChangeEventTopicName(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); - /** - * The name of the DataHub Upgrade history topic. - */ + /** The name of the DataHub Upgrade history topic. */ String getDataHubUpgradeHistoryTopicName(); /** @@ -89,7 +77,8 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class getMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); + Class getMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); /** * Returns the name of the metadata audit event (v5) kafka topic. @@ -108,8 +97,8 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class getMetadataAuditEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); - + Class getMetadataAuditEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); /** * Returns the name of the failed metadata change event (v5) kafka topic. 
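As background for the TopicConventionImpl hunks that follow: the placeholder-substitution naming scheme their javadoc describes can be sketched in isolation. This is illustrative only and not part of the patch; the placeholder token values and the example pattern below are assumed for demonstration (the real constants live elsewhere in TopicConventionImpl), while the chained replace() calls mirror buildEventName as it appears in the diff.

// Illustrative sketch, not part of the patch. Shows how a pattern such as
// "%EVENT%_%ENTITY%_%ASPECT%_v%VERSION%" resolves to a concrete topic name.
// The placeholder values here are assumptions; only the replace() chain
// mirrors TopicConventionImpl.buildEventName below.
public final class TopicNameSketch {
  private static final String EVENT_TYPE_PLACEHOLDER = "%EVENT%";
  private static final String ENTITY_PLACEHOLDER = "%ENTITY%";
  private static final String ASPECT_PLACEHOLDER = "%ASPECT%";
  private static final String VERSION_PLACEHOLDER = "%VERSION%";

  static String buildEventName(
      String pattern, String eventType, String entityName, String aspectName, int version) {
    return pattern
        .replace(EVENT_TYPE_PLACEHOLDER, eventType)
        .replace(ENTITY_PLACEHOLDER, entityName)
        .replace(ASPECT_PLACEHOLDER, aspectName)
        .replace(VERSION_PLACEHOLDER, Integer.toString(version));
  }

  public static void main(String[] args) {
    // Prints: MetadataAuditEvent_CorpGroup_CorpGroupInfo_v1, matching one of the
    // aspect-specific topic names listed in Topics.java later in this patch.
    System.out.println(
        buildEventName(
            "%EVENT%_%ENTITY%_%ASPECT%_v%VERSION%",
            "MetadataAuditEvent", "CorpGroup", "CorpGroupInfo", 1));
  }
}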
@@ -128,5 +117,6 @@ public interface TopicConvention { * @param aspect the aspect name being updated */ @Deprecated - Class getFailedMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect); + Class getFailedMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect); } diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java index 3143584bbdcaf..282a015319781 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/TopicConventionImpl.java @@ -5,14 +5,14 @@ import javax.annotation.Nonnull; import org.apache.avro.specific.SpecificRecord; - /** * Default implementation of a {@link TopicConvention}, which is fully customizable for event names. * - *
<p>
The newer aspect-entity specific event names are based on a pattern that can also be configured. The pattern is a - * string, which can use {@link #EVENT_TYPE_PLACEHOLDER}, {@link #VERSION_PLACEHOLDER}, {@link #ENTITY_PLACEHOLDER}, and - * {@link #ASPECT_PLACEHOLDER} as placeholders for the event type (MCE, MAE, FMCE, etc), event version, entity name, - * and aspect name, respectively. + *
<p>
The newer aspect-entity specific event names are based on a pattern that can also be + * configured. The pattern is a string, which can use {@link #EVENT_TYPE_PLACEHOLDER}, {@link + * #VERSION_PLACEHOLDER}, {@link #ENTITY_PLACEHOLDER}, and {@link #ASPECT_PLACEHOLDER} as + * placeholders for the event type (MCE, MAE, FMCE, etc), event version, entity name, and aspect + * name, respectively. */ public final class TopicConventionImpl implements TopicConvention { // Placeholders @@ -45,11 +45,17 @@ public final class TopicConventionImpl implements TopicConvention { // v5 patterns private final String _eventPattern; - public TopicConventionImpl(@Nonnull String metadataChangeEventTopicName, @Nonnull String metadataAuditEventTopicName, - @Nonnull String failedMetadataChangeEventTopicName, @Nonnull String metadataChangeProposalTopicName, - @Nonnull String metadataChangeLogVersionedTopicName, @Nonnull String metadataChangeLogTimeseriesTopicName, - @Nonnull String failedMetadataChangeProposalTopicName, @Nonnull String platformEventTopicName, - @Nonnull String eventPattern, @Nonnull String dataHubUpgradeHistoryTopicName) { + public TopicConventionImpl( + @Nonnull String metadataChangeEventTopicName, + @Nonnull String metadataAuditEventTopicName, + @Nonnull String failedMetadataChangeEventTopicName, + @Nonnull String metadataChangeProposalTopicName, + @Nonnull String metadataChangeLogVersionedTopicName, + @Nonnull String metadataChangeLogTimeseriesTopicName, + @Nonnull String failedMetadataChangeProposalTopicName, + @Nonnull String platformEventTopicName, + @Nonnull String eventPattern, + @Nonnull String dataHubUpgradeHistoryTopicName) { _metadataChangeEventTopicName = metadataChangeEventTopicName; _metadataAuditEventTopicName = metadataAuditEventTopicName; _failedMetadataChangeEventTopicName = failedMetadataChangeEventTopicName; @@ -63,9 +69,17 @@ public TopicConventionImpl(@Nonnull String metadataChangeEventTopicName, @Nonnul } public TopicConventionImpl() { - this(Topics.METADATA_CHANGE_EVENT, Topics.METADATA_AUDIT_EVENT, Topics.FAILED_METADATA_CHANGE_EVENT, - Topics.METADATA_CHANGE_PROPOSAL, Topics.METADATA_CHANGE_LOG_VERSIONED, Topics.METADATA_CHANGE_LOG_TIMESERIES, - Topics.FAILED_METADATA_CHANGE_PROPOSAL, Topics.PLATFORM_EVENT, DEFAULT_EVENT_PATTERN, Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME); + this( + Topics.METADATA_CHANGE_EVENT, + Topics.METADATA_AUDIT_EVENT, + Topics.FAILED_METADATA_CHANGE_EVENT, + Topics.METADATA_CHANGE_PROPOSAL, + Topics.METADATA_CHANGE_LOG_VERSIONED, + Topics.METADATA_CHANGE_LOG_TIMESERIES, + Topics.FAILED_METADATA_CHANGE_PROPOSAL, + Topics.PLATFORM_EVENT, + DEFAULT_EVENT_PATTERN, + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME); } @Nonnull @@ -117,15 +131,20 @@ public String getPlatformEventTopicName() { } @Nonnull - private String buildEventName(@Nonnull String eventType, @Nonnull String entityName, @Nonnull String aspectName, + private String buildEventName( + @Nonnull String eventType, + @Nonnull String entityName, + @Nonnull String aspectName, int version) { - return _eventPattern.replace(EVENT_TYPE_PLACEHOLDER, eventType) + return _eventPattern + .replace(EVENT_TYPE_PLACEHOLDER, eventType) .replace(ENTITY_PLACEHOLDER, entityName) .replace(ASPECT_PLACEHOLDER, aspectName) .replace(VERSION_PLACEHOLDER, Integer.toString(version)); } - private String buildEventName(@Nonnull String eventType, @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + private String buildEventName( + @Nonnull String eventType, @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { final String 
urnName = urn.getClass().getSimpleName(); // Expect URN name to relate to the entity name. (EntityName) + "Urn" == (UrnName) final String entityType = urnName.substring(0, urnName.length() - "Urn".length()); @@ -147,7 +166,8 @@ public String getDataHubUpgradeHistoryTopicName() { } @Override - public Class getMetadataChangeEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public Class getMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } @@ -159,20 +179,22 @@ public String getMetadataAuditEventTopicName(@Nonnull Urn urn, @Nonnull RecordTe } @Override - public Class getMetadataAuditEventType(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public Class getMetadataAuditEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } @Nonnull @Override - public String getFailedMetadataChangeEventTopicName(@Nonnull Urn urn, @Nonnull RecordTemplate aspect) { + public String getFailedMetadataChangeEventTopicName( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { return buildEventName(FAILED_METADATA_CHANGE_EVENT_TYPE, urn, aspect); } @Override - public Class getFailedMetadataChangeEventType(@Nonnull Urn urn, - @Nonnull RecordTemplate aspect) { + public Class getFailedMetadataChangeEventType( + @Nonnull Urn urn, @Nonnull RecordTemplate aspect) { // v5 is still in development. throw new UnsupportedOperationException("TODO - implement once versions are in annotations."); } diff --git a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java index 3a9a0812e1031..45bc2364aaa42 100644 --- a/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java +++ b/metadata-events/mxe-registration/src/main/java/com/linkedin/mxe/Topics.java @@ -19,10 +19,7 @@ public class Topics { public static final String DEV_METADATA_CHANGE_EVENT = "MetadataChangeEvent_v4_dev"; public static final String DEV_FAILED_METADATA_CHANGE_EVENT = "FailedMetadataChangeEvent_v4_dev"; - /** - * aspect-specific MAE topics. - * format : METADATA_AUDIT_EVENT__ - */ + /** aspect-specific MAE topics. format : METADATA_AUDIT_EVENT__ */ // MAE topics for CorpGroup entity. public static final String METADATA_AUDIT_EVENT_CORPGROUP_CORPGROUPINFO = "MetadataAuditEvent_CorpGroup_CorpGroupInfo_v1"; @@ -30,12 +27,10 @@ public class Topics { // MAE topics for CorpUser entity. public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSEREDITABLEINFO = "MetadataAuditEvent_CorpUser_CorpUserEditableInfo_v2"; - public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSERINFO = "MetadataAuditEvent_CorpUser_CorpUserInfo_v2"; + public static final String METADATA_AUDIT_EVENT_CORPUSER_CORPUSERINFO = + "MetadataAuditEvent_CorpUser_CorpUserInfo_v2"; - /** - * aspect-specific MCE topics. - * format : METADATA_CHANGE_EVENT__ - */ + /** aspect-specific MCE topics. format : METADATA_CHANGE_EVENT__ */ // MCE topics for CorpGroup entity. 
public static final String METADATA_CHANGE_EVENT_CORPGROUP_CORPGROUPINFO = "MetadataChangeEvent_CorpGroup_CorpGroupInfo_v1"; @@ -46,10 +41,7 @@ public class Topics { public static final String METADATA_CHANGE_EVENT_CORPUSER_CORPUSERINFO = "MetadataChangeEvent_CorpUser_CorpUserInfo_v1"; - /** - * aspect-specific FMCE topics. - * format : FAILED_METADATA_CHANGE_EVENT__ - */ + /** aspect-specific FMCE topics. format : FAILED_METADATA_CHANGE_EVENT__ */ // FMCE topics for CorpGroup entity. public static final String FAILED_METADATA_CHANGE_EVENT_CORPGROUP_CORPGROUPINFO = "FailedMetadataChangeEvent_CorpGroup_CorpGroupInfo_v1"; @@ -63,4 +55,4 @@ public class Topics { private Topics() { // Util class } -} \ No newline at end of file +} diff --git a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java index 9c95d9f4aabdc..645c2fe210e09 100644 --- a/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java +++ b/metadata-events/mxe-utils-avro/src/main/java/com/linkedin/metadata/EventUtils.java @@ -8,10 +8,10 @@ import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.FailedMetadataChangeEvent; import com.linkedin.mxe.FailedMetadataChangeProposal; -import com.linkedin.mxe.MetadataChangeLog; -import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.MetadataAuditEvent; import com.linkedin.mxe.MetadataChangeEvent; +import com.linkedin.mxe.MetadataChangeLog; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -30,7 +30,6 @@ import org.apache.avro.io.EncoderFactory; import org.apache.avro.specific.SpecificRecord; - public class EventUtils { private static final RecordDataSchema MCE_PEGASUS_SCHEMA = new MetadataChangeEvent().schema(); @@ -43,7 +42,8 @@ public class EventUtils { private static final RecordDataSchema PE_PEGASUS_SCHEMA = new PlatformEvent().schema(); - private static final RecordDataSchema DUHE_PEGASUS_SCHEMA = new DataHubUpgradeHistoryEvent().schema(); + private static final RecordDataSchema DUHE_PEGASUS_SCHEMA = + new DataHubUpgradeHistoryEvent().schema(); private static final Schema ORIGINAL_MCE_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/MetadataChangeEvent.avsc"); @@ -69,14 +69,17 @@ public class EventUtils { public static final Schema ORIGINAL_DUHE_AVRO_SCHEMA = getAvroSchemaFromResource("avro/com/linkedin/mxe/DataHubUpgradeHistoryEvent.avsc"); - private static final Schema RENAMED_MCE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$; + private static final Schema RENAMED_MCE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$; - private static final Schema RENAMED_MAE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$; + private static final Schema RENAMED_MAE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$; private static final Schema RENAMED_FAILED_MCE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$; - private static final Schema RENAMED_PE_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.PlatformEvent.SCHEMA$; + private static final Schema RENAMED_PE_AVRO_SCHEMA = + com.linkedin.pegasus2avro.mxe.PlatformEvent.SCHEMA$; private static final Schema RENAMED_MCP_AVRO_SCHEMA = com.linkedin.pegasus2avro.mxe.MetadataChangeProposal.SCHEMA$; @@ -107,79 +110,102 @@ private static 
Schema getAvroSchemaFromResource(@Nonnull String resourcePath) { /** * Converts a {@link GenericRecord} MAE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MAE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MAE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataAuditEvent} model */ @Nonnull - public static MetadataAuditEvent avroToPegasusMAE(@Nonnull GenericRecord record) throws IOException { - return new MetadataAuditEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MAE_AVRO_SCHEMA, ORIGINAL_MAE_AVRO_SCHEMA), MAE_PEGASUS_SCHEMA, - ORIGINAL_MAE_AVRO_SCHEMA)); + public static MetadataAuditEvent avroToPegasusMAE(@Nonnull GenericRecord record) + throws IOException { + return new MetadataAuditEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MAE_AVRO_SCHEMA, ORIGINAL_MAE_AVRO_SCHEMA), + MAE_PEGASUS_SCHEMA, + ORIGINAL_MAE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MCE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeEvent} model */ @Nonnull - public static MetadataChangeEvent avroToPegasusMCE(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCE_AVRO_SCHEMA, ORIGINAL_MCE_AVRO_SCHEMA), MCE_PEGASUS_SCHEMA, - ORIGINAL_MCE_AVRO_SCHEMA)); + public static MetadataChangeEvent avroToPegasusMCE(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCE_AVRO_SCHEMA, ORIGINAL_MCE_AVRO_SCHEMA), + MCE_PEGASUS_SCHEMA, + ORIGINAL_MCE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCL into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the MCL in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCL in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeLog} model */ @Nonnull - public static MetadataChangeLog avroToPegasusMCL(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeLog(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCL_AVRO_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA), - MCL_PEGASUS_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA)); + public static MetadataChangeLog avroToPegasusMCL(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeLog( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCL_AVRO_SCHEMA, ORIGINAL_MCL_AVRO_SCHEMA), + MCL_PEGASUS_SCHEMA, + ORIGINAL_MCL_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} MCP into the equivalent Pegasus model. 
* - * @param record the {@link GenericRecord} that contains the MCP in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the MCP in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link MetadataChangeProposal} model */ @Nonnull - public static MetadataChangeProposal avroToPegasusMCP(@Nonnull GenericRecord record) throws IOException { - return new MetadataChangeProposal(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_MCP_AVRO_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA), - MCP_PEGASUS_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA)); + public static MetadataChangeProposal avroToPegasusMCP(@Nonnull GenericRecord record) + throws IOException { + return new MetadataChangeProposal( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_MCP_AVRO_SCHEMA, ORIGINAL_MCP_AVRO_SCHEMA), + MCP_PEGASUS_SCHEMA, + ORIGINAL_MCP_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} PE into the equivalent Pegasus model. * - * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link PlatformEvent} model */ @Nonnull public static PlatformEvent avroToPegasusPE(@Nonnull GenericRecord record) throws IOException { - return new PlatformEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_PE_AVRO_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA), - PE_PEGASUS_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA)); + return new PlatformEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_PE_AVRO_SCHEMA, ORIGINAL_PE_AVRO_SCHEMA), + PE_PEGASUS_SCHEMA, + ORIGINAL_PE_AVRO_SCHEMA)); } /** * Converts a {@link GenericRecord} PE into the equivalent Pegasus model. 
* - * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro namespace + * @param record the {@link GenericRecord} that contains the PE in com.linkedin.pegasus2avro + * namespace * @return the Pegasus {@link PlatformEvent} model */ @Nonnull - public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecord record) throws IOException { - return new DataHubUpgradeHistoryEvent(DataTranslator.genericRecordToDataMap( - renameSchemaNamespace(record, RENAMED_DUHE_AVRO_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA), - DUHE_PEGASUS_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA)); + public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecord record) + throws IOException { + return new DataHubUpgradeHistoryEvent( + DataTranslator.genericRecordToDataMap( + renameSchemaNamespace(record, RENAMED_DUHE_AVRO_SCHEMA, ORIGINAL_DUHE_AVRO_SCHEMA), + DUHE_PEGASUS_SCHEMA, + ORIGINAL_DUHE_AVRO_SCHEMA)); } /** @@ -190,9 +216,11 @@ public static DataHubUpgradeHistoryEvent avroToPegasusDUHE(@Nonnull GenericRecor * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) throws IOException { + public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_MAE_AVRO_SCHEMA); } @@ -204,9 +232,11 @@ public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) throws IOException { + public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_MCL_AVRO_SCHEMA); } @@ -218,9 +248,11 @@ public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) t * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event) throws IOException { + public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_MCP_AVRO_SCHEMA); } @@ -232,26 +264,30 @@ public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal eve * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event) throws IOException { + public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA); return 
renameSchemaNamespace(original, RENAMED_MCE_AVRO_SCHEMA); } /** - * Converts a Pegasus aspect specific MXE into the equivalent Avro model as a {@link GenericRecord}. + * Converts a Pegasus aspect specific MXE into the equivalent Avro model as a {@link + * GenericRecord}. * * @param event the Pegasus aspect specific MXE model * @return the Avro model with com.linkedin.pegasus2avro.mxe namespace * @throws IOException if the conversion fails */ @Nonnull - public static MXE pegasusToAvroAspectSpecificMXE( - @Nonnull Class clazz, @Nonnull RecordTemplate event) - throws NoSuchFieldException, IOException, IllegalAccessException { + public static + MXE pegasusToAvroAspectSpecificMXE(@Nonnull Class clazz, @Nonnull RecordTemplate event) + throws NoSuchFieldException, IOException, IllegalAccessException { final Schema newSchema = (Schema) clazz.getField("SCHEMA$").get(null); final Schema originalSchema = getAvroSchemaFromResource(getAvroResourcePath(clazz)); - final GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), originalSchema); + final GenericRecord original = + DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), originalSchema); return (MXE) renameSchemaNamespace(original, originalSchema, newSchema); } @@ -263,10 +299,12 @@ public static MXE pegasusT * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChangeEvent failedMetadataChangeEvent) - throws IOException { + public static GenericRecord pegasusToAvroFailedMCE( + @Nonnull FailedMetadataChangeEvent failedMetadataChangeEvent) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(failedMetadataChangeEvent.data(), failedMetadataChangeEvent.schema(), + DataTranslator.dataMapToGenericRecord( + failedMetadataChangeEvent.data(), + failedMetadataChangeEvent.schema(), ORIGINAL_FAILED_MCE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_FAILED_MCE_AVRO_SCHEMA); } @@ -282,7 +320,9 @@ public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChange public static GenericRecord pegasusToAvroFailedMCP( @Nonnull FailedMetadataChangeProposal failedMetadataChangeProposal) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(failedMetadataChangeProposal.data(), failedMetadataChangeProposal.schema(), + DataTranslator.dataMapToGenericRecord( + failedMetadataChangeProposal.data(), + failedMetadataChangeProposal.schema(), ORIGINAL_FMCL_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_FMCP_AVRO_SCHEMA); } @@ -297,33 +337,37 @@ public static GenericRecord pegasusToAvroFailedMCP( @Nonnull public static GenericRecord pegasusToAvroPE(@Nonnull PlatformEvent event) throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_PE_AVRO_SCHEMA); } /** - * Converts a Pegasus DataHub Upgrade History Event into the equivalent Avro model as a {@link GenericRecord}. + * Converts a Pegasus DataHub Upgrade History Event into the equivalent Avro model as a {@link + * GenericRecord}. 
* * @param event the Pegasus {@link com.linkedin.mxe.DataHubUpgradeHistoryEvent} model * @return the Avro model with com.linkedin.pegasus2avro.event namespace * @throws IOException if the conversion fails */ @Nonnull - public static GenericRecord pegasusToAvroDUHE(@Nonnull DataHubUpgradeHistoryEvent event) throws IOException { + public static GenericRecord pegasusToAvroDUHE(@Nonnull DataHubUpgradeHistoryEvent event) + throws IOException { GenericRecord original = - DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_DUHE_AVRO_SCHEMA); + DataTranslator.dataMapToGenericRecord( + event.data(), event.schema(), ORIGINAL_DUHE_AVRO_SCHEMA); return renameSchemaNamespace(original, RENAMED_DUHE_AVRO_SCHEMA); } /** - * Converts original MXE into a renamed namespace - * Does a double convert that should not be necessary since we're already converting prior to calling this method - * in most spots + * Converts original MXE into a renamed namespace Does a double convert that should not be + * necessary since we're already converting prior to calling this method in most spots */ @Nonnull @Deprecated - private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema originalSchema, - @Nonnull Schema newSchema) throws IOException { + private static GenericRecord renameSchemaNamespace( + @Nonnull GenericRecord original, @Nonnull Schema originalSchema, @Nonnull Schema newSchema) + throws IOException { // Step 1: Updates to the latest original schema final GenericRecord record = changeSchema(original, original.getSchema(), originalSchema); @@ -332,12 +376,10 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin return changeSchema(record, newSchema, newSchema); } - /** - * Converts original MXE into a renamed namespace - */ + /** Converts original MXE into a renamed namespace */ @Nonnull - private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema newSchema) - throws IOException { + private static GenericRecord renameSchemaNamespace( + @Nonnull GenericRecord original, @Nonnull Schema newSchema) throws IOException { return changeSchema(original, newSchema, newSchema); } @@ -345,7 +387,8 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin /** * Changes the schema of a {@link GenericRecord} to a compatible schema * - * Achieved by serializing the record using its embedded schema and deserializing it using the new compatible schema. + *
<p>
Achieved by serializing the record using its embedded schema and deserializing it using the + * new compatible schema. * * @param record the record to update schema for * @param writerSchema the writer schema to use when deserializing @@ -354,8 +397,9 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin * @throws IOException */ @Nonnull - private static GenericRecord changeSchema(@Nonnull GenericRecord record, @Nonnull Schema writerSchema, - @Nonnull Schema readerSchema) throws IOException { + private static GenericRecord changeSchema( + @Nonnull GenericRecord record, @Nonnull Schema writerSchema, @Nonnull Schema readerSchema) + throws IOException { try (ByteArrayOutputStream os = new ByteArrayOutputStream()) { BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, null); DatumWriter writer = new GenericDatumWriter<>(record.getSchema()); @@ -374,6 +418,7 @@ private static GenericRecord changeSchema(@Nonnull GenericRecord record, @Nonnul /** * Get Pegasus class from Avro class. + * * @param clazz the aspect specific MXE avro class * @return the Pegasus aspect specific MXE class * @throws Exception @@ -383,6 +428,7 @@ public static Class getPegasusClass(@Nonnull Class clazz) throws ClassNotF } private static String getAvroResourcePath(@Nonnull Class clazz) { - return String.format("avro/%s.avsc", clazz.getCanonicalName().replace(".pegasus2avro", "").replace(".", "/")); + return String.format( + "avro/%s.avsc", clazz.getCanonicalName().replace(".pegasus2avro", "").replace(".", "/")); } } diff --git a/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java b/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java index 66759d4637c18..1318109d476d7 100644 --- a/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java +++ b/metadata-events/mxe-utils-avro/src/test/java/com/linkedin/metadata/EventUtilsTests.java @@ -1,5 +1,8 @@ package com.linkedin.metadata; +import static com.datahub.utils.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.util.RecordUtils; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.data.template.RecordTemplate; @@ -16,39 +19,53 @@ import org.apache.avro.io.JsonDecoder; import org.testng.annotations.Test; -import static com.datahub.utils.TestUtils.*; -import static org.testng.Assert.*; - - public class EventUtilsTests { @Test public void testAvroToPegasusMAE() throws IOException { - GenericRecord record = genericRecordFromResource("test-avro2pegasus-mae.json", - com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$); + GenericRecord record = + genericRecordFromResource( + "test-avro2pegasus-mae.json", com.linkedin.pegasus2avro.mxe.MetadataAuditEvent.SCHEMA$); MetadataAuditEvent mae = EventUtils.avroToPegasusMAE(record); assertEquals( - mae.getNewSnapshot().getDatasetSnapshot().getAspects().get(0).getOwnership().getOwners().get(0).getOwner(), + mae.getNewSnapshot() + .getDatasetSnapshot() + .getAspects() + .get(0) + .getOwnership() + .getOwners() + .get(0) + .getOwner(), new CorpuserUrn("foobar")); } @Test public void testAvroToPegasusMCE() throws IOException { - GenericRecord record = genericRecordFromResource("test-avro2pegasus-mce.json", - com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$); + GenericRecord record = + genericRecordFromResource( + "test-avro2pegasus-mce.json", + com.linkedin.pegasus2avro.mxe.MetadataChangeEvent.SCHEMA$); MetadataChangeEvent mce = 
EventUtils.avroToPegasusMCE(record); assertEquals( - mce.getProposedSnapshot().getDatasetSnapshot().getAspects().get(0).getOwnership().getOwners().get(0).getOwner(), + mce.getProposedSnapshot() + .getDatasetSnapshot() + .getAspects() + .get(0) + .getOwnership() + .getOwners() + .get(0) + .getOwner(), new CorpuserUrn("foobar")); } @Test public void testPegasusToAvroMAE() throws IOException { - MetadataAuditEvent event = recordTemplateFromResource("test-pegasus2avro-mae.json", MetadataAuditEvent.class); + MetadataAuditEvent event = + recordTemplateFromResource("test-pegasus2avro-mae.json", MetadataAuditEvent.class); GenericRecord record = EventUtils.pegasusToAvroMAE(event); @@ -58,7 +75,8 @@ public void testPegasusToAvroMAE() throws IOException { @Test public void testPegasusToAvroMCE() throws IOException { - MetadataChangeEvent event = recordTemplateFromResource("test-pegasus2avro-mce.json", MetadataChangeEvent.class); + MetadataChangeEvent event = + recordTemplateFromResource("test-pegasus2avro-mce.json", MetadataChangeEvent.class); GenericRecord record = EventUtils.pegasusToAvroMCE(event); @@ -68,24 +86,27 @@ public void testPegasusToAvroMCE() throws IOException { @Test public void testPegasusToAvroFailedMCE() throws IOException { - FailedMetadataChangeEvent event = recordTemplateFromResource("test-pegasus2avro-fmce.json", FailedMetadataChangeEvent.class); + FailedMetadataChangeEvent event = + recordTemplateFromResource("test-pegasus2avro-fmce.json", FailedMetadataChangeEvent.class); GenericRecord record = EventUtils.pegasusToAvroFailedMCE(event); - assertEquals(record.getSchema(), com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$); + assertEquals( + record.getSchema(), com.linkedin.pegasus2avro.mxe.FailedMetadataChangeEvent.SCHEMA$); assertNotNull(record.get("error")); assertNotNull(record.get("metadataChangeEvent")); } - private GenericRecord genericRecordFromResource(String resourcePath, Schema schema) throws IOException { + private GenericRecord genericRecordFromResource(String resourcePath, Schema schema) + throws IOException { InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath); JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, is); DatumReader reader = new GenericDatumReader<>(schema); return reader.read(null, decoder); } - private T recordTemplateFromResource(String resourcePath, - Class clazz) throws IOException { + private T recordTemplateFromResource( + String resourcePath, Class clazz) throws IOException { String json = loadJsonFromResource(resourcePath); return (T) RecordUtils.toRecordTemplate(clazz, json); } diff --git a/metadata-integration/java/datahub-client/build.gradle b/metadata-integration/java/datahub-client/build.gradle index 0bf6b18fa5073..7ae01faaaabdd 100644 --- a/metadata-integration/java/datahub-client/build.gradle +++ b/metadata-integration/java/datahub-client/build.gradle @@ -239,8 +239,6 @@ processResources.dependsOn generateOpenApiPojos sourceSets.main.java.srcDir "${generateOpenApiPojos.outputDir}/src/main/java" sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources" -checkstyleMain.exclude '**/generated/**' - clean { project.delete("$projectDir/generated") } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java index 84fe9cef0817c..a899f27a0cb2c 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java +++ 
b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Callback.java @@ -2,21 +2,20 @@ import javax.annotation.Nullable; - public interface Callback { /** - * Called when the client request has completed. - * Completion does not imply success. Inspect the response object to understand if - * this was a successfully processed request or not. + * Called when the client request has completed. Completion does not imply success. Inspect the + * response object to understand if this was a successfully processed request or not. + * * @param response */ void onCompletion(@Nullable MetadataWriteResponse response); /** * Called when the client request has thrown an exception before completion. + * * @param exception */ void onFailure(Throwable exception); - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java index 25bcba5f7d4c6..97c4558933b69 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/Emitter.java @@ -11,61 +11,74 @@ import javax.annotation.Nonnull; import javax.annotation.concurrent.ThreadSafe; - /** - * An interface implemented by all metadata emitters to DataHub. - * Typical usage: - * 1. Construct the emitter using the native constructor or builder for the Emitter. - * 2. Call `emitter.emit(mcpw, callback)` for each event you want to send - * 3. Wait for all events to be sent by inspecting the futures returned by each call or using callbacks - * 4. Call `emitter.close()` to finalize. + * An interface implemented by all metadata emitters to DataHub. Typical usage: 1. Construct the + * emitter using the native constructor or builder for the Emitter. 2. Call `emitter.emit(mcpw, + * callback)` for each event you want to send 3. Wait for all events to be sent by inspecting the + * futures returned by each call or using callbacks 4. Call `emitter.close()` to finalize. */ @ThreadSafe public interface Emitter extends Closeable { /** * Asynchronously emit a {@link MetadataChangeProposalWrapper} event. + * * @param mcpw * @param callback if not null, is called from the IO thread. Should be a quick operation. - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - Future emit(@Nonnull MetadataChangeProposalWrapper mcpw, Callback callback) throws IOException; + Future emit(@Nonnull MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException; /** * Asynchronously emit a {@link MetadataChangeProposalWrapper} event. + * * @param mcpw - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - default Future emit(@Nonnull MetadataChangeProposalWrapper mcpw) throws IOException { + default Future emit(@Nonnull MetadataChangeProposalWrapper mcpw) + throws IOException { return emit(mcpw, null); } /** * Asynchronously emit a {@link MetadataChangeProposal} event. Prefer using the sibling method - * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to construct. 
+ * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to + * construct. + * * @param mcp * @param callback if not null, is called from the IO thread. Should be a quick operation. - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp, Callback callback) throws IOException; + Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp, Callback callback) + throws IOException; /** * Asynchronously emit a {@link MetadataChangeProposal} event. Prefer using the sibling method - * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to construct. + * that accepts a {@link MetadataChangeProposalWrapper} event as those are friendlier to + * construct. + * * @param mcp - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ - default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) throws IOException { + default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) + throws IOException { return emit(mcp, null); } /** * Test that the emitter can establish a valid connection to the DataHub platform - * @return true if a valid connection can be established, false or throws one of the exceptions otherwise + * + * @return true if a valid connection can be established, false or throws one of the exceptions + * otherwise * @throws IOException * @throws ExecutionException * @throws InterruptedException @@ -74,11 +87,12 @@ default Future<MetadataWriteResponse> emit(@Nonnull MetadataChangeProposal mcp) /** * Asynchronously emit a {@link UpsertAspectRequest}.
+ * + * @param request request with metadata aspect to upsert into DataHub - * @return a {@link Future} for callers to inspect the result of the operation or block until one is available + * @return a {@link Future} for callers to inspect the result of the operation or block until one + * is available * @throws IOException */ Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException; - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java index 51126a1cdcbea..89db9738efda6 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataResponseFuture.java @@ -9,15 +9,16 @@ import lombok.SneakyThrows; import org.apache.http.HttpResponse; - public class MetadataResponseFuture implements Future<MetadataWriteResponse> { private final Future<HttpResponse> requestFuture; private final AtomicReference<MetadataWriteResponse> responseReference; private final CountDownLatch responseLatch; private final ResponseMapper mapper; - public MetadataResponseFuture(Future<HttpResponse> underlyingFuture, - AtomicReference<MetadataWriteResponse> responseAtomicReference, CountDownLatch responseLatch) { + public MetadataResponseFuture( + Future<HttpResponse> underlyingFuture, + AtomicReference<MetadataWriteResponse> responseAtomicReference, + CountDownLatch responseLatch) { this.requestFuture = underlyingFuture; this.responseReference = responseAtomicReference; this.responseLatch = responseLatch; diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java index 969ef10c41a24..b6e77556980c1 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/MetadataWriteResponse.java @@ -3,27 +3,19 @@ import lombok.Builder; import lombok.Value; - @Value @Builder public class MetadataWriteResponse { - /** - * True if the client send succeeded and we got a successful response from the server - */ - @Builder.Default - boolean success = true; + /** True if the client send succeeded and we got a successful response from the server */ + @Builder.Default boolean success = true; /** - * If the write failed due to an exception thrown by the server - * and we have access to it, then we store the stack trace here + * If the write failed due to an exception thrown by the server and we have access to it, then we + * store the stack trace here */ String responseContent; - /** - * The underlying response object - * (typically an HTTPResponse or a kafka.ResponseMetadata) - */ + /** The underlying response object (typically an HTTPResponse or a kafka.ResponseMetadata) */ Object underlyingResponse; - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java index aae0e51b6736e..ab866f060b354 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitter.java @@ -1,34 +1,31 @@ package datahub.client.file; -import com.fasterxml.jackson.core.StreamReadConstraints; -import java.io.BufferedWriter; -import java.io.FileWriter; -import
java.io.IOException; -import java.util.List; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; +import static com.linkedin.metadata.Constants.*; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.util.DefaultIndenter; import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.data.template.JacksonDataTemplateCodec; import com.linkedin.mxe.MetadataChangeProposal; - import datahub.client.Callback; import datahub.client.Emitter; import datahub.client.MetadataWriteResponse; import datahub.event.EventFormatter; import datahub.event.MetadataChangeProposalWrapper; import datahub.event.UpsertAspectRequest; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class FileEmitter implements Emitter { @@ -45,22 +42,27 @@ public class FileEmitter implements Emitter { /** * The default constructor - * + * * @param config */ public FileEmitter(FileEmitterConfig config) { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); this.config = config; this.eventFormatter = this.config.getEventFormatter(); - DefaultPrettyPrinter pp = new DefaultPrettyPrinter() - .withObjectIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)) - .withArrayIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)); + DefaultPrettyPrinter pp = + new DefaultPrettyPrinter() + .withObjectIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)) + .withArrayIndenter(new DefaultIndenter(FileEmitter.INDENT_4, DefaultIndenter.SYS_LF)); this.dataTemplateCodec.setPrettyPrinter(pp); try { @@ -75,33 +77,37 @@ public FileEmitter(FileEmitterConfig config) { this.wroteSomething = false; log.debug("Emitter created successfully for " + this.config.getFileName()); - this.cachedSuccessFuture = new Future<MetadataWriteResponse>() { - @Override - public boolean cancel(boolean mayInterruptIfRunning) { - return false; - } - - @Override - public MetadataWriteResponse get() throws InterruptedException, ExecutionException { - return MetadataWriteResponse.builder().success(true).responseContent("MCP witten to File").build(); - } - - @Override - public MetadataWriteResponse get(long timeout, TimeUnit unit) - throws InterruptedException, ExecutionException, TimeoutException { - return this.get(); - } - - @Override - public boolean isCancelled() {
return false; - } - - @Override - public boolean isDone() { - return true; - } - }; + this.cachedSuccessFuture = + new Future<MetadataWriteResponse>() { + @Override + public boolean cancel(boolean mayInterruptIfRunning) { + return false; + } + + @Override + public MetadataWriteResponse get() throws InterruptedException, ExecutionException { + return MetadataWriteResponse.builder() + .success(true) + .responseContent("MCP written to File") + .build(); + } + + @Override + public MetadataWriteResponse get(long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException, TimeoutException { + return this.get(); + } + + @Override + public boolean isCancelled() { + return false; + } + + @Override + public boolean isDone() { + return true; + } + }; } @Override @@ -114,13 +120,15 @@ public void close() throws IOException { } @Override - public Future<MetadataWriteResponse> emit(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, - Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit( + @SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException { return emit(this.eventFormatter.convert(mcpw), callback); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback callback) + throws IOException { if (this.closed.get()) { String errorMsg = "File Emitter is already closed."; log.error(errorMsg); @@ -167,7 +175,8 @@ public boolean testConnection() throws IOException, ExecutionException, Interrup } @Override - public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) throws IOException { + public Future<MetadataWriteResponse> emit(List<UpsertAspectRequest> request, Callback callback) + throws IOException { throw new UnsupportedOperationException("UpsertAspectRequest not relevant for File Emitter"); } @@ -185,8 +194,8 @@ public MetadataWriteResponse get() throws InterruptedException, ExecutionExcepti } @Override - public MetadataWriteResponse get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, - TimeoutException { + public MetadataWriteResponse get(long timeout, TimeUnit unit) + throws InterruptedException, ExecutionException, TimeoutException { return this.get(); } @@ -199,8 +208,6 @@ public boolean isCancelled() { public boolean isDone() { return true; } - }; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java index c89edef81ef5e..61ee12d88824d 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/file/FileEmitterConfig.java @@ -7,10 +7,9 @@ @Value @Builder public class FileEmitterConfig { + @Builder.Default @lombok.NonNull private final String fileName = null; + @Builder.Default - @lombok.NonNull - private final String fileName = null; - @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); - + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java index 6212e57470be4..0d0341562e7dd 100644 ---
a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/AvroSerializer.java @@ -1,16 +1,13 @@ package datahub.client.kafka; -import java.io.IOException; - -import org.apache.avro.Schema; -import org.apache.avro.generic.GenericData; -import org.apache.avro.generic.GenericRecord; - import com.google.common.annotations.VisibleForTesting; import com.linkedin.mxe.MetadataChangeProposal; - import datahub.event.EventFormatter; import datahub.event.MetadataChangeProposalWrapper; +import java.io.IOException; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericRecord; class AvroSerializer { @@ -20,8 +17,12 @@ class AvroSerializer { private final EventFormatter _eventFormatter; public AvroSerializer() throws IOException { - _recordSchema = new Schema.Parser() - .parse(this.getClass().getClassLoader().getResourceAsStream("MetadataChangeProposal.avsc")); + _recordSchema = + new Schema.Parser() + .parse( + this.getClass() + .getClassLoader() + .getResourceAsStream("MetadataChangeProposal.avsc")); _genericAspectSchema = this._recordSchema.getField("aspect").schema().getTypes().get(1); _changeTypeEnumSchema = this._recordSchema.getField("changeType").schema(); _eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); @@ -32,7 +33,8 @@ Schema getRecordSchema() { return _recordSchema; } - public GenericRecord serialize(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw) throws IOException { + public GenericRecord serialize(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw) + throws IOException { return serialize(_eventFormatter.convert(mcpw)); } @@ -45,7 +47,8 @@ public GenericRecord serialize(MetadataChangeProposal mcp) throws IOException { genericRecord.put("aspect", genericAspect); genericRecord.put("aspectName", mcp.getAspectName()); genericRecord.put("entityType", mcp.getEntityType()); - genericRecord.put("changeType", new GenericData.EnumSymbol(_changeTypeEnumSchema, mcp.getChangeType())); + genericRecord.put( + "changeType", new GenericData.EnumSymbol(_changeTypeEnumSchema, mcp.getChangeType())); return genericRecord; } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java index 45528f79fad19..ba310de14813e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitter.java @@ -1,5 +1,11 @@ package datahub.client.kafka; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.Emitter; +import datahub.client.MetadataWriteResponse; +import datahub.event.MetadataChangeProposalWrapper; +import datahub.event.UpsertAspectRequest; import java.io.IOException; import java.util.List; import java.util.Properties; @@ -7,7 +13,7 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; - +import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.ListTopicsOptions; @@ -16,15 +22,6 @@ import org.apache.kafka.clients.producer.ProducerRecord; import 
org.apache.kafka.clients.producer.RecordMetadata; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.Emitter; -import datahub.client.MetadataWriteResponse; -import datahub.event.MetadataChangeProposalWrapper; -import datahub.event.UpsertAspectRequest; -import lombok.extern.slf4j.Slf4j; - @Slf4j public class KafkaEmitter implements Emitter { @@ -45,9 +42,11 @@ public KafkaEmitter(KafkaEmitterConfig config) throws IOException { this.config = config; kafkaConfigProperties = new Properties(); kafkaConfigProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.config.getBootstrap()); - kafkaConfigProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + kafkaConfigProperties.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, org.apache.kafka.common.serialization.StringSerializer.class); - kafkaConfigProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + kafkaConfigProperties.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, io.confluent.kafka.serializers.KafkaAvroSerializer.class); kafkaConfigProperties.put("schema.registry.url", this.config.getSchemaRegistryUrl()); kafkaConfigProperties.putAll(config.getSchemaRegistryConfig()); @@ -59,28 +58,31 @@ public KafkaEmitter(KafkaEmitterConfig config) throws IOException { @Override public void close() throws IOException { producer.close(); - } @Override - public Future<MetadataWriteResponse> emit(@SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, - Callback datahubCallback) throws IOException { + public Future<MetadataWriteResponse> emit( + @SuppressWarnings("rawtypes") MetadataChangeProposalWrapper mcpw, Callback datahubCallback) + throws IOException { return emit(this.config.getEventFormatter().convert(mcpw), datahubCallback); } @Override - public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback datahubCallback) throws IOException { + public Future<MetadataWriteResponse> emit(MetadataChangeProposal mcp, Callback datahubCallback) + throws IOException { GenericRecord genricRecord = _avroSerializer.serialize(mcp); - ProducerRecord<String, GenericRecord> record = new ProducerRecord<>(KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, - mcp.getEntityUrn().toString(), genricRecord); + ProducerRecord<String, GenericRecord> record = + new ProducerRecord<>( + KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, mcp.getEntityUrn().toString(), genricRecord); - org.apache.kafka.clients.producer.Callback callback = new org.apache.kafka.clients.producer.Callback() { - - @Override - public void onCompletion(RecordMetadata metadata, Exception exception) { - MetadataWriteResponse response = mapResponse(metadata, exception); - datahubCallback.onCompletion(response); - } - }; + org.apache.kafka.clients.producer.Callback callback = + new org.apache.kafka.clients.producer.Callback() { + + @Override + public void onCompletion(RecordMetadata metadata, Exception exception) { + MetadataWriteResponse response = mapResponse(metadata, exception); + datahubCallback.onCompletion(response); + } + }; log.debug("Emit: topic: {} \n record: {}", KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC, record); Future<RecordMetadata> future = this.producer.send(record, callback); return mapFuture(future); @@ -117,14 +119,17 @@ public boolean isDone() { return future.isDone(); } }; - } @Override public boolean testConnection() throws IOException, ExecutionException, InterruptedException { try (AdminClient client = AdminClient.create(this.kafkaConfigProperties)) { - log.info("Available topics:" - + client.listTopics(new ListTopicsOptions().timeoutMs(ADMIN_CLIENT_TIMEOUT_MS)).listings().get()); + log.info( + "Available topics:" + + client + .listTopics(new
ListTopicsOptions().timeoutMs(ADMIN_CLIENT_TIMEOUT_MS)) + .listings() + .get()); } catch (ExecutionException ex) { log.error("Kafka is not available, timed out after {} ms", ADMIN_CLIENT_TIMEOUT_MS); return false; @@ -133,7 +138,8 @@ public boolean testConnection() throws IOException, ExecutionException, Interrup } @Override - public Future emit(List request, Callback callback) throws IOException { + public Future emit(List request, Callback callback) + throws IOException { throw new UnsupportedOperationException("UpsertAspectRequest cannot be sent over Kafka"); } @@ -156,5 +162,4 @@ private static MetadataWriteResponse mapResponse(RecordMetadata metadata, Except public Properties getKafkaConfgiProperties() { return kafkaConfigProperties; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java index 9452dd5686ac7..c0a5df3bddf37 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/kafka/KafkaEmitterConfig.java @@ -1,12 +1,11 @@ package datahub.client.kafka; +import datahub.event.EventFormatter; import java.io.InputStream; import java.util.Collections; import java.util.Map; import java.util.Properties; import java.util.function.Consumer; - -import datahub.event.EventFormatter; import lombok.Builder; import lombok.Value; import lombok.extern.slf4j.Slf4j; @@ -18,24 +17,22 @@ public class KafkaEmitterConfig { public static final String CLIENT_VERSION_PROPERTY = "clientVersion"; + @Builder.Default private final String bootstrap = "localhost:9092"; + @Builder.Default private final String schemaRegistryUrl = "http://localhost:8081"; + + @Builder.Default private final Map schemaRegistryConfig = Collections.emptyMap(); + @Builder.Default private final Map producerConfig = Collections.emptyMap(); + @Builder.Default - private final String bootstrap = "localhost:9092"; - @Builder.Default - private final String schemaRegistryUrl = "http://localhost:8081"; - - @Builder.Default - private final Map schemaRegistryConfig = Collections.emptyMap(); - @Builder.Default - private final Map producerConfig = Collections.emptyMap(); - - @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); - + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); + public static class KafkaEmitterConfigBuilder { @SuppressWarnings("unused") private String getVersion() { - try (InputStream foo = this.getClass().getClassLoader().getResourceAsStream("client.properties")) { + try (InputStream foo = + this.getClass().getClassLoader().getResourceAsStream("client.properties")) { Properties properties = new Properties(); properties.load(foo); return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); @@ -49,7 +46,5 @@ public KafkaEmitterConfigBuilder with(Consumer builde builderFunction.accept(this); return this; } - } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java index bf40addef6505..943aaefec469b 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java +++ 
b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/AbstractMultiFieldPatchBuilder.java @@ -1,5 +1,7 @@ package datahub.client.patch; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.linkedin.common.urn.Urn; @@ -13,9 +15,6 @@ import org.apache.commons.lang3.tuple.ImmutableTriple; import org.apache.http.entity.ContentType; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - public abstract class AbstractMultiFieldPatchBuilder> { public static final String OP_KEY = "op"; @@ -27,6 +26,7 @@ public abstract class AbstractMultiFieldPatchBuilder> triples = getPathValues(); - triples.forEach(triple -> patches.add(instance.objectNode().put(OP_KEY, triple.left) - .put(PATH_KEY, triple.middle) - .set(VALUE_KEY, triple.right))); + triples.forEach( + triple -> + patches.add( + instance + .objectNode() + .put(OP_KEY, triple.left) + .put(PATH_KEY, triple.middle) + .set(VALUE_KEY, triple.right))); GenericAspect genericAspect = new GenericAspect(); genericAspect.setContentType(ContentType.APPLICATION_JSON.getMimeType()); @@ -85,7 +94,9 @@ protected GenericAspect buildPatch() { } /** - * Constructs a list of Op, Path, Value triples to create as patches. Not idempotent and should not be called more than once + * Constructs a list of Op, Path, Value triples to create as patches. Not idempotent and should + * not be called more than once + * * @return list of patch precursor triples */ protected List> getPathValues() { diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java index 8e8b5e324586f..ac93fd24fee02 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/PatchOperationType.java @@ -2,16 +2,13 @@ import lombok.Getter; - public enum PatchOperationType { ADD("add"), REMOVE("remove"); - @Getter - private final String value; + @Getter private final String value; PatchOperationType(String value) { this.value = value; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java index 34618ddba7c5e..e621aaf57ff97 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/CustomPropertiesPatchBuilder.java @@ -1,5 +1,7 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -10,10 +12,8 @@ import java.util.Map; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; - - -public class CustomPropertiesPatchBuilder> implements IntermediatePatchBuilder { +public class CustomPropertiesPatchBuilder> + implements IntermediatePatchBuilder { public static final String CUSTOM_PROPERTIES_BASE_PATH = 
"/customProperties"; @@ -26,35 +26,46 @@ public CustomPropertiesPatchBuilder(T parentBuilder) { /** * Add a property to a custom properties field + * * @param key * @param value * @return */ public CustomPropertiesPatchBuilder addProperty(String key, String value) { - operations.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, - instance.textNode(value))); + operations.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + CUSTOM_PROPERTIES_BASE_PATH + "/" + key, + instance.textNode(value))); return this; } /** - * Remove a property from a custom properties field. If the property doesn't exist, this is a no-op. + * Remove a property from a custom properties field. If the property doesn't exist, this is a + * no-op. + * * @param key * @return */ public CustomPropertiesPatchBuilder removeProperty(String key) { - operations.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, null)); + operations.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), CUSTOM_PROPERTIES_BASE_PATH + "/" + key, null)); return this; } /** * Fully replace the properties of the target aspect + * * @param properties * @return */ public CustomPropertiesPatchBuilder setProperties(Map properties) { ObjectNode propertiesNode = instance.objectNode(); properties.forEach((key, value) -> propertiesNode.set(key, instance.textNode(value))); - operations.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH, propertiesNode)); + operations.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), CUSTOM_PROPERTIES_BASE_PATH, propertiesNode)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java index 6cebee0ac1265..84db0ba307cf2 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/common/GlobalTagsPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.common; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TagUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; @@ -8,10 +11,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class GlobalTagsPatchBuilder extends AbstractMultiFieldPatchBuilder { private static final String BASE_PATH = "/tags/"; @@ -20,6 +19,7 @@ public class GlobalTagsPatchBuilder extends AbstractMultiFieldPatchBuilder { +public class GlossaryTermsPatchBuilder + extends AbstractMultiFieldPatchBuilder { private static final String BASE_PATH = "/glossaryTerms/"; private static final String URN_KEY = "urn"; @@ -20,6 +20,7 @@ public class GlossaryTermsPatchBuilder extends AbstractMultiFieldPatchBuilder { private static final String BASE_PATH = "/owners/"; @@ -23,33 +22,39 @@ public OwnershipPatchBuilder addOwner(@Nonnull Urn owner, @Nonnull OwnershipType value.put(OWNER_KEY, owner.toString()); value.put(TYPE_KEY, type.toString()); - 
pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + owner + "/" + type, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + owner + "/" + type, value)); return this; } /** * Remove all ownership types for an owner + * * @param owner * @return */ public OwnershipPatchBuilder removeOwner(@Nonnull Urn owner) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + owner, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + owner, null)); return this; } /** - * Removes a specific ownership type for a particular owner, a single owner may have multiple ownership types + * Removes a specific ownership type for a particular owner, a single owner may have multiple + * ownership types + * * @param owner * @param type * @return */ - public OwnershipPatchBuilder removeOwnershipType(@Nonnull Urn owner, @Nonnull OwnershipType type) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + owner + "/" + type, null)); + public OwnershipPatchBuilder removeOwnershipType( + @Nonnull Urn owner, @Nonnull OwnershipType type) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + owner + "/" + type, null)); return this; } @@ -61,7 +66,8 @@ protected String getAspectName() { @Override protected String getEntityType() { if (this.targetEntityUrn == null) { - throw new IllegalStateException("Target Entity Urn must be set to determine entity type before building Patch."); + throw new IllegalStateException( + "Target Entity Urn must be set to determine entity type before building Patch."); } return this.targetEntityUrn.getEntityType(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java index 3161eb492dff5..9e55ab4fc6db4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataflow/DataFlowInfoPatchBuilder.java @@ -1,23 +1,23 @@ package datahub.client.patch.dataflow; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; -import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; import datahub.client.patch.common.CustomPropertiesPatchBuilder; +import datahub.client.patch.subtypesupport.CustomPropertiesPatchBuilderSupport; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowInfoPatchBuilder extends AbstractMultiFieldPatchBuilder +public class DataFlowInfoPatchBuilder + extends AbstractMultiFieldPatchBuilder implements CustomPropertiesPatchBuilderSupport { public static final String BASE_PATH = "/"; @@ -30,28 +30,41 @@ public class DataFlowInfoPatchBuilder extends 
AbstractMultiFieldPatchBuilder customPropertiesPatchBuilder = new CustomPropertiesPatchBuilder<>(this); + private CustomPropertiesPatchBuilder customPropertiesPatchBuilder = + new CustomPropertiesPatchBuilder<>(this); public DataFlowInfoPatchBuilder setName(@Nonnull String name) { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); return this; } public DataFlowInfoPatchBuilder setDescription(@Nullable String description) { if (description == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } public DataFlowInfoPatchBuilder setProject(@Nullable String project) { if (project == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + PROJECT_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + PROJECT_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + PROJECT_KEY, instance.textNode(project))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + PROJECT_KEY, + instance.textNode(project))); } return this; } @@ -59,28 +72,35 @@ public DataFlowInfoPatchBuilder setProject(@Nullable String project) { public DataFlowInfoPatchBuilder setCreated(@Nullable TimeStamp created) { if (created == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); } else { ObjectNode createdNode = instance.objectNode(); createdNode.put(TIME_KEY, created.getTime()); if (created.getActor() != null) { createdNode.put(ACTOR_KEY, created.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); } return this; } public DataFlowInfoPatchBuilder setLastModified(@Nullable TimeStamp lastModified) { if (lastModified == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); } ObjectNode lastModifiedNode = instance.objectNode(); lastModifiedNode.put(TIME_KEY, lastModified.getTime()); if (lastModified.getActor() != null) { lastModifiedNode.put(ACTOR_KEY, lastModified.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); return this; } diff --git 
a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java index 96e9c31288966..581616f54e9b9 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInfoPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.datajob; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.TimeStamp; @@ -14,10 +17,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - public class DataJobInfoPatchBuilder extends AbstractMultiFieldPatchBuilder implements CustomPropertiesPatchBuilderSupport { @@ -37,62 +36,80 @@ public class DataJobInfoPatchBuilder extends AbstractMultiFieldPatchBuilder(this); public DataJobInfoPatchBuilder setName(@Nonnull String name) { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); return this; } public DataJobInfoPatchBuilder setDescription(@Nullable String description) { if (description == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } - public DataJobInfoPatchBuilder setType(@Nonnull String type) { ObjectNode union = instance.objectNode(); union.set("string", instance.textNode(type)); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + TYPE_KEY, union)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + TYPE_KEY, union)); return this; } public DataJobInfoPatchBuilder setFlowUrn(@Nullable DataFlowUrn flowUrn) { if (flowUrn == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + FLOW_URN_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + FLOW_URN_KEY, null)); } else { - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + FLOW_URN_KEY, - instance.textNode(flowUrn.toString()))); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + FLOW_URN_KEY, + instance.textNode(flowUrn.toString()))); } return this; } public DataJobInfoPatchBuilder setCreated(@Nullable TimeStamp created) { if (created == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + CREATED_KEY, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + 
CREATED_KEY, null)); } else { ObjectNode createdNode = instance.objectNode(); createdNode.put(TIME_KEY, created.getTime()); if (created.getActor() != null) { createdNode.put(ACTOR_KEY, created.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + CREATED_KEY, createdNode)); } return this; } public DataJobInfoPatchBuilder setLastModified(@Nullable TimeStamp lastModified) { if (lastModified == null) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + LAST_MODIFIED_KEY, null)); } else { ObjectNode lastModifiedNode = instance.objectNode(); lastModifiedNode.put(TIME_KEY, lastModified.getTime()); if (lastModified.getActor() != null) { lastModifiedNode.put(ACTOR_KEY, lastModified.getActor().toString()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + LAST_MODIFIED_KEY, lastModifiedNode)); } return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java index 1ff6e817e40cf..0fb0454533fc0 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/datajob/DataJobInputOutputPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.datajob; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; import com.linkedin.common.Edge; @@ -12,11 +15,8 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobInputOutputPatchBuilder extends AbstractMultiFieldPatchBuilder { +public class DataJobInputOutputPatchBuilder + extends AbstractMultiFieldPatchBuilder { private static final String INPUT_DATA_JOB_EDGES_PATH_START = "/inputDatajobEdges/"; private static final String INPUT_DATASET_EDGES_PATH_START = "/inputDatasetEdges/"; private static final String OUTPUT_DATASET_EDGES_PATH_START = "/outputDatasetEdges/"; @@ -39,65 +39,96 @@ public class DataJobInputOutputPatchBuilder extends AbstractMultiFieldPatchBuild public DataJobInputOutputPatchBuilder addInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { ObjectNode value = createEdgeValue(dataJobUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, + value)); return this; } public DataJobInputOutputPatchBuilder removeInputDatajobEdge(@Nonnull DataJobUrn dataJobUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATA_JOB_EDGES_PATH_START + 
dataJobUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + INPUT_DATA_JOB_EDGES_PATH_START + dataJobUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addInputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { ObjectNode value = createEdgeValue(datasetUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); return this; } public DataJobInputOutputPatchBuilder removeInputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATASET_EDGES_PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + INPUT_DATASET_EDGES_PATH_START + datasetUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addOutputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { ObjectNode value = createEdgeValue(datasetUrn); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, + value)); return this; } public DataJobInputOutputPatchBuilder removeOutputDatasetEdge(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + OUTPUT_DATASET_EDGES_PATH_START + datasetUrn, + null)); return this; } public DataJobInputOutputPatchBuilder addInputDatasetField(@Nonnull Urn urn) { TextNode textNode = instance.textNode(urn.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, textNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, textNode)); return this; } public DataJobInputOutputPatchBuilder removeInputDatasetField(@Nonnull Urn urn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), INPUT_DATASET_FIELDS_PATH_START + urn, null)); return this; } public DataJobInputOutputPatchBuilder addOutputDatasetField(@Nonnull Urn urn) { TextNode textNode = instance.textNode(urn.toString()); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, textNode)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, textNode)); return this; } public DataJobInputOutputPatchBuilder removeOutputDatasetField(@Nonnull Urn urn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, null)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), OUTPUT_DATASET_FIELDS_PATH_START + urn, null)); return this; } // Full Edge modification - public DataJobInputOutputPatchBuilder addEdge(@Nonnull Edge edge, @Nonnull LineageDirection direction) { + public DataJobInputOutputPatchBuilder addEdge( + @Nonnull Edge edge, @Nonnull LineageDirection direction) { ObjectNode value = createEdgeValue(edge); String path = 
getEdgePath(edge, direction); @@ -105,7 +136,8 @@ public DataJobInputOutputPatchBuilder addEdge(@Nonnull Edge edge, @Nonnull Linea return this; } - public DataJobInputOutputPatchBuilder removeEdge(@Nonnull Edge edge, @Nonnull LineageDirection direction) { + public DataJobInputOutputPatchBuilder removeEdge( + @Nonnull Edge edge, @Nonnull LineageDirection direction) { String path = getEdgePath(edge, direction); pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), path, null)); @@ -115,11 +147,9 @@ public DataJobInputOutputPatchBuilder removeEdge(@Nonnull Edge edge, @Nonnull Li private ObjectNode createEdgeValue(@Nonnull Urn urn) { ObjectNode value = instance.objectNode(); ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()) - .put(ACTOR_KEY, UNKNOWN_ACTOR); + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); - value.put(DESTINATION_URN_KEY, urn.toString()) - .set(LAST_MODIFIED_KEY, auditStamp); + value.put(DESTINATION_URN_KEY, urn.toString()).set(LAST_MODIFIED_KEY, auditStamp); value.set(CREATED_KEY, auditStamp); return value; @@ -151,11 +181,11 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { lastModified .put(TIME_KEY, edge.getLastModified().getTime()) .put(ACTOR_KEY, edge.getLastModified().getActor().toString()); - if (edge.getLastModified() .getImpersonator() != null) { + if (edge.getLastModified().getImpersonator() != null) { lastModified.put(IMPERSONATOR_KEY, edge.getLastModified().getImpersonator().toString()); } - if (edge.getLastModified() .getMessage() != null) { - lastModified.put(MESSAGE_KEY, edge.getLastModified() .getMessage()); + if (edge.getLastModified().getMessage() != null) { + lastModified.put(MESSAGE_KEY, edge.getLastModified().getMessage()); } } value.set(LAST_MODIFIED_KEY, lastModified); @@ -171,12 +201,13 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { value.put(SOURCE_URN_KEY, edge.getSourceUrn().toString()); } - return value; } /** - * Determines Edge path based on supplied Urn, if not a valid entity type throws IllegalArgumentException + * Determines Edge path based on supplied Urn, if not a valid entity type throws + * IllegalArgumentException + * * @param edge * @return * @throws IllegalArgumentException if destinationUrn is an invalid entity type @@ -184,21 +215,25 @@ private ObjectNode createEdgeValue(@Nonnull Edge edge) { private String getEdgePath(@Nonnull Edge edge, LineageDirection direction) { Urn destinationUrn = edge.getDestinationUrn(); - if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.UPSTREAM.equals(direction)) { + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.UPSTREAM.equals(direction)) { return INPUT_DATASET_EDGES_PATH_START + destinationUrn; } - if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.DOWNSTREAM.equals(direction)) { + if (DATASET_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.DOWNSTREAM.equals(direction)) { return INPUT_DATASET_EDGES_PATH_START + destinationUrn; } - if (DATA_JOB_ENTITY_NAME.equals(destinationUrn.getEntityType()) && LineageDirection.UPSTREAM.equals(direction)) { + if (DATA_JOB_ENTITY_NAME.equals(destinationUrn.getEntityType()) + && LineageDirection.UPSTREAM.equals(direction)) { return INPUT_DATA_JOB_EDGES_PATH_START + destinationUrn; } // TODO: Output Data Jobs not supported by aspect, add here if this changes - throw new 
IllegalArgumentException(String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); + throw new IllegalArgumentException( + String.format("Unsupported entity type: %s", destinationUrn.getEntityType())); } @Override @@ -210,5 +245,4 @@ protected String getAspectName() { protected String getEntityType() { return DATA_JOB_ENTITY_NAME; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java index d8c9b9308ae57..f4329c84f33ff 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/DatasetPropertiesPatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; @@ -11,11 +14,8 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetPropertiesPatchBuilder extends AbstractMultiFieldPatchBuilder +public class DatasetPropertiesPatchBuilder + extends AbstractMultiFieldPatchBuilder implements CustomPropertiesPatchBuilderSupport { public static final String BASE_PATH = "/"; @@ -29,62 +29,78 @@ public class DatasetPropertiesPatchBuilder extends AbstractMultiFieldPatchBuilde private CustomPropertiesPatchBuilder customPropertiesPatchBuilder = new CustomPropertiesPatchBuilder<>(this); - public DatasetPropertiesPatchBuilder setExternalUrl(@Nullable String externalUrl) { if (externalUrl == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + EXTERNAL_URL_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + EXTERNAL_URL_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + EXTERNAL_URL_KEY, - instance.textNode(externalUrl))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + EXTERNAL_URL_KEY, + instance.textNode(externalUrl))); } return this; } public DatasetPropertiesPatchBuilder setName(@Nullable String name) { if (name == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + NAME_KEY, null)); + this.pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + NAME_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + NAME_KEY, instance.textNode(name))); } return this; } public DatasetPropertiesPatchBuilder setQualifiedName(@Nullable String qualifiedName) { if (qualifiedName == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + QUALIFIED_NAME_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + 
QUALIFIED_NAME_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + QUALIFIED_NAME_KEY, - instance.textNode(qualifiedName))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + QUALIFIED_NAME_KEY, + instance.textNode(qualifiedName))); } return this; } public DatasetPropertiesPatchBuilder setDescription(@Nullable String description) { if (description == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + DESCRIPTION_KEY, null)); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), BASE_PATH + DESCRIPTION_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + DESCRIPTION_KEY, - instance.textNode(description))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + DESCRIPTION_KEY, + instance.textNode(description))); } return this; } public DatasetPropertiesPatchBuilder setUri(@Nullable String uri) { if (uri == null) { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + URI_KEY, null)); + this.pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), BASE_PATH + URI_KEY, null)); } else { - this.pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), BASE_PATH + URI_KEY, instance.textNode(uri))); + this.pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), BASE_PATH + URI_KEY, instance.textNode(uri))); } return this; } @Override - public DatasetPropertiesPatchBuilder addCustomProperty(@Nonnull String key, @Nonnull String value) { + public DatasetPropertiesPatchBuilder addCustomProperty( + @Nonnull String key, @Nonnull String value) { this.customPropertiesPatchBuilder.addProperty(key, value); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java index 8b8dea275a3f4..6478b31d27ef0 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/EditableSchemaMetadataPatchBuilder.java @@ -1,21 +1,20 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.GlossaryTermAssociation; -import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.TagAssociation; +import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import datahub.client.patch.AbstractMultiFieldPatchBuilder; import datahub.client.patch.PatchOperationType; import javax.annotation.Nonnull; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - -public class EditableSchemaMetadataPatchBuilder extends - AbstractMultiFieldPatchBuilder { +public class EditableSchemaMetadataPatchBuilder + extends AbstractMultiFieldPatchBuilder { private static final String BASE_PATH = "/editableSchemaFieldInfo/"; private static final String TAGS_PATH_EXTENSION = 
"/globalTags/tags/"; @@ -24,39 +23,55 @@ public class EditableSchemaMetadataPatchBuilder extends private static final String URN_KEY = "urn"; private static final String CONTEXT_KEY = "context"; - public EditableSchemaMetadataPatchBuilder addTag(@Nonnull TagAssociation tag, @Nonnull String fieldPath) { + public EditableSchemaMetadataPatchBuilder addTag( + @Nonnull TagAssociation tag, @Nonnull String fieldPath) { ObjectNode value = instance.objectNode(); value.put(TAG_KEY, tag.getTag().toString()); if (tag.getContext() != null) { value.put(CONTEXT_KEY, tag.getContext()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag.getTag(), value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag.getTag(), + value)); return this; } - public EditableSchemaMetadataPatchBuilder removeTag(@Nonnull TagUrn tag, @Nonnull String fieldPath) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag, null)); + public EditableSchemaMetadataPatchBuilder removeTag( + @Nonnull TagUrn tag, @Nonnull String fieldPath) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + BASE_PATH + fieldPath + TAGS_PATH_EXTENSION + tag, + null)); return this; } - public EditableSchemaMetadataPatchBuilder addGlossaryTerm(@Nonnull GlossaryTermAssociation term, @Nonnull String fieldPath) { + public EditableSchemaMetadataPatchBuilder addGlossaryTerm( + @Nonnull GlossaryTermAssociation term, @Nonnull String fieldPath) { ObjectNode value = instance.objectNode(); value.put(URN_KEY, term.getUrn().toString()); if (term.getContext() != null) { value.put(CONTEXT_KEY, term.getContext()); } - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), - BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term.getUrn(), value)); + pathValues.add( + ImmutableTriple.of( + PatchOperationType.ADD.getValue(), + BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term.getUrn(), + value)); return this; } - public EditableSchemaMetadataPatchBuilder removeGlossaryTerm(@Nonnull GlossaryTermUrn term, @Nonnull String fieldPath) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), - BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term, null)); + public EditableSchemaMetadataPatchBuilder removeGlossaryTerm( + @Nonnull GlossaryTermUrn term, @Nonnull String fieldPath) { + pathValues.add( + ImmutableTriple.of( + PatchOperationType.REMOVE.getValue(), + BASE_PATH + fieldPath + TERMS_PATH_EXTENSION + term, + null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java index 29330bee01ef3..6ded8a25b4e22 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/dataset/UpstreamLineagePatchBuilder.java @@ -1,5 +1,8 @@ package datahub.client.patch.dataset; +import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.dataset.DatasetLineageType; @@ -9,12 +12,9 @@ 
import lombok.ToString; import org.apache.commons.lang3.tuple.ImmutableTriple; -import static com.fasterxml.jackson.databind.node.JsonNodeFactory.*; -import static com.linkedin.metadata.Constants.*; - - @ToString -public class UpstreamLineagePatchBuilder extends AbstractMultiFieldPatchBuilder { +public class UpstreamLineagePatchBuilder + extends AbstractMultiFieldPatchBuilder { private static final String PATH_START = "/upstreams/"; private static final String DATASET_KEY = "dataset"; @@ -23,21 +23,24 @@ public class UpstreamLineagePatchBuilder extends AbstractMultiFieldPatchBuilder< private static final String ACTOR_KEY = "actor"; private static final String TYPE_KEY = "type"; - public UpstreamLineagePatchBuilder addUpstream(@Nonnull DatasetUrn datasetUrn, @Nonnull DatasetLineageType lineageType) { + public UpstreamLineagePatchBuilder addUpstream( + @Nonnull DatasetUrn datasetUrn, @Nonnull DatasetLineageType lineageType) { ObjectNode value = instance.objectNode(); ObjectNode auditStamp = instance.objectNode(); - auditStamp.put(TIME_KEY, System.currentTimeMillis()) - .put(ACTOR_KEY, UNKNOWN_ACTOR); - value.put(DATASET_KEY, datasetUrn.toString()) + auditStamp.put(TIME_KEY, System.currentTimeMillis()).put(ACTOR_KEY, UNKNOWN_ACTOR); + value + .put(DATASET_KEY, datasetUrn.toString()) .put(TYPE_KEY, lineageType.toString()) .set(AUDIT_STAMP_KEY, auditStamp); - pathValues.add(ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.ADD.getValue(), PATH_START + datasetUrn, value)); return this; } public UpstreamLineagePatchBuilder removeUpstream(@Nonnull DatasetUrn datasetUrn) { - pathValues.add(ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); + pathValues.add( + ImmutableTriple.of(PatchOperationType.REMOVE.getValue(), PATH_START + datasetUrn, null)); return this; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java index 562ab715848b1..9f221bac15be4 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/patch/subtypesupport/CustomPropertiesPatchBuilderSupport.java @@ -4,14 +4,12 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * Interface to implement if an aspect supports custom properties changes - */ +/** Interface to implement if an aspect supports custom properties changes */ public interface CustomPropertiesPatchBuilderSupport> { /** * Adds a custom property + * * @param key * @param value * @return @@ -20,6 +18,7 @@ public interface CustomPropertiesPatchBuilderSupport The parent patch builder type */ public interface IntermediatePatchBuilder> { - /** - * Convenience method to return parent patch builder in functional callstack - */ + /** Convenience method to return parent patch builder in functional callstack */ T getParent(); /** - * Exposes subpath values to parent patch builder in Op, Path, Value triples. Should - * usually only be called by the parent patch builder class when constructing the path values. + * Exposes subpath values to parent patch builder in Op, Path, Value triples. 
Should usually only + * be called by the parent patch builder class when constructing the path values. */ List> getSubPaths(); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java index 7396fa2d926d3..a2692c432513e 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitter.java @@ -1,77 +1,64 @@ package datahub.client.rest; +import static com.linkedin.metadata.Constants.*; + +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.JacksonDataTemplateCodec; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.Emitter; +import datahub.client.MetadataResponseFuture; +import datahub.client.MetadataWriteResponse; +import datahub.event.EventFormatter; +import datahub.event.MetadataChangeProposalWrapper; +import datahub.event.UpsertAspectRequest; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; - import javax.annotation.concurrent.ThreadSafe; - +import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.concurrent.FutureCallback; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.entity.StringEntity; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.data.DataMap; -import com.linkedin.data.template.JacksonDataTemplateCodec; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.Emitter; -import datahub.client.MetadataResponseFuture; -import datahub.client.MetadataWriteResponse; -import datahub.event.EventFormatter; -import datahub.event.MetadataChangeProposalWrapper; -import datahub.event.UpsertAspectRequest; -import lombok.extern.slf4j.Slf4j; - -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.nio.client.HttpAsyncClient; import org.apache.http.ssl.SSLContextBuilder; -import java.security.KeyManagementException; -import java.security.KeyStoreException; -import java.security.NoSuchAlgorithmException; - -import static com.linkedin.metadata.Constants.*; - - @ThreadSafe @Slf4j /** * The REST emitter is a thin wrapper on top of the Apache HttpClient - * 
(https://hc.apache.org/httpcomponents-client-4.5.x/index.html) library. It supports non-blocking emission of - * metadata and handles the details of JSON serialization of metadata aspects over the wire. + * (https://hc.apache.org/httpcomponents-client-4.5.x/index.html) library. It supports non-blocking + * emission of metadata and handles the details of JSON serialization of metadata aspects over the + * wire. * - * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` method. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * ); - * You can also customize the underlying - * http client by calling the `customizeHttpAsyncClient` method on the builder. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) - * ); + *
<p>
Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` + * method. e.g. RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") ); You can also customize + * the underlying http client by calling the `customizeHttpAsyncClient` method on the builder. e.g. + * RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") .customizeHttpAsyncClient(c + * :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) ); */ public class RestEmitter implements Emitter { @@ -87,29 +74,36 @@ public class RestEmitter implements Emitter { /** * The default constructor, prefer using the `create` factory method. + * * @param config */ public RestEmitter(RestEmitterConfig config) { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); this.config = config; // Override httpClient settings with RestEmitter configs if present if (config.getTimeoutSec() != null) { HttpAsyncClientBuilder httpClientBuilder = this.config.getAsyncHttpClientBuilder(); - httpClientBuilder.setDefaultRequestConfig(RequestConfig.custom() - .setConnectTimeout(config.getTimeoutSec() * 1000) - .setSocketTimeout(config.getTimeoutSec() * 1000) - .build()); + httpClientBuilder.setDefaultRequestConfig( + RequestConfig.custom() + .setConnectTimeout(config.getTimeoutSec() * 1000) + .setSocketTimeout(config.getTimeoutSec() * 1000) + .build()); } if (config.isDisableSslVerification()) { HttpAsyncClientBuilder httpClientBuilder = this.config.getAsyncHttpClientBuilder(); try { httpClientBuilder - .setSSLContext(new SSLContextBuilder().loadTrustMaterial(null, TrustAllStrategy.INSTANCE).build()) + .setSSLContext( + new SSLContextBuilder().loadTrustMaterial(null, TrustAllStrategy.INSTANCE).build()) .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE); } catch (KeyManagementException | NoSuchAlgorithmException | KeyStoreException e) { throw new RuntimeException("Error while creating insecure http client", e); @@ -127,8 +121,10 @@ public RestEmitter(RestEmitterConfig config) { private static MetadataWriteResponse mapResponse(HttpResponse response) { MetadataWriteResponse.MetadataWriteResponseBuilder builder = MetadataWriteResponse.builder().underlyingResponse(response); - if ((response != null) && (response.getStatusLine() != null) && (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK - || response.getStatusLine().getStatusCode() == HttpStatus.SC_CREATED)) { + if ((response != null) + && (response.getStatusLine() != null) + && (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK + || response.getStatusLine().getStatusCode() == HttpStatus.SC_CREATED)) { builder.success(true); } else { builder.success(false); @@ -144,51 +140,49 @@ private static MetadataWriteResponse mapResponse(HttpResponse 
response) { length = contentStream.read(buffer); } builder.responseContent(result.toString("UTF-8")); - } catch (Exception e) { - // Catch all exceptions and still return a valid response object - log.warn("Wasn't able to convert response into a string", e); - } + } catch (Exception e) { + // Catch all exceptions and still return a valid response object + log.warn("Wasn't able to convert response into a string", e); + } return builder.build(); } - /** - * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` method. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") // coordinates of gms server - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * ); - * You can also customize the underlying http client by calling the `customizeHttpAsyncClient` method on the builder. - * e.g. - * RestEmitter emitter = RestEmitter.create(b :: b - * .server("http://localhost:8080") - * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") - * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) - * ); + * Constructing a REST Emitter follows a lambda-based fluent builder pattern using the `create` + * method. e.g. RestEmitter emitter = RestEmitter.create(b :: b .server("http://localhost:8080") + * // coordinates of gms server .extraHeaders(Collections.singletonMap("Custom-Header", + * "custom-val") ); You can also customize the underlying http client by calling the + * `customizeHttpAsyncClient` method on the builder. e.g. RestEmitter emitter = + * RestEmitter.create(b :: b .server("http://localhost:8080") + * .extraHeaders(Collections.singletonMap("Custom-Header", "custom-val") + * .customizeHttpAsyncClient(c :: c.setConnectionTimeToLive(30, TimeUnit.SECONDS)) ); + * * @param builderSupplier - * @return a constructed RestEmitter. Call #testConnection to make sure this emitter has a valid connection to the server + * @return a constructed RestEmitter. Call #testConnection to make sure this emitter has a valid + * connection to the server */ - public static RestEmitter create(Consumer builderSupplier) { - RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().with(builderSupplier).build()); + public static RestEmitter create( + Consumer builderSupplier) { + RestEmitter restEmitter = + new RestEmitter(RestEmitterConfig.builder().with(builderSupplier).build()); return restEmitter; } /** * Creates a RestEmitter with default settings. - * @return a constructed RestEmitter. - * Call #test_connection to validate that this emitter can communicate with the server. + * + * @return a constructed RestEmitter. Call #test_connection to validate that this emitter can + * communicate with the server. 
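   * <p>A minimal usage sketch (illustrative only, assuming a gms reachable at the default local
   * address; {@code mcpw} is a MetadataChangeProposalWrapper built elsewhere):
   *
   * <pre>
   * RestEmitter emitter = RestEmitter.createWithDefaults();
   * MetadataWriteResponse response = emitter.emit(mcpw, null).get();
   * </pre>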
*/ public static RestEmitter createWithDefaults() { // No-op creator -> creates RestEmitter using default settings - return create(b -> { - }); + return create(b -> {}); } @Override - public Future emit(MetadataChangeProposalWrapper mcpw, - Callback callback) throws IOException { - return emit(this.eventFormatter.convert(mcpw), callback); + public Future emit(MetadataChangeProposalWrapper mcpw, Callback callback) + throws IOException { + return emit(this.eventFormatter.convert(mcpw), callback); } @Override @@ -201,8 +195,9 @@ public Future emit(MetadataChangeProposal mcp, Callback c return this.postGeneric(this.ingestProposalUrl, serializedMCP, mcp, callback); } - private Future postGeneric(String urlStr, String payloadJson, Object originalRequest, - Callback callback) throws IOException { + private Future postGeneric( + String urlStr, String payloadJson, Object originalRequest, Callback callback) + throws IOException { HttpPost httpPost = new HttpPost(urlStr); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("X-RestLi-Protocol-Version", "2.0.0"); @@ -214,48 +209,49 @@ private Future postGeneric(String urlStr, String payloadJ httpPost.setEntity(new StringEntity(payloadJson)); AtomicReference responseAtomicReference = new AtomicReference<>(); CountDownLatch responseLatch = new CountDownLatch(1); - FutureCallback httpCallback = new FutureCallback() { - @Override - public void completed(HttpResponse response) { - MetadataWriteResponse writeResponse = null; - try { - writeResponse = mapResponse(response); - responseAtomicReference.set(writeResponse); - } catch (Exception e) { - // do nothing - } - responseLatch.countDown(); - if (callback != null) { - try { - callback.onCompletion(writeResponse); - } catch (Exception e) { - log.error("Error executing user callback on completion.", e); + FutureCallback httpCallback = + new FutureCallback() { + @Override + public void completed(HttpResponse response) { + MetadataWriteResponse writeResponse = null; + try { + writeResponse = mapResponse(response); + responseAtomicReference.set(writeResponse); + } catch (Exception e) { + // do nothing + } + responseLatch.countDown(); + if (callback != null) { + try { + callback.onCompletion(writeResponse); + } catch (Exception e) { + log.error("Error executing user callback on completion.", e); + } + } } - } - } - @Override - public void failed(Exception ex) { - if (callback != null) { - try { - callback.onFailure(ex); - } catch (Exception e) { - log.error("Error executing user callback on failure.", e); + @Override + public void failed(Exception ex) { + if (callback != null) { + try { + callback.onFailure(ex); + } catch (Exception e) { + log.error("Error executing user callback on failure.", e); + } + } } - } - } - @Override - public void cancelled() { - if (callback != null) { - try { - callback.onFailure(new RuntimeException("Cancelled")); - } catch (Exception e) { - log.error("Error executing user callback on failure due to cancellation.", e); + @Override + public void cancelled() { + if (callback != null) { + try { + callback.onFailure(new RuntimeException("Cancelled")); + } catch (Exception e) { + log.error("Error executing user callback on failure due to cancellation.", e); + } + } } - } - } - }; + }; Future requestFuture = httpClient.execute(httpPost, httpCallback); return new MetadataResponseFuture(requestFuture, responseAtomicReference, responseLatch); } @@ -286,8 +282,8 @@ public Future emit(List request, Cal return this.postOpenAPI(request, callback); } - private Future 
postOpenAPI(List payload, Callback callback) - throws IOException { + private Future postOpenAPI( + List payload, Callback callback) throws IOException { HttpPost httpPost = new HttpPost(ingestOpenApiUrl); httpPost.setHeader("Content-Type", "application/json"); httpPost.setHeader("Accept", "application/json"); @@ -298,48 +294,49 @@ private Future postOpenAPI(List payl httpPost.setEntity(new StringEntity(objectMapper.writeValueAsString(payload))); AtomicReference responseAtomicReference = new AtomicReference<>(); CountDownLatch responseLatch = new CountDownLatch(1); - FutureCallback httpCallback = new FutureCallback() { - @Override - public void completed(HttpResponse response) { - MetadataWriteResponse writeResponse = null; - try { - writeResponse = mapResponse(response); - responseAtomicReference.set(writeResponse); - } catch (Exception e) { - // do nothing - } - responseLatch.countDown(); - if (callback != null) { - try { - callback.onCompletion(writeResponse); - } catch (Exception e) { - log.error("Error executing user callback on completion.", e); + FutureCallback httpCallback = + new FutureCallback() { + @Override + public void completed(HttpResponse response) { + MetadataWriteResponse writeResponse = null; + try { + writeResponse = mapResponse(response); + responseAtomicReference.set(writeResponse); + } catch (Exception e) { + // do nothing + } + responseLatch.countDown(); + if (callback != null) { + try { + callback.onCompletion(writeResponse); + } catch (Exception e) { + log.error("Error executing user callback on completion.", e); + } + } } - } - } - @Override - public void failed(Exception ex) { - if (callback != null) { - try { - callback.onFailure(ex); - } catch (Exception e) { - log.error("Error executing user callback on failure.", e); + @Override + public void failed(Exception ex) { + if (callback != null) { + try { + callback.onFailure(ex); + } catch (Exception e) { + log.error("Error executing user callback on failure.", e); + } + } } - } - } - @Override - public void cancelled() { - if (callback != null) { - try { - callback.onFailure(new RuntimeException("Cancelled")); - } catch (Exception e) { - log.error("Error executing user callback on failure due to cancellation.", e); + @Override + public void cancelled() { + if (callback != null) { + try { + callback.onFailure(new RuntimeException("Cancelled")); + } catch (Exception e) { + log.error("Error executing user callback on failure due to cancellation.", e); + } + } } - } - } - }; + }; Future requestFuture = httpClient.execute(httpPost, httpCallback); return new MetadataResponseFuture(requestFuture, responseAtomicReference, responseLatch); } @@ -348,5 +345,4 @@ public void cancelled() { HttpAsyncClient getHttpClient() { return this.httpClient; } - } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java index f615c3ccb3e4f..7e24429213246 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/client/rest/RestEmitterConfig.java @@ -13,7 +13,6 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; - @Value @Builder @Slf4j @@ -24,46 +23,43 @@ public class RestEmitterConfig { public static final String DEFAULT_AUTH_TOKEN = null; public static final String CLIENT_VERSION_PROPERTY = 
"clientVersion"; - @Builder.Default - private final String server = "http://localhost:8080"; + @Builder.Default private final String server = "http://localhost:8080"; private final Integer timeoutSec; - @Builder.Default - private final boolean disableSslVerification = false; - - @Builder.Default - private final String token = DEFAULT_AUTH_TOKEN; + @Builder.Default private final boolean disableSslVerification = false; - @Builder.Default - @NonNull - private final Map extraHeaders = Collections.EMPTY_MAP; + @Builder.Default private final String token = DEFAULT_AUTH_TOKEN; + + @Builder.Default @NonNull private final Map extraHeaders = Collections.EMPTY_MAP; private final HttpAsyncClientBuilder asyncHttpClientBuilder; @Builder.Default - private final EventFormatter eventFormatter = new EventFormatter(EventFormatter.Format.PEGASUS_JSON); + private final EventFormatter eventFormatter = + new EventFormatter(EventFormatter.Format.PEGASUS_JSON); public static class RestEmitterConfigBuilder { private String getVersion() { - try ( - InputStream foo = this.getClass().getClassLoader().getResourceAsStream("client.properties")) { - Properties properties = new Properties(); - properties.load(foo); - return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); + try (InputStream foo = + this.getClass().getClassLoader().getResourceAsStream("client.properties")) { + Properties properties = new Properties(); + properties.load(foo); + return properties.getProperty(CLIENT_VERSION_PROPERTY, "unknown"); } catch (Exception e) { log.warn("Unable to find a version for datahub-client. Will set to unknown", e); return "unknown"; } } - private HttpAsyncClientBuilder asyncHttpClientBuilder = HttpAsyncClientBuilder - .create() - .setDefaultRequestConfig(RequestConfig.custom() - .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_SEC * 1000) - .setSocketTimeout(DEFAULT_READ_TIMEOUT_SEC * 1000) - .build()) - .setUserAgent("DataHub-RestClient/" + getVersion()); + private HttpAsyncClientBuilder asyncHttpClientBuilder = + HttpAsyncClientBuilder.create() + .setDefaultRequestConfig( + RequestConfig.custom() + .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_SEC * 1000) + .setSocketTimeout(DEFAULT_READ_TIMEOUT_SEC * 1000) + .build()) + .setUserAgent("DataHub-RestClient/" + getVersion()); public RestEmitterConfigBuilder with(Consumer builderFunction) { builderFunction.accept(this); @@ -76,4 +72,4 @@ public RestEmitterConfigBuilder customizeHttpAsyncClient( return this; } } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java index 5d42f814e1fe0..5238c19610601 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventFormatter.java @@ -1,5 +1,7 @@ package datahub.event; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -8,18 +10,12 @@ import com.linkedin.data.template.JacksonDataTemplateCodec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; - import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import lombok.SneakyThrows; -import static com.linkedin.metadata.Constants.*; - - -/** - * A class 
that helps to format Metadata events for transport - */ +/** A class that helps to format Metadata events for transport */ public class EventFormatter { private final ObjectMapper objectMapper; @@ -30,9 +26,13 @@ public class EventFormatter { public EventFormatter(Format serializationFormat) { this.serializationFormat = serializationFormat; objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); } @@ -42,21 +42,29 @@ public EventFormatter() { @SneakyThrows(URISyntaxException.class) public MetadataChangeProposal convert(MetadataChangeProposalWrapper mcpw) throws IOException { - - String serializedAspect = StringEscapeUtils.escapeJava(dataTemplateCodec.dataTemplateToString(mcpw.getAspect())); - MetadataChangeProposal mcp = new MetadataChangeProposal().setEntityType(mcpw.getEntityType()) - .setAspectName(mcpw.getAspectName()) - .setEntityUrn(Urn.createFromString(mcpw.getEntityUrn())) - .setChangeType(mcpw.getChangeType()); + + String serializedAspect = + StringEscapeUtils.escapeJava(dataTemplateCodec.dataTemplateToString(mcpw.getAspect())); + MetadataChangeProposal mcp = + new MetadataChangeProposal() + .setEntityType(mcpw.getEntityType()) + .setAspectName(mcpw.getAspectName()) + .setEntityUrn(Urn.createFromString(mcpw.getEntityUrn())) + .setChangeType(mcpw.getChangeType()); switch (this.serializationFormat) { - case PEGASUS_JSON: { - mcp.setAspect(new GenericAspect().setContentType("application/json") - .setValue(ByteString.unsafeWrap(serializedAspect.getBytes(StandardCharsets.UTF_8)))); - } - break; + case PEGASUS_JSON: + { + mcp.setAspect( + new GenericAspect() + .setContentType("application/json") + .setValue( + ByteString.unsafeWrap(serializedAspect.getBytes(StandardCharsets.UTF_8)))); + } + break; default: - throw new EventValidationException("Cannot handle serialization format " + this.serializationFormat); + throw new EventValidationException( + "Cannot handle serialization format " + this.serializationFormat); } return mcp; } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java index 43778cb325971..dff3791a64ec9 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/EventValidationException.java @@ -4,6 +4,7 @@ public class EventValidationException extends RuntimeException { public EventValidationException(String message) { super(message); } + public EventValidationException(String message, Throwable t) { super(message, t); } diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java index 083a4cb40471b..4eb33015e33f4 
100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/MetadataChangeProposalWrapper.java @@ -12,9 +12,9 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - /** * A class that makes it easy to create new {@link MetadataChangeProposal} events + * * @param */ @Value @@ -53,7 +53,11 @@ public interface Build { } public static class MetadataChangeProposalWrapperBuilder - implements EntityUrnStepBuilder, EntityTypeStepBuilder, ChangeStepBuilder, AspectStepBuilder, Build { + implements EntityUrnStepBuilder, + EntityTypeStepBuilder, + ChangeStepBuilder, + AspectStepBuilder, + Build { private String entityUrn; private String entityType; @@ -116,9 +120,11 @@ public Build aspect(DataTemplate aspect) { @Override public MetadataChangeProposalWrapper build() { try { - Objects.requireNonNull(this.aspectName, + Objects.requireNonNull( + this.aspectName, "aspectName could not be inferred from provided aspect and was not explicitly provided as an override"); - return new MetadataChangeProposalWrapper(entityType, entityUrn, changeType, aspect, aspectName); + return new MetadataChangeProposalWrapper( + entityType, entityUrn, changeType, aspect, aspectName); } catch (Exception e) { throw new EventValidationException("Failed to create a metadata change proposal event", e); } @@ -131,7 +137,8 @@ public Build aspectName(String aspectName) { } } - public static MetadataChangeProposalWrapper create(Consumer builderConsumer) { + public static MetadataChangeProposalWrapper create( + Consumer builderConsumer) { MetadataChangeProposalWrapperBuilder builder = new MetadataChangeProposalWrapperBuilder(); builderConsumer.accept(builder); return builder.build(); diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java index 6fe07ac448a80..fa7c21fd41d9a 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/StringEscapeUtils.java @@ -1,22 +1,17 @@ /** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance with the License. You may obtain a + * copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + *
<p>
http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. + *
<p>
Unless required by applicable law or agreed to in writing, software distributed under the + * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing permissions and + * limitations under the License. */ - package datahub.event; import java.io.IOException; @@ -26,21 +21,20 @@ public class StringEscapeUtils { - private StringEscapeUtils() { + private StringEscapeUtils() {} - } - /** * Worker method for the {@link #escapeJavaScript(String)} method. - * + * * @param out write to receieve the escaped string * @param str String to escape values in, may be null * @param escapeSingleQuote escapes single quotes if true * @param escapeForwardSlash TODO * @throws IOException if an IOException occurs */ - private static void escapeJavaStyleString(Writer out, String str, boolean escapeSingleQuote, - boolean escapeForwardSlash) throws IOException { + private static void escapeJavaStyleString( + Writer out, String str, boolean escapeSingleQuote, boolean escapeForwardSlash) + throws IOException { if (out == null) { throw new IllegalArgumentException("The Writer must not be null"); } else if (str != null) { @@ -56,35 +50,35 @@ private static void escapeJavaStyleString(Writer out, String str, boolean escape out.write("\\u00" + hex(ch)); } else if (ch < ' ') { switch (ch) { - case '\b': - out.write(92); - out.write(98); - break; - case '\t': - out.write(92); - out.write(116); - break; - case '\n': - out.write(92); - out.write(110); - break; - case '\u000b': + case '\b': + out.write(92); + out.write(98); + break; + case '\t': + out.write(92); + out.write(116); + break; + case '\n': + out.write(92); + out.write(110); + break; + case '\u000b': - case '\f': - out.write(92); - out.write(102); - break; - case '\r': - out.write(92); - out.write(114); - break; - default: - if (ch > 15) { - out.write("\\u00" + hex(ch)); - } else { - out.write("\\u000" + hex(ch)); - } - break; + case '\f': + out.write(92); + out.write(102); + break; + case '\r': + out.write(92); + out.write(114); + break; + default: + if (ch > 15) { + out.write("\\u00" + hex(ch)); + } else { + out.write("\\u000" + hex(ch)); + } + break; } } else { @@ -95,8 +89,7 @@ private static void escapeJavaStyleString(Writer out, String str, boolean escape } /** - * Returns an upper case hexadecimal String for the given - * character. + * Returns an upper case hexadecimal String for the given character. * * @param ch The character to convert. * @return An upper case hexadecimal String @@ -113,34 +106,35 @@ private static String hex(char ch) { * @param escapeForwardSlash TODO * @return the escaped string */ - private static String escapeJavaStyleString(String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) throws IOException { + private static String escapeJavaStyleString( + String str, boolean escapeSingleQuotes, boolean escapeForwardSlash) throws IOException { if (str == null) { return null; } else { StringWriter writer = new StringWriter(str.length() * 2); escapeJavaStyleString(writer, str, escapeSingleQuotes, escapeForwardSlash); return writer.toString(); - } } - + /** * Escapes the characters in a String using Java String rules. - *
<p>
- * Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) - *
<p>
- * So a tab becomes the characters '\\' and 't'. - *
<p>
- * The only difference between Java strings and JavaScript strings - * is that in JavaScript, a single quote must be escaped. - *
<p>
- * Example: + * + *
<p>
Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) + * + *
<p>
So a tab becomes the characters '\\' and 't'. + * + *
<p>
The only difference between Java strings and JavaScript strings is that in JavaScript, a + * single quote must be escaped. + * + *
<p>
Example: + * *
<pre>
    * input string: He didn't say, "Stop!"
    * output string: He didn't say, \"Stop!\"
    * </pre>
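   * <p>Sketch of an equivalent call: {@code String escaped = StringEscapeUtils.escapeJava(input);}.
   * Note that the method declares {@code throws IOException} even though it only writes to an
   * in-memory StringWriter, so callers must still handle or propagate the exception.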
* - * @param str String to escape values in, may be null + * @param str String to escape values in, may be null * @return String with escaped values, null if null string input */ public static String escapeJava(String str) throws IOException { diff --git a/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java b/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java index eb834ccea2b91..7dfb9d33f6948 100644 --- a/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java +++ b/metadata-integration/java/datahub-client/src/main/java/datahub/event/UpsertAspectRequest.java @@ -9,7 +9,6 @@ import lombok.Builder; import lombok.Value; - @JsonInclude(JsonInclude.Include.NON_NULL) @Value @Builder @@ -17,15 +16,21 @@ public class UpsertAspectRequest { @JsonProperty("entityType") - @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry") + @Schema( + required = true, + description = "The name of the entity matching with its definition in the entity registry") String entityType; @JsonProperty("entityUrn") - @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") + @Schema( + description = + "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") String entityUrn; @JsonProperty("entityKeyAspect") - @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null") + @Schema( + description = + "A key aspect referencing the entity to be updated, required if entityUrn is null") OneOfGenericAspectValue entityKeyAspect; @JsonProperty("aspect") @@ -33,7 +38,5 @@ public class UpsertAspectRequest { OneOfGenericAspectValue aspect; @JsonPOJOBuilder(withPrefix = "") - public static class UpsertAspectRequestBuilder { - - } + public static class UpsertAspectRequestBuilder {} } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java index e591fee3f68a8..01b39f77913bc 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/file/FileEmitterTest.java @@ -1,7 +1,20 @@ package datahub.client.file; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.core.exc.StreamReadException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.DatabindException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.JacksonDataTemplateCodec; +import com.linkedin.dataset.DatasetProperties; +import com.linkedin.mxe.MetadataChangeProposal; +import datahub.client.Callback; +import datahub.client.MetadataWriteResponse; +import datahub.event.MetadataChangeProposalWrapper; import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -10,47 +23,34 @@ import java.util.Map; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; - import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; -import 
com.fasterxml.jackson.core.exc.StreamReadException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.DatabindException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.data.DataMap; -import com.linkedin.data.template.JacksonDataTemplateCodec; -import com.linkedin.dataset.DatasetProperties; -import com.linkedin.mxe.MetadataChangeProposal; - -import datahub.client.Callback; -import datahub.client.MetadataWriteResponse; -import datahub.event.MetadataChangeProposalWrapper; - -import static com.linkedin.metadata.Constants.*; - - public class FileEmitterTest { private final ObjectMapper objectMapper; private final JacksonDataTemplateCodec dataTemplateCodec; public FileEmitterTest() { objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); dataTemplateCodec = new JacksonDataTemplateCodec(objectMapper.getFactory()); } - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Test public void testFileEmitter() throws IOException { - InputStream goldenFileStream = ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); + InputStream goldenFileStream = + ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); String tempRoot = tempFolder.getRoot().toString(); String outputFile = tempRoot + "/test.json"; @@ -61,24 +61,22 @@ public void testFileEmitter() throws IOException { emitter.close(); goldenFileStream = ClassLoader.getSystemResourceAsStream("golden_files/mcps_golden.json"); this.assertEqualJsonFile(goldenFileStream, outputFile); - } - private void assertEqualJsonFile(InputStream file1, String file2) throws StreamReadException, DatabindException, - IOException { - TypeReference>> typeRef = new TypeReference>>() { - }; + private void assertEqualJsonFile(InputStream file1, String file2) + throws StreamReadException, DatabindException, IOException { + TypeReference>> typeRef = + new TypeReference>>() {}; List> map1 = this.objectMapper.readValue(file1, typeRef); File f2 = new File(file2); List> map2 = this.objectMapper.readValue(f2, typeRef); Assert.assertEquals(map1, map2); } - private List getMCPs(InputStream fileStream) throws StreamReadException, DatabindException, - IOException { + private List getMCPs(InputStream fileStream) + throws StreamReadException, DatabindException, IOException { ArrayList mcps = new ArrayList(); - TypeReference[]> typeRef = new TypeReference[]>() { - }; + TypeReference[]> typeRef = new TypeReference[]>() {}; Map[] maps = this.objectMapper.readValue(fileStream, typeRef); for (Map map : maps) { String json = objectMapper.writeValueAsString(map); @@ -94,20 +92,24 @@ public void testSuccessCallback() throws Exception { String tempRoot = tempFolder.getRoot().toString(); String outputFile = tempRoot + "/testCallBack.json"; FileEmitter emitter = new FileEmitter(FileEmitterConfig.builder().fileName(outputFile).build()); - MetadataChangeProposalWrapper mcpw = 
getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); AtomicReference callbackResponse = new AtomicReference<>(); - Future future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - callbackResponse.set(response); - Assert.assertTrue(response.isSuccess()); - } - - @Override - public void onFailure(Throwable exception) { - Assert.fail("Should not be called"); - } - }); + Future future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + callbackResponse.set(response); + Assert.assertTrue(response.isSuccess()); + } + + @Override + public void onFailure(Throwable exception) { + Assert.fail("Should not be called"); + } + }); Assert.assertEquals(callbackResponse.get(), future.get()); } @@ -119,25 +121,27 @@ public void testFailCallback() throws Exception { String outputFile = tempRoot + "/testCallBack.json"; FileEmitter emitter = new FileEmitter(FileEmitterConfig.builder().fileName(outputFile).build()); emitter.close(); - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); - Future future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - - Assert.fail("Should not be called"); - } - - @Override - public void onFailure(Throwable exception) { - - } - }); + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + Future future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + + Assert.fail("Should not be called"); + } + + @Override + public void onFailure(Throwable exception) {} + }); Assert.assertFalse(future.get().isSuccess()); - } - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -145,5 +149,4 @@ private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String .aspect(new DatasetProperties().setDescription(description)) .build(); } - } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java index 520594381426f..f61121adf1395 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/AvroSerializerTest.java @@ -14,14 +14,12 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; - public class AvroSerializerTest { + @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); - @Rule - public TemporaryFolder tempFolder = new TemporaryFolder(); - - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -35,12 +33,14 @@ public void avroFileWrite() throws 
Exception { AvroSerializer avroSerializer = new AvroSerializer(); File file = tempFolder.newFile("data.avro"); - DatumWriter writer = new GenericDatumWriter(avroSerializer.getRecordSchema()); + DatumWriter writer = + new GenericDatumWriter(avroSerializer.getRecordSchema()); DataFileWriter dataFileWriter = new DataFileWriter(writer); dataFileWriter.create(avroSerializer.getRecordSchema(), file); String entityUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,logging_events,PROD)"; for (int i = 0; i < 10; ++i) { - MetadataChangeProposalWrapper metadataChangeProposalWrapper = getMetadataChangeProposalWrapper("Test description - " + i, entityUrn); + MetadataChangeProposalWrapper metadataChangeProposalWrapper = + getMetadataChangeProposalWrapper("Test description - " + i, entityUrn); GenericRecord record = avroSerializer.serialize(metadataChangeProposalWrapper); dataFileWriter.append(record); } @@ -48,7 +48,8 @@ public void avroFileWrite() throws Exception { File readerFile = file; DatumReader reader = new GenericDatumReader<>(avroSerializer.getRecordSchema()); - DataFileReader dataFileReader = new DataFileReader(readerFile, reader); + DataFileReader dataFileReader = + new DataFileReader(readerFile, reader); while (dataFileReader.hasNext()) { GenericRecord record = dataFileReader.next(); System.out.println(record.get("entityUrn")); diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java index dff109cf1e455..5161e6460b8a1 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/KafkaEmitterTest.java @@ -3,13 +3,20 @@ import static datahub.client.kafka.KafkaEmitter.DEFAULT_MCP_KAFKA_TOPIC; import static java.util.Collections.singletonList; +import com.linkedin.dataset.DatasetProperties; +import datahub.client.MetadataWriteResponse; +import datahub.client.kafka.containers.KafkaContainer; +import datahub.client.kafka.containers.SchemaRegistryContainer; +import datahub.client.kafka.containers.ZookeeperContainer; +import datahub.event.MetadataChangeProposalWrapper; +import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import java.io.IOException; import java.util.Objects; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.stream.Stream; - import org.apache.avro.Schema; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.admin.KafkaAdminClient; @@ -20,16 +27,6 @@ import org.testcontainers.containers.Network; import org.testng.Assert; -import com.linkedin.dataset.DatasetProperties; - -import datahub.client.MetadataWriteResponse; -import datahub.client.kafka.containers.KafkaContainer; -import datahub.client.kafka.containers.SchemaRegistryContainer; -import datahub.client.kafka.containers.ZookeeperContainer; -import datahub.event.MetadataChangeProposalWrapper; -import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; -import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; - public class KafkaEmitterTest { private static final String TOPIC = DEFAULT_MCP_KAFKA_TOPIC; @@ -47,11 +44,13 @@ public class KafkaEmitterTest { public static void confluentSetup() throws 
Exception { network = Network.newNetwork(); zookeeperContainer = new ZookeeperContainer().withNetwork(network); - kafkaContainer = new KafkaContainer(zookeeperContainer.getInternalUrl()) + kafkaContainer = + new KafkaContainer(zookeeperContainer.getInternalUrl()) .withNetwork(network) .dependsOn(zookeeperContainer); - schemaRegistryContainer = new SchemaRegistryContainer(zookeeperContainer.getInternalUrl(), - kafkaContainer.getInternalBootstrapServers()) + schemaRegistryContainer = + new SchemaRegistryContainer( + zookeeperContainer.getInternalUrl(), kafkaContainer.getInternalBootstrapServers()) .withNetwork(network) .dependsOn(zookeeperContainer, kafkaContainer); schemaRegistryContainer.start(); @@ -78,8 +77,9 @@ public void testConnection() throws IOException, ExecutionException, Interrupted public void testSend() throws IOException, InterruptedException, ExecutionException { @SuppressWarnings("rawtypes") - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", - "urn:li:dataset:(urn:li:dataPlatform:spark,foo.bar,PROD)"); + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper( + "Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:spark,foo.bar,PROD)"); Future future = emitter.emit(mcpw); MetadataWriteResponse response = future.get(); System.out.println("Response: " + response); @@ -95,26 +95,41 @@ private static AdminClient createAdminClient(String bootstrap) { private static void registerSchemaRegistryTypes() throws IOException, RestClientException { Schema mcpSchema = new AvroSerializer().getRecordSchema(); - CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryContainer.getUrl(), 1000); + CachedSchemaRegistryClient schemaRegistryClient = + new CachedSchemaRegistryClient(schemaRegistryContainer.getUrl(), 1000); schemaRegistryClient.register(mcpSchema.getFullName(), mcpSchema); } private static String createTopics(Stream bootstraps) { short replicationFactor = 1; int partitions = 1; - return bootstraps.parallel().map(bootstrap -> { - try { - createAdminClient(bootstrap).createTopics(singletonList(new NewTopic(TOPIC, partitions, replicationFactor))).all().get(); - return bootstrap; - } catch (RuntimeException | InterruptedException | ExecutionException ex) { - return null; - } - }).filter(Objects::nonNull).findFirst().get(); + return bootstraps + .parallel() + .map( + bootstrap -> { + try { + createAdminClient(bootstrap) + .createTopics(singletonList(new NewTopic(TOPIC, partitions, replicationFactor))) + .all() + .get(); + return bootstrap; + } catch (RuntimeException | InterruptedException | ExecutionException ex) { + return null; + } + }) + .filter(Objects::nonNull) + .findFirst() + .get(); } @SuppressWarnings("rawtypes") - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { - return MetadataChangeProposalWrapper.builder().entityType("dataset").entityUrn(entityUrn).upsert() - .aspect(new DatasetProperties().setDescription(description)).build(); + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { + return MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn(entityUrn) + .upsert() + .aspect(new DatasetProperties().setDescription(description)) + .build(); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java 
b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java index 5f26748cb677c..86ced5ce620cd 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/KafkaContainer.java @@ -1,21 +1,17 @@ package datahub.client.kafka.containers; +import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; + import com.github.dockerjava.api.command.InspectContainerResponse; +import java.nio.charset.StandardCharsets; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; import org.testcontainers.images.builder.Transferable; import org.testcontainers.utility.TestcontainersConfiguration; -import java.nio.charset.StandardCharsets; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; - -/** - * This container wraps Confluent Kafka. - * - */ +/** This container wraps Confluent Kafka. */ public class KafkaContainer extends GenericContainer { private static final String STARTER_SCRIPT = "/testcontainers_start.sh"; @@ -47,11 +43,17 @@ public KafkaContainer(String confluentPlatformVersion, String zookeeperConnect) // with itself via internal // listener when KAFKA_INTER_BROKER_LISTENER_NAME is set, otherwise Kafka will // try to use the advertised listener - withEnv("KAFKA_LISTENERS", - "PLAINTEXT://0.0.0.0:" + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT - + ",BROKER://0.0.0.0:" + KAFKA_INTERNAL_PORT - + ",BROKER_LOCAL://0.0.0.0:" + KAFKA_LOCAL_PORT); - withEnv("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,BROKER_LOCAL:PLAINTEXT"); + withEnv( + "KAFKA_LISTENERS", + "PLAINTEXT://0.0.0.0:" + + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT + + ",BROKER://0.0.0.0:" + + KAFKA_INTERNAL_PORT + + ",BROKER_LOCAL://0.0.0.0:" + + KAFKA_LOCAL_PORT); + withEnv( + "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", + "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT,BROKER_LOCAL:PLAINTEXT"); withEnv("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER"); withEnv("KAFKA_BROKER_ID", "1"); @@ -68,8 +70,9 @@ public Stream getBootstrapServers() { if (port == PORT_NOT_ASSIGNED) { throw new IllegalStateException("You should start Kafka container first"); } - return Stream.of(String.format("PLAINTEXT://%s:%s", getHost(), port), - String.format("PLAINTEXT://localhost:%s", getMappedPort(KAFKA_LOCAL_PORT))); + return Stream.of( + String.format("PLAINTEXT://%s:%s", getHost(), port), + String.format("PLAINTEXT://localhost:%s", getMappedPort(KAFKA_LOCAL_PORT))); } public String getInternalBootstrapServers() { @@ -78,7 +81,10 @@ public String getInternalBootstrapServers() { @Override protected void doStart() { - withCommand("sh", "-c", "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + "sh " + STARTER_SCRIPT); + withCommand( + "sh", + "-c", + "while [ ! 
-f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + "sh " + STARTER_SCRIPT); super.doStart(); } @@ -100,22 +106,33 @@ protected void containerIsStarting(InspectContainerResponse containerInfo, boole String command = "#!/bin/bash \n"; command += "export KAFKA_ZOOKEEPER_CONNECT='" + zookeeperConnect + "'\n"; - command += "export KAFKA_ADVERTISED_LISTENERS='" + Stream - .concat(Stream.of("PLAINTEXT://" + networkAlias + ":" + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT, + command += + "export KAFKA_ADVERTISED_LISTENERS='" + + Stream.concat( + Stream.of( + "PLAINTEXT://" + + networkAlias + + ":" + + KAFKA_INTERNAL_ADVERTISED_LISTENERS_PORT, "BROKER_LOCAL://localhost:" + getMappedPort(KAFKA_LOCAL_PORT)), - containerInfo.getNetworkSettings().getNetworks().values().stream() - .map(it -> "BROKER://" + it.getIpAddress() + ":" + KAFKA_INTERNAL_PORT)) - .collect(Collectors.joining(",")) + "'\n"; + containerInfo.getNetworkSettings().getNetworks().values().stream() + .map(it -> "BROKER://" + it.getIpAddress() + ":" + KAFKA_INTERNAL_PORT)) + .collect(Collectors.joining(",")) + + "'\n"; command += ". /etc/confluent/docker/bash-config \n"; command += "/etc/confluent/docker/configure \n"; command += "/etc/confluent/docker/launch \n"; - copyFileToContainer(Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT); + copyFileToContainer( + Transferable.of(command.getBytes(StandardCharsets.UTF_8), 700), STARTER_SCRIPT); } private static String getKafkaContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration.getInstance().getProperties().getOrDefault("kafka.container.image", - "confluentinc/cp-kafka:" + confluentPlatformVersion); + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "kafka.container.image", "confluentinc/cp-kafka:" + confluentPlatformVersion); } -} \ No newline at end of file +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java index 907a4558b60d9..5c0223e580575 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/SchemaRegistryContainer.java @@ -5,46 +5,48 @@ import java.io.IOException; import java.time.Duration; - import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; import org.testcontainers.utility.TestcontainersConfiguration; - public class SchemaRegistryContainer extends GenericContainer { - private static final int SCHEMA_REGISTRY_INTERNAL_PORT = 8081; - - private final String networkAlias = "schema-registry"; - - public SchemaRegistryContainer(String zookeeperConnect, String kafkaBootstrap) throws IOException { - this(CONFLUENT_PLATFORM_VERSION, zookeeperConnect, kafkaBootstrap); - } - - public SchemaRegistryContainer(String confluentPlatformVersion, String zookeeperConnect, String kafkaBootstrap) throws IOException { - super(getSchemaRegistryContainerImage(confluentPlatformVersion)); - - addEnv("SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL", zookeeperConnect); - addEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost"); - addEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", kafkaBootstrap); - - withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT); - 
withNetworkAliases(networkAlias); - - waitingFor(new HttpWaitStrategy().forPath("/subjects").withStartupTimeout(Duration.ofMinutes(2))); - } - - public String getUrl() { - return format("http://%s:%d", this.getContainerIpAddress(), this.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT)); - } - - - private static String getSchemaRegistryContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration - .getInstance().getProperties().getOrDefault( - "schemaregistry.container.image", - "confluentinc/cp-schema-registry:" + confluentPlatformVersion - ); - } + private static final int SCHEMA_REGISTRY_INTERNAL_PORT = 8081; + + private final String networkAlias = "schema-registry"; + + public SchemaRegistryContainer(String zookeeperConnect, String kafkaBootstrap) + throws IOException { + this(CONFLUENT_PLATFORM_VERSION, zookeeperConnect, kafkaBootstrap); + } + + public SchemaRegistryContainer( + String confluentPlatformVersion, String zookeeperConnect, String kafkaBootstrap) + throws IOException { + super(getSchemaRegistryContainerImage(confluentPlatformVersion)); + + addEnv("SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL", zookeeperConnect); + addEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost"); + addEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", kafkaBootstrap); + + withExposedPorts(SCHEMA_REGISTRY_INTERNAL_PORT); + withNetworkAliases(networkAlias); + + waitingFor( + new HttpWaitStrategy().forPath("/subjects").withStartupTimeout(Duration.ofMinutes(2))); + } + + public String getUrl() { + return format( + "http://%s:%d", + this.getContainerIpAddress(), this.getMappedPort(SCHEMA_REGISTRY_INTERNAL_PORT)); + } + + private static String getSchemaRegistryContainerImage(String confluentPlatformVersion) { + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "schemaregistry.container.image", + "confluentinc/cp-schema-registry:" + confluentPlatformVersion); + } } - - diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java index efa79724f114b..93ba828096282 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/Utils.java @@ -6,18 +6,17 @@ final class Utils { public static final String CONFLUENT_PLATFORM_VERSION = "7.4.0"; - private Utils() { - } + private Utils() {} /** - * Retrieves a random port that is currently not in use on this machine. - * - * @return a free port - * @throws IOException wraps the exceptions which may occur during this method call. - */ - static int getRandomFreePort() throws IOException { - @SuppressWarnings("resource") - ServerSocket serverSocket = new ServerSocket(0); - return serverSocket.getLocalPort(); - } -} \ No newline at end of file + * Retrieves a random port that is currently not in use on this machine. + * + * @return a free port + * @throws IOException wraps the exceptions which may occur during this method call. 
+ */ + static int getRandomFreePort() throws IOException { + @SuppressWarnings("resource") + ServerSocket serverSocket = new ServerSocket(0); + return serverSocket.getLocalPort(); + } +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java index 5bfc5055df68a..538092d810ce1 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/kafka/containers/ZookeeperContainer.java @@ -1,48 +1,48 @@ package datahub.client.kafka.containers; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; -import org.testcontainers.utility.TestcontainersConfiguration; +import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; +import static java.lang.String.format; import java.io.IOException; import java.util.HashMap; - -import static datahub.client.kafka.containers.Utils.CONFLUENT_PLATFORM_VERSION; -import static java.lang.String.format; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.HostPortWaitStrategy; +import org.testcontainers.utility.TestcontainersConfiguration; public class ZookeeperContainer extends GenericContainer { - private static final int ZOOKEEPER_INTERNAL_PORT = 2181; - private static final int ZOOKEEPER_TICK_TIME = 2000; - - private final String networkAlias = "zookeeper"; - - public ZookeeperContainer() throws IOException { - this(CONFLUENT_PLATFORM_VERSION); - } - - public ZookeeperContainer(String confluentPlatformVersion) throws IOException { - super(getZookeeperContainerImage(confluentPlatformVersion)); - - HashMap env = new HashMap(); - env.put("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_INTERNAL_PORT)); - env.put("ZOOKEEPER_TICK_TIME", Integer.toString(ZOOKEEPER_TICK_TIME)); - withEnv(env); - - addExposedPort(ZOOKEEPER_INTERNAL_PORT); - withNetworkAliases(networkAlias); - waitingFor(new HostPortWaitStrategy()); - } - - public String getInternalUrl() { - return format("%s:%d", networkAlias, ZOOKEEPER_INTERNAL_PORT); - } - - private static String getZookeeperContainerImage(String confluentPlatformVersion) { - return (String) TestcontainersConfiguration - .getInstance().getProperties().getOrDefault( - "zookeeper.container.image", - "confluentinc/cp-zookeeper:" + confluentPlatformVersion - ); - } -} \ No newline at end of file + private static final int ZOOKEEPER_INTERNAL_PORT = 2181; + private static final int ZOOKEEPER_TICK_TIME = 2000; + + private final String networkAlias = "zookeeper"; + + public ZookeeperContainer() throws IOException { + this(CONFLUENT_PLATFORM_VERSION); + } + + public ZookeeperContainer(String confluentPlatformVersion) throws IOException { + super(getZookeeperContainerImage(confluentPlatformVersion)); + + HashMap env = new HashMap(); + env.put("ZOOKEEPER_CLIENT_PORT", Integer.toString(ZOOKEEPER_INTERNAL_PORT)); + env.put("ZOOKEEPER_TICK_TIME", Integer.toString(ZOOKEEPER_TICK_TIME)); + withEnv(env); + + addExposedPort(ZOOKEEPER_INTERNAL_PORT); + withNetworkAliases(networkAlias); + waitingFor(new HostPortWaitStrategy()); + } + + public String getInternalUrl() { + return format("%s:%d", networkAlias, ZOOKEEPER_INTERNAL_PORT); + } + + private static String 
getZookeeperContainerImage(String confluentPlatformVersion) { + return (String) + TestcontainersConfiguration.getInstance() + .getProperties() + .getOrDefault( + "zookeeper.container.image", + "confluentinc/cp-zookeeper:" + confluentPlatformVersion); + } +} diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java index c90d3f0d2179e..1d387acb0ce12 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/patch/PatchTest.java @@ -1,5 +1,7 @@ package datahub.client.patch; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Edge; import com.linkedin.common.FabricType; @@ -38,26 +40,25 @@ import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; -import static com.linkedin.metadata.Constants.*; - - @RunWith(MockitoJUnitRunner.class) public class PatchTest { - /** - * Examples for running patches, tests set to ignore as they target a GMS running on localhost - */ - + /** Examples for running patches, tests set to ignore as they target a GMS running on localhost */ @Test @Ignore public void testLocalUpstream() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addUpstream(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), - DatasetLineageType.TRANSFORMED) - .build(); + MetadataChangeProposal upstreamPatch = + new UpstreamLineagePatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addUpstream( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"), + DatasetLineageType.TRANSFORMED) + .build(); Future response = restEmitter.emit(upstreamPatch); System.out.println(response.get().getResponseContent()); @@ -72,10 +73,15 @@ public void testLocalUpstream() { public void testLocalUpstreamRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal upstreamPatch = new UpstreamLineagePatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeUpstream(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .build(); + MetadataChangeProposal upstreamPatch = + new UpstreamLineagePatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeUpstream( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .build(); Future response = restEmitter.emit(upstreamPatch); System.out.println(response.get().getResponseContent()); @@ -92,10 +98,13 @@ public void testLocalEditableSchemaMetadataTag() { try { TagAssociation tagAssociation = new TagAssociation(); tagAssociation.setTag(new TagUrn("Legacy")); - MetadataChangeProposal fieldTagPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addTag(tagAssociation, "field_foo") - .build(); + MetadataChangeProposal 
fieldTagPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addTag(tagAssociation, "field_foo") + .build(); Future response = restEmitter.emit(fieldTagPatch); System.out.println(response.get().getResponseContent()); @@ -111,10 +120,13 @@ public void testLocalEditableSchemaMetadataTagRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { TagUrn urn = new TagUrn("Legacy"); - MetadataChangeProposal fieldTagPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeTag(urn, "field_foo") - .build(); + MetadataChangeProposal fieldTagPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeTag(urn, "field_foo") + .build(); Future response = restEmitter.emit(fieldTagPatch); System.out.println(response.get().getResponseContent()); @@ -132,10 +144,13 @@ public void testLocalEditableSchemaMetadataTerm() { GlossaryTermAssociation termAssociation = new GlossaryTermAssociation(); termAssociation.setUrn(new GlossaryTermUrn("CustomerAccount")); - MetadataChangeProposal fieldTermPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .addGlossaryTerm(termAssociation, "field_foo") - .build(); + MetadataChangeProposal fieldTermPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .addGlossaryTerm(termAssociation, "field_foo") + .build(); Future response = restEmitter.emit(fieldTermPatch); System.out.println(response.get().getResponseContent()); @@ -152,10 +167,13 @@ public void testLocalEditableSchemaMetadataTermRemove() { try { GlossaryTermUrn urn = new GlossaryTermUrn("CustomerAccount"); - MetadataChangeProposal fieldTermPatch = new EditableSchemaMetadataPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) - .removeGlossaryTerm(urn, "field_foo") - .build(); + MetadataChangeProposal fieldTermPatch = + new EditableSchemaMetadataPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)")) + .removeGlossaryTerm(urn, "field_foo") + .build(); Future response = restEmitter.emit(fieldTermPatch); System.out.println(response.get().getResponseContent()); @@ -168,16 +186,18 @@ public void testLocalEditableSchemaMetadataTermRemove() { @Test @Ignore public void testLocalOwnership() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .addOwner(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder() + .urn(datasetUrn) + .addOwner(new CorpuserUrn("gdoe"), 
OwnershipType.TECHNICAL_OWNER) + .build(); System.out.println(ownershipPatch.toString()); Future response = fileEmitter.emit(ownershipPatch); response.get(); @@ -193,16 +213,15 @@ public void testLocalOwnership() { @Test @Ignore public void testLocalOwnershipRemove() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .removeOwner(new CorpuserUrn("gdoe")) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder().urn(datasetUrn).removeOwner(new CorpuserUrn("gdoe")).build(); System.out.println(ownershipPatch.toString()); Future response = fileEmitter.emit(ownershipPatch); response.get(); @@ -218,16 +237,18 @@ public void testLocalOwnershipRemove() { @Test @Ignore public void testLocalOwnershipRemoveType() { - FileEmitter fileEmitter = new FileEmitter(FileEmitterConfig.builder() - .fileName("test_mcp.json").build()); + FileEmitter fileEmitter = + new FileEmitter(FileEmitterConfig.builder().fileName("test_mcp.json").build()); RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal ownershipPatch = new OwnershipPatchBuilder() - .urn(datasetUrn) - .removeOwnershipType(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal ownershipPatch = + new OwnershipPatchBuilder() + .urn(datasetUrn) + .removeOwnershipType(new CorpuserUrn("gdoe"), OwnershipType.TECHNICAL_OWNER) + .build(); System.out.println(ownershipPatch.toString()); Future response = fileEmitter.emit(ownershipPatch); response.get(); @@ -245,14 +266,17 @@ public void testLocalOwnershipRemoveType() { public void testLocalDataJobInfo() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal jobInfoToPatch = new DataJobInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .setDescription("something") - .setName("name") - .setType("type") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + MetadataChangeProposal jobInfoToPatch = + new DataJobInfoPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .setDescription("something") + .setName("name") + .setType("type") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future response = restEmitter.emit(jobInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -267,12 +291,15 @@ public void testLocalDataJobInfo() { public void testLocalDataJobInfoRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal jobInfoToPatch = new DataJobInfoPatchBuilder() - 
.urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .setDescription(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + MetadataChangeProposal jobInfoToPatch = + new DataJobInfoPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .setDescription(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future response = restEmitter.emit(jobInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -288,14 +315,16 @@ public void testLocalDatasetProperties() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal datasetPropertiesToPatch = new DatasetPropertiesPatchBuilder() - .urn(datasetUrn) - .setDescription("something") - .setName("name") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal datasetPropertiesToPatch = + new DatasetPropertiesPatchBuilder() + .urn(datasetUrn) + .setDescription("something") + .setName("name") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future response = restEmitter.emit(datasetPropertiesToPatch); System.out.println(response.get().getResponseContent()); @@ -311,14 +340,16 @@ public void testLocalDatasetPropertiesRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); - MetadataChangeProposal datasetPropertiesToPatch = new DatasetPropertiesPatchBuilder() - .urn(datasetUrn) - .setDescription(null) - .setName(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("hive"), "SampleHiveDataset", FabricType.PROD); + MetadataChangeProposal datasetPropertiesToPatch = + new DatasetPropertiesPatchBuilder() + .urn(datasetUrn) + .setDescription(null) + .setName(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future response = restEmitter.emit(datasetPropertiesToPatch); System.out.println(response.get().getResponseContent()); @@ -333,14 +364,15 @@ public void testLocalDatasetPropertiesRemove() { public void testLocalDataFlowInfo() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal flowInfoToPatch = new DataFlowInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) - .setDescription("something") - .setName("name") - .setProject("project") - .addCustomProperty("prop1", "propVal1") - .addCustomProperty("prop2", "propVal2") - .build(); + MetadataChangeProposal flowInfoToPatch = + new DataFlowInfoPatchBuilder() + .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) + .setDescription("something") + .setName("name") + .setProject("project") + .addCustomProperty("prop1", "propVal1") + .addCustomProperty("prop2", "propVal2") + .build(); Future response = restEmitter.emit(flowInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -355,13 +387,14 @@ public void testLocalDataFlowInfo() { public void 
testLocalDataFlowInfoRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal flowInfoToPatch = new DataFlowInfoPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) - .setDescription(null) - .setProject(null) - .removeCustomProperty("prop1") - .removeCustomProperty("prop2") - .build(); + MetadataChangeProposal flowInfoToPatch = + new DataFlowInfoPatchBuilder() + .urn(UrnUtils.getUrn("urn:li:dataFlow:(orchestrator,flowId,cluster)")) + .setDescription(null) + .setProject(null) + .removeCustomProperty("prop1") + .removeCustomProperty("prop2") + .build(); Future response = restEmitter.emit(flowInfoToPatch); System.out.println(response.get().getResponseContent()); @@ -376,14 +409,27 @@ public void testLocalDataFlowInfoRemove() { public void testLocalDataJobInputAdd() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .addInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .addOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .addInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .addInputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .addOutputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .addInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .addOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .addInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .addInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .addOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -398,14 +444,27 @@ public void testLocalDataJobInputAdd() { public void testLocalDataJobInputRemove() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .removeInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .removeOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .removeInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - 
.removeInputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .removeOutputDatasetField(UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .removeInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .removeOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .removeInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .removeInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .removeOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -420,24 +479,54 @@ public void testLocalDataJobInputRemove() { public void testLocalDataJobInputAddEdge() { RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().build()); try { - Edge inputDataset = new Edge() - .setDestinationUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - Edge outputDataset = new Edge() - .setDestinationUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - Edge inputDataJob = new Edge() - .setDestinationUrn(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) - .setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) - .setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder() - .urn(UrnUtils.getUrn("urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) - .addEdge(inputDataset, LineageDirection.UPSTREAM) - .addEdge(outputDataset, LineageDirection.DOWNSTREAM) - .addEdge(inputDataJob, LineageDirection.UPSTREAM) - .build(); + Edge inputDataset = + new Edge() + .setDestinationUrn( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + Edge outputDataset = + new Edge() + .setDestinationUrn( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .setCreated( + new 
AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + Edge inputDataJob = + new Edge() + .setDestinationUrn( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId2)")) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))) + .setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(UNKNOWN_ACTOR))); + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(orchestrator,flowId,cluster),jobId)")) + .addEdge(inputDataset, LineageDirection.UPSTREAM) + .addEdge(outputDataset, LineageDirection.DOWNSTREAM) + .addEdge(inputDataJob, LineageDirection.UPSTREAM) + .build(); Future response = restEmitter.emit(dataJobIOPatch); System.out.println(response.get().getResponseContent()); @@ -446,5 +535,4 @@ public void testLocalDataJobInputAddEdge() { System.out.println(Arrays.asList(e.getStackTrace())); } } - } diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java index 190ca8a8313c2..657669d19439c 100644 --- a/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java +++ b/metadata-integration/java/datahub-client/src/test/java/datahub/client/rest/RestEmitterTest.java @@ -1,5 +1,8 @@ package datahub.client.rest; +import static com.linkedin.metadata.Constants.*; +import static org.mockserver.model.HttpRequest.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.dataset.DatasetProperties; @@ -28,9 +31,7 @@ import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; - import javax.net.ssl.SSLHandshakeException; - import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; @@ -50,24 +51,16 @@ import org.mockserver.model.HttpRequest; import org.mockserver.model.RequestDefinition; -import static com.linkedin.metadata.Constants.*; -import static org.mockserver.model.HttpRequest.*; - - @RunWith(MockitoJUnitRunner.class) public class RestEmitterTest { - @Mock - HttpAsyncClientBuilder mockHttpClientFactory; + @Mock HttpAsyncClientBuilder mockHttpClientFactory; - @Mock - CloseableHttpAsyncClient mockClient; + @Mock CloseableHttpAsyncClient mockClient; - @Captor - ArgumentCaptor postArgumentCaptor; + @Captor ArgumentCaptor postArgumentCaptor; - @Captor - ArgumentCaptor callbackCaptor; + @Captor ArgumentCaptor callbackCaptor; @Before public void setupMocks() { @@ -79,7 +72,8 @@ public void testPost() throws URISyntaxException, IOException { RestEmitter emitter = RestEmitter.create(b -> b.asyncHttpClientBuilder(mockHttpClientFactory)); MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)"); + getMetadataChangeProposalWrapper( + "Test Dataset", "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)"); emitter.emit(mcp, null); Mockito.verify(mockClient).execute(postArgumentCaptor.capture(), callbackCaptor.capture()); 
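      // Minimal sketch of the emit round trip these tests exercise, assuming a GMS
      // reachable at "http://localhost:8080" (placeholder URL, not part of the patch);
      // every call used below also appears elsewhere in this patch.
      RestEmitter sketchEmitter = RestEmitter.create(b -> b.server("http://localhost:8080"));
      MetadataChangeProposalWrapper sketchMcp =
          MetadataChangeProposalWrapper.builder()
              .entityType("dataset")
              .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)")
              .upsert()
              .aspect(new DatasetProperties().setDescription("Test Dataset"))
              .build();
      // emit() returns a Future; get() blocks until the write is acknowledged.
      MetadataWriteResponse sketchResponse = sketchEmitter.emit(sketchMcp, null).get();
      Assert.assertTrue(sketchResponse.isSuccess());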
FutureCallback callback = callbackCaptor.getValue(); @@ -90,26 +84,32 @@ public void testPost() throws URISyntaxException, IOException { byte[] contentBytes = new byte[(int) testPost.getEntity().getContentLength()]; is.read(contentBytes); String contentString = new String(contentBytes, StandardCharsets.UTF_8); - String expectedContent = "{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset\\\"}\"}}}"; + String expectedContent = + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset\\\"}\"}}}"; Assert.assertEquals(expectedContent, contentString); } - + @Test - public void testExceptions() throws URISyntaxException, IOException, ExecutionException, InterruptedException { + public void testExceptions() + throws URISyntaxException, IOException, ExecutionException, InterruptedException { RestEmitter emitter = RestEmitter.create($ -> $.asyncHttpClientBuilder(mockHttpClientFactory)); - MetadataChangeProposalWrapper mcp = MetadataChangeProposalWrapper.create(b -> b.entityType("dataset") - .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)") - .upsert() - .aspect(new DatasetProperties().setDescription("Test Dataset"))); + MetadataChangeProposalWrapper mcp = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar,PROD)") + .upsert() + .aspect(new DatasetProperties().setDescription("Test Dataset"))); Future mockFuture = Mockito.mock(Future.class); Mockito.when(mockClient.execute(Mockito.any(), Mockito.any())).thenReturn(mockFuture); - Mockito.when(mockFuture.get()).thenThrow(new ExecutionException("Test execution exception", null)); + Mockito.when(mockFuture.get()) + .thenThrow(new ExecutionException("Test execution exception", null)); try { emitter.emit(mcp, null).get(); Assert.fail("should not be here"); @@ -120,10 +120,18 @@ public void testExceptions() throws URISyntaxException, IOException, ExecutionEx @Test public void testExtraHeaders() throws Exception { - RestEmitter emitter = RestEmitter.create(b -> b.asyncHttpClientBuilder(mockHttpClientFactory) - .extraHeaders(Collections.singletonMap("Test-Header", "Test-Value"))); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create( - b -> b.entityType("dataset").entityUrn("urn:li:dataset:foo").upsert().aspect(new DatasetProperties())); + RestEmitter emitter = + RestEmitter.create( + b -> + b.asyncHttpClientBuilder(mockHttpClientFactory) + .extraHeaders(Collections.singletonMap("Test-Header", "Test-Value"))); + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.create( + b -> + b.entityType("dataset") + .entityUrn("urn:li:dataset:foo") + .upsert() + .aspect(new DatasetProperties())); Future mockFuture = Mockito.mock(Future.class); Mockito.when(mockClient.execute(Mockito.any(), Mockito.any())).thenReturn(mockFuture); emitter.emit(mcpw, null); @@ -151,11 +159,15 @@ public void multithreadedTestExecutors() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> 
b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) .respond(org.mockserver.model.HttpResponse.response().withStatusCode(200)); ExecutorService executor = Executors.newFixedThreadPool(10); ArrayList results = new ArrayList(); @@ -164,59 +176,82 @@ public void multithreadedTestExecutors() throws Exception { int numRequests = 100; for (int i = 0; i < numRequests; ++i) { int finalI = i; - results.add(executor.submit(() -> { - try { - Thread.sleep(random.nextInt(100)); - MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper(String.format("Test Dataset %d", testIteration), - String.format("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", finalI)); - Future future = emitter.emit(mcp, null); - MetadataWriteResponse response = future.get(); - Assert.assertTrue(response.isSuccess()); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - })); + results.add( + executor.submit( + () -> { + try { + Thread.sleep(random.nextInt(100)); + MetadataChangeProposalWrapper mcp = + getMetadataChangeProposalWrapper( + String.format("Test Dataset %d", testIteration), + String.format( + "urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", finalI)); + Future future = emitter.emit(mcp, null); + MetadataWriteResponse response = future.get(); + Assert.assertTrue(response.isSuccess()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + })); } - results.forEach(x -> { - try { - x.get(); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - }); + results.forEach( + x -> { + try { + x.get(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + }); RequestDefinition[] recordedRequests = - testDataHubServer.getMockServer().retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); + testDataHubServer + .getMockServer() + .retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); Assert.assertEquals(100, recordedRequests.length); - List requests = Arrays.stream(recordedRequests) - .sequential() - .filter(x -> x instanceof HttpRequest) - .map(x -> (HttpRequest) x) - .collect(Collectors.toList()); + List requests = + Arrays.stream(recordedRequests) + .sequential() + .filter(x -> x instanceof HttpRequest) + .map(x -> (HttpRequest) x) + .collect(Collectors.toList()); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); for (int i = 0; i < numRequests; ++i) { - String expectedContent = String.format("{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," - + 
"\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", i, testIteration); - - Assert.assertEquals(requests.stream().filter(x -> { - String bodyString = ""; - try { - bodyString = mapper.writeValueAsString( - mapper.readValue(x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); - } catch (IOException ioException) { - return false; - } - return bodyString.equals(expectedContent); - }).count(), 1); + String expectedContent = + String.format( + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", + i, testIteration); + + Assert.assertEquals( + requests.stream() + .filter( + x -> { + String bodyString = ""; + try { + bodyString = + mapper.writeValueAsString( + mapper.readValue( + x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); + } catch (IOException ioException) { + return false; + } + return bodyString.equals(expectedContent); + }) + .count(), + 1); } } - private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper(String description, String entityUrn) { + private MetadataChangeProposalWrapper getMetadataChangeProposalWrapper( + String description, String entityUrn) { return MetadataChangeProposalWrapper.builder() .entityType("dataset") .entityUrn(entityUrn) @@ -231,11 +266,15 @@ public void multithreadedTestSingleThreadCaller() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) .respond(org.mockserver.model.HttpResponse.response().withStatusCode(200)); ArrayList results = new ArrayList(); Random random = new Random(); @@ -243,46 +282,65 @@ public void multithreadedTestSingleThreadCaller() throws Exception { int numRequests = 100; for (int i = 0; i < numRequests; ++i) { MetadataChangeProposalWrapper mcp = - getMetadataChangeProposalWrapper(String.format("Test Dataset %d", testIteration), + getMetadataChangeProposalWrapper( + String.format("Test Dataset %d", testIteration), String.format("urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)", i)); Future future = emitter.emit(mcp, null); results.add(future); } - results.forEach(x -> { - try { - x.get(); - } catch (Exception e) { - Assert.fail(e.getMessage()); - } - }); + results.forEach( + x -> { + try { + x.get(); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + }); RequestDefinition[] recordedRequests = - testDataHubServer.getMockServer().retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); + testDataHubServer + .getMockServer() + .retrieveRecordedRequests(request().withPath("/aspects").withMethod("POST")); Assert.assertEquals(numRequests, recordedRequests.length); - List requests = Arrays.stream(recordedRequests) - 
.sequential() - .filter(x -> x instanceof HttpRequest) - .map(x -> (HttpRequest) x) - .collect(Collectors.toList()); + List requests = + Arrays.stream(recordedRequests) + .sequential() + .filter(x -> x instanceof HttpRequest) + .map(x -> (HttpRequest) x) + .collect(Collectors.toList()); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); for (int i = 0; i < numRequests; ++i) { - String expectedContent = String.format("{\"proposal\":{\"aspectName\":\"datasetProperties\"," - + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," - + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" - + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", i, testIteration); - - Assert.assertEquals(requests.stream().filter(x -> { - String bodyString = ""; - try { - bodyString = mapper.writeValueAsString( - mapper.readValue(x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); - } catch (IOException ioException) { - return false; - } - return bodyString.equals(expectedContent); - }).count(), 1); + String expectedContent = + String.format( + "{\"proposal\":{\"aspectName\":\"datasetProperties\"," + + "\"entityUrn\":\"urn:li:dataset:(urn:li:dataPlatform:hive,foo.bar-%d,PROD)\"," + + "\"entityType\":\"dataset\",\"changeType\":\"UPSERT\",\"aspect\":{\"contentType\":\"application/json\"" + + ",\"value\":\"{\\\"description\\\":\\\"Test Dataset %d\\\"}\"}}}", + i, testIteration); + + Assert.assertEquals( + requests.stream() + .filter( + x -> { + String bodyString = ""; + try { + bodyString = + mapper.writeValueAsString( + mapper.readValue( + x.getBodyAsString().getBytes(StandardCharsets.UTF_8), Map.class)); + } catch (IOException ioException) { + return false; + } + return bodyString.equals(expectedContent); + }) + .count(), + 1); } } @@ -292,30 +350,39 @@ public void testCallback() throws Exception { Integer port = testDataHubServer.getMockServer().getPort(); RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.unlimited()) - .respond(org.mockserver.model.HttpResponse.response().withStatusCode(500).withBody("exception")); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond( + org.mockserver.model.HttpResponse.response().withStatusCode(500).withBody("exception")); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); AtomicReference callbackResponse = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - Future 
future = emitter.emit(mcpw, new Callback() { - @Override - public void onCompletion(MetadataWriteResponse response) { - callbackResponse.set(response); - Assert.assertFalse(response.isSuccess()); - latch.countDown(); - } - - @Override - public void onFailure(Throwable exception) { - Assert.fail("Should not be called"); - latch.countDown(); - } - }); + Future future = + emitter.emit( + mcpw, + new Callback() { + @Override + public void onCompletion(MetadataWriteResponse response) { + callbackResponse.set(response); + Assert.assertFalse(response.isSuccess()); + latch.countDown(); + } + + @Override + public void onFailure(Throwable exception) { + Assert.fail("Should not be called"); + latch.countDown(); + } + }); latch.await(); Assert.assertEquals(callbackResponse.get(), future.get()); @@ -328,16 +395,22 @@ public void testTimeoutOnGet() { RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); testDataHubServer.getMockServer().reset(); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.once()) - .respond(org.mockserver.model.HttpResponse.response() - .withStatusCode(200) - .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.once()) + .respond( + org.mockserver.model.HttpResponse.response() + .withStatusCode(200) + .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); try { long startTime = System.currentTimeMillis(); MetadataWriteResponse response = emitter.emit(mcpw, null).get(); @@ -356,20 +429,28 @@ public void testTimeoutOnGetWithTimeout() { RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:" + port)); testDataHubServer.getMockServer().reset(); - testDataHubServer.getMockServer() - .when(request().withMethod("POST") - .withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal") - .withHeader("Content-type", "application/json"), Times.once()) - .respond(org.mockserver.model.HttpResponse.response() - .withStatusCode(200) - .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); - - MetadataChangeProposalWrapper mcpw = getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); + testDataHubServer + .getMockServer() + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withHeader("Content-type", "application/json"), + Times.once()) + .respond( + org.mockserver.model.HttpResponse.response() + .withStatusCode(200) + .withDelay(TimeUnit.SECONDS, RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC + 3)); + + MetadataChangeProposalWrapper mcpw = + getMetadataChangeProposalWrapper("Test Dataset", "urn:li:dataset:foo"); try { long startTime = System.currentTimeMillis(); MetadataWriteResponse response = - emitter.emit(mcpw, null).get(RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC - 3, TimeUnit.SECONDS); + emitter + .emit(mcpw, null) + .get(RestEmitterConfig.DEFAULT_READ_TIMEOUT_SEC - 3, 
              TimeUnit.SECONDS);
      long duration = (long) ((System.currentTimeMillis() - startTime) / 1000.0);
      Assert.fail("Should not succeed with duration " + duration);
    } catch (Exception ioe) {
@@ -388,14 +469,16 @@ public void testUserAgentHeader() throws IOException, ExecutionException, Interr
     properties.load(emitter.getClass().getClassLoader().getResourceAsStream("client.properties"));
     Assert.assertNotNull(properties.getProperty("clientVersion"));
     String version = properties.getProperty("clientVersion");
-    testDataHubServer.getMockServer().verify(
-        request("/config")
-            .withHeader("User-Agent", "DataHub-RestClient/" + version));
+    testDataHubServer
+        .getMockServer()
+        .verify(request("/config").withHeader("User-Agent", "DataHub-RestClient/" + version));
   }
-
+
   @Test
-  public void testDisableSslVerification() throws IOException, InterruptedException, ExecutionException {
-    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().disableSslVerification(true).build());
+  public void testDisableSslVerification()
+      throws IOException, InterruptedException, ExecutionException {
+    RestEmitter restEmitter =
+        new RestEmitter(RestEmitterConfig.builder().disableSslVerification(true).build());
     final String hostWithSsl = "https://self-signed.badssl.com";
     final HttpGet request = new HttpGet(hostWithSsl);
 
@@ -403,10 +486,12 @@ public void testDisableSslVerification() throws IOException, InterruptedExceptio
     restEmitter.close();
     Assert.assertEquals(200, response.getStatusLine().getStatusCode());
   }
-
+
   @Test
-  public void testSslVerificationException() throws IOException, InterruptedException, ExecutionException {
-    RestEmitter restEmitter = new RestEmitter(RestEmitterConfig.builder().disableSslVerification(false).build());
+  public void testSslVerificationException()
+      throws IOException, InterruptedException, ExecutionException {
+    RestEmitter restEmitter =
+        new RestEmitter(RestEmitterConfig.builder().disableSslVerification(false).build());
     final String hostWithSsl = "https://self-signed.badssl.com";
     final HttpGet request = new HttpGet(hostWithSsl);
     try {
@@ -418,4 +503,4 @@ public void testSslVerificationException() throws IOException, InterruptedExcept
     }
     restEmitter.close();
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java
index 70efcd240a0ef..0b2a4500e019d 100644
--- a/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java
+++ b/metadata-integration/java/datahub-client/src/test/java/datahub/event/EventFormatterTest.java
@@ -1,49 +1,53 @@
 package datahub.event;
 
+import com.linkedin.dataset.DatasetProperties;
+import com.linkedin.mxe.MetadataChangeProposal;
 import java.io.IOException;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
-
 import org.junit.Test;
 import org.testng.Assert;
 
-import com.linkedin.dataset.DatasetProperties;
-import com.linkedin.mxe.MetadataChangeProposal;
-
-
 public class EventFormatterTest {
 
   @Test
   public void testPartialMCPW() throws URISyntaxException, IOException, EventValidationException {
-    MetadataChangeProposalWrapper metadataChangeProposalWrapper = MetadataChangeProposalWrapper.builder()
-        .entityType("dataset")
-        .entityUrn("urn:li:foo")
+    MetadataChangeProposalWrapper metadataChangeProposalWrapper =
+        MetadataChangeProposalWrapper.builder()
+            .entityType("dataset")
+            .entityUrn("urn:li:foo")
             .upsert()
-            .aspect(new DatasetProperties().setDescription("A test dataset"))
-            .build();
+            .aspect(new DatasetProperties().setDescription("A test dataset"))
+            .build();
     EventFormatter eventFormatter = new EventFormatter();
     MetadataChangeProposal mcp = eventFormatter.convert(metadataChangeProposalWrapper);
     Assert.assertEquals(mcp.getAspect().getContentType(), "application/json");
     String content = mcp.getAspect().getValue().asString(StandardCharsets.UTF_8);
     Assert.assertEquals(content, "{\"description\":\"A test dataset\"}");
   }
-
+
   @Test
   public void testUtf8Encoding() throws URISyntaxException, IOException {
-    MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder()
-        .entityType("dataset")
-        .entityUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-project.my-dataset.user-table,PROD)")
-        .upsert()
-        .aspect(new DatasetProperties().setDescription("This is the canonical User profile dataset œ∑´´†¥¨ˆˆπ“‘åß∂ƒ©˙∆˚¬…æΩ≈ç√∫˜˜≤≥ç"))
-        .build();
+    MetadataChangeProposalWrapper mcpw =
+        MetadataChangeProposalWrapper.builder()
+            .entityType("dataset")
+            .entityUrn(
+                "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-project.my-dataset.user-table,PROD)")
+            .upsert()
+            .aspect(
+                new DatasetProperties()
+                    .setDescription(
+                        "This is the canonical User profile dataset œ∑´´†¥¨ˆˆπ“‘åß∂ƒ©˙∆˚¬…æΩ≈ç√∫˜˜≤≥ç"))
+            .build();
     EventFormatter eventFormatter = new EventFormatter();
     MetadataChangeProposal mcp = eventFormatter.convert(mcpw);
     Assert.assertEquals(mcp.getAspect().getContentType(), "application/json");
     String content = mcp.getAspect().getValue().asString(StandardCharsets.UTF_8);
-    String expectedContent = "{\"description\":\"This is the canonical User profile dataset \\u0153\\u2211\\u00B4\\u00B4"
-        + "\\u2020\\u00A5\\u00A8\\u02C6\\u02C6\\u03C0\\u201C\\u2018\\u00E5\\u00DF\\u2202\\u0192\\u00A9\\u02D9\\u2206"
-        + "\\u02DA\\u00AC\\u2026\\u00E6\\u03A9\\u2248\\u00E7\\u221A\\u222B\\u02DC\\u02DC\\u2264\\u2265\\u00E7\"}";
+    String expectedContent =
+        "{\"description\":\"This is the canonical User profile dataset \\u0153\\u2211\\u00B4\\u00B4"
+            + "\\u2020\\u00A5\\u00A8\\u02C6\\u02C6\\u03C0\\u201C\\u2018\\u00E5\\u00DF\\u2202\\u0192\\u00A9\\u02D9\\u2206"
+            + "\\u02DA\\u00AC\\u2026\\u00E6\\u03A9\\u2248\\u00E7\\u221A\\u222B\\u02DC\\u02DC\\u2264\\u2265\\u00E7\"}";
     Assert.assertEquals(content, expectedContent);
   }
 }
diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java b/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java
index 3d371954c0f37..3a333abc5cb10 100644
--- a/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java
+++ b/metadata-integration/java/datahub-client/src/test/java/datahub/event/MetadataChangeProposalWrapperTest.java
@@ -3,75 +3,74 @@
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
 import com.linkedin.dataset.DatasetProperties;
-
 import java.net.URISyntaxException;
-
 import org.junit.Assert;
 import org.junit.Test;
 
-
 public class MetadataChangeProposalWrapperTest {
 
-  /**
-   * We should throw errors on validation as exceptions
-   */
-  @Test
-  public void testBuilderExceptions() {
-    try {
-      MetadataChangeProposalWrapper.create(b -> b
-          .entityType("dataset")
-          .entityUrn("foo") // bad urn should throw exception
-      );
-      Assert.fail("Should throw an exception");
-    } catch (EventValidationException e) {
-      Assert.assertTrue("Underlying exception should be a URI syntax issue", e.getCause() instanceof URISyntaxException);
-    } catch (Exception e) {
-      Assert.fail("Should not throw any other exception");
-    }
-  }
-
-  @Test
-  public void testAspectInferenceSuccess() throws EventValidationException {
-    MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create(b -> b
-        .entityType("dataset")
-        .entityUrn("urn:li:dataset:(foo,bar,PROD)")
-        .upsert()
-        .aspect(new DatasetProperties()));
-    Assert.assertEquals(mcpw.getAspectName(), "datasetProperties");
-  }
-
-  /**
-   * We throw exceptions on using the regular builder pattern
-   *
-   * @throws URISyntaxException
-   * @throws EventValidationException
-   */
-  @Test(expected = EventValidationException.class)
-  public void testAspectInferenceFailure() throws URISyntaxException, EventValidationException {
-    MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder()
-        .entityType("dataset")
-        .entityUrn("urn:li:dataset:(foo,bar,PROD)")
-        .upsert()
-        .aspect(new AuditStamp().setActor(Urn.createFromString("urn:li:corpUser:jdoe")))
-        .build();
+  /** We should throw errors on validation as exceptions */
+  @Test
+  public void testBuilderExceptions() {
+    try {
+      MetadataChangeProposalWrapper.create(
+          b -> b.entityType("dataset").entityUrn("foo") // bad urn should throw exception
+          );
+      Assert.fail("Should throw an exception");
+    } catch (EventValidationException e) {
+      Assert.assertTrue(
+          "Underlying exception should be a URI syntax issue",
+          e.getCause() instanceof URISyntaxException);
+    } catch (Exception e) {
+      Assert.fail("Should not throw any other exception");
     }
+  }
 
-  /**
-   * We throw exceptions on using the lambda builder pattern
-   *
-   * @throws URISyntaxException
-   * @throws EventValidationException
-   */
-  @Test(expected = EventValidationException.class)
-  public void testAspectInferenceFailureLambda() throws URISyntaxException, EventValidationException {
-    Urn actorUrn = Urn.createFromString("urn:li:corpUser:jdoe");
-    MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.create(b -> b
-        .entityType("dataset")
-        .entityUrn("urn:li:dataset:(foo,bar,PROD)")
-        .upsert()
-        .aspect(new AuditStamp().setActor(actorUrn))
-    );
-  }
+  @Test
+  public void testAspectInferenceSuccess() throws EventValidationException {
+    MetadataChangeProposalWrapper mcpw =
+        MetadataChangeProposalWrapper.create(
+            b ->
+                b.entityType("dataset")
+                    .entityUrn("urn:li:dataset:(foo,bar,PROD)")
+                    .upsert()
+                    .aspect(new DatasetProperties()));
+    Assert.assertEquals(mcpw.getAspectName(), "datasetProperties");
+  }
 
+  /**
+   * We throw exceptions on using the regular builder pattern
+   *
+   * @throws URISyntaxException
+   * @throws EventValidationException
+   */
+  @Test(expected = EventValidationException.class)
+  public void testAspectInferenceFailure() throws URISyntaxException, EventValidationException {
+    MetadataChangeProposalWrapper mcpw =
+        MetadataChangeProposalWrapper.builder()
+            .entityType("dataset")
+            .entityUrn("urn:li:dataset:(foo,bar,PROD)")
+            .upsert()
+            .aspect(new AuditStamp().setActor(Urn.createFromString("urn:li:corpUser:jdoe")))
+            .build();
+  }
+
+  /**
+   * We throw exceptions on using the lambda builder pattern
+   *
+   * @throws URISyntaxException
+   * @throws EventValidationException
+   */
+  @Test(expected = EventValidationException.class)
+  public void testAspectInferenceFailureLambda()
+      throws URISyntaxException, EventValidationException {
+    Urn actorUrn = Urn.createFromString("urn:li:corpUser:jdoe");
+    MetadataChangeProposalWrapper mcpw =
+        MetadataChangeProposalWrapper.create(
+            b ->
+                b.entityType("dataset")
+                    .entityUrn("urn:li:dataset:(foo,bar,PROD)")
+                    .upsert()
+                    .aspect(new AuditStamp().setActor(actorUrn)));
+  }
 }
diff --git a/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java b/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java
index e7cdee3f369e1..44e60a4bde783 100644
--- a/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java
+++ b/metadata-integration/java/datahub-client/src/test/java/datahub/server/TestDataHubServer.java
@@ -1,11 +1,10 @@
 package datahub.server;
 
-import org.mockserver.integration.ClientAndServer;
-import org.mockserver.matchers.Times;
-
 import static org.mockserver.integration.ClientAndServer.startClientAndServer;
 import static org.mockserver.model.HttpRequest.*;
 
+import org.mockserver.integration.ClientAndServer;
+import org.mockserver.matchers.Times;
 
 public class TestDataHubServer {
 
@@ -26,17 +25,12 @@ public TestDataHubServer() {
 
   public void init() {
     mockServer
-        .when(
-            request()
-                .withMethod("GET")
-                .withPath("/config")
-                .withHeader("Content-type", "application/json"),
-            Times.unlimited()
-        ).respond(
-            org.mockserver.model.HttpResponse.response()
-                .withBody("{\"noCode\": true }")
-        );
+        .when(
+            request()
+                .withMethod("GET")
+                .withPath("/config")
+                .withHeader("Content-type", "application/json"),
+            Times.unlimited())
+        .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }"));
   }
-
-
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java b/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java
index e6f93eb1a4f0c..12bbb9e59ab95 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/com/google/protobuf/ExtensionRegistry.java
@@ -32,7 +32,6 @@
 
 import com.google.protobuf.Descriptors.Descriptor;
 import com.google.protobuf.Descriptors.FieldDescriptor;
-
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -89,300 +88,296 @@
  * @author kenton@google.com Kenton Varda
  */
 public class ExtensionRegistry extends ExtensionRegistryLite {
-    /** Construct a new, empty instance. */
-    public static ExtensionRegistry newInstance() {
-        return new ExtensionRegistry();
-    }
-
-    /** Get the unmodifiable singleton empty instance. */
-    public static ExtensionRegistry getEmptyRegistry() {
-        return EMPTY_REGISTRY;
-    }
-
-
-    /** Returns an unmodifiable view of the registry. */
-    @Override
-    public ExtensionRegistry getUnmodifiable() {
-        return new ExtensionRegistry(this);
-    }
-
-    /** A (Descriptor, Message) pair, returned by lookup methods. */
-    public static final class ExtensionInfo {
-        /** The extension's descriptor. */
-        public final FieldDescriptor descriptor;
-
-        /**
-         * A default instance of the extension's type, if it has a message type. Otherwise, {@code
-         * null}.
-         */
-        public final Message defaultInstance;
-
-        private ExtensionInfo(final FieldDescriptor descriptor) {
-            this.descriptor = descriptor;
-            defaultInstance = null;
-        }
-
-        private ExtensionInfo(final FieldDescriptor descriptor, final Message defaultInstance) {
-            this.descriptor = descriptor;
-            this.defaultInstance = defaultInstance;
-        }
-    }
-
-    /** Deprecated. Use {@link #findImmutableExtensionByName(String)} instead. */
-    @Deprecated
-    public ExtensionInfo findExtensionByName(final String fullName) {
-        return findImmutableExtensionByName(fullName);
-    }
+  /** Construct a new, empty instance. */
+  public static ExtensionRegistry newInstance() {
+    return new ExtensionRegistry();
+  }
+
+  /** Get the unmodifiable singleton empty instance. */
+  public static ExtensionRegistry getEmptyRegistry() {
+    return EMPTY_REGISTRY;
+  }
+
+  /** Returns an unmodifiable view of the registry. */
+  @Override
+  public ExtensionRegistry getUnmodifiable() {
+    return new ExtensionRegistry(this);
+  }
+
+  /** A (Descriptor, Message) pair, returned by lookup methods. */
+  public static final class ExtensionInfo {
+    /** The extension's descriptor. */
+    public final FieldDescriptor descriptor;
 
     /**
-     * Find an extension for immutable APIs by fully-qualified field name, in the proto namespace.
-     * i.e. {@code result.descriptor.fullName()} will match {@code fullName} if a match is found.
-     *
-     * @return Information about the extension if found, or {@code null} otherwise.
+     * A default instance of the extension's type, if it has a message type. Otherwise, {@code
+     * null}.
      */
-    public ExtensionInfo findImmutableExtensionByName(final String fullName) {
-        return immutableExtensionsByName.get(fullName);
-    }
+    public final Message defaultInstance;
 
-    /**
-     * Find an extension for mutable APIs by fully-qualified field name, in the proto namespace. i.e.
-     * {@code result.descriptor.fullName()} will match {@code fullName} if a match is found.
-     *
-     * @return Information about the extension if found, or {@code null} otherwise.
-     */
-    public ExtensionInfo findMutableExtensionByName(final String fullName) {
-        return mutableExtensionsByName.get(fullName);
+    private ExtensionInfo(final FieldDescriptor descriptor) {
+      this.descriptor = descriptor;
+      defaultInstance = null;
     }
 
-    /** Deprecated. Use {@link #findImmutableExtensionByNumber( Descriptors.Descriptor, int)} */
-    @Deprecated
-    public ExtensionInfo findExtensionByNumber(
-        final Descriptor containingType, final int fieldNumber) {
-        return findImmutableExtensionByNumber(containingType, fieldNumber);
+    private ExtensionInfo(final FieldDescriptor descriptor, final Message defaultInstance) {
+      this.descriptor = descriptor;
+      this.defaultInstance = defaultInstance;
     }
-
-    /**
-     * Find an extension by containing type and field number for immutable APIs.
-     *
-     * @return Information about the extension if found, or {@code null} otherwise.
-     */
-    public ExtensionInfo findImmutableExtensionByNumber(
-        final Descriptor containingType, final int fieldNumber) {
-        return immutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber));
+  }
+
+  /** Deprecated. Use {@link #findImmutableExtensionByName(String)} instead. */
+  @Deprecated
+  public ExtensionInfo findExtensionByName(final String fullName) {
+    return findImmutableExtensionByName(fullName);
+  }
+
+  /**
+   * Find an extension for immutable APIs by fully-qualified field name, in the proto namespace.
+   * i.e. {@code result.descriptor.fullName()} will match {@code fullName} if a match is found.
+   *
+   * @return Information about the extension if found, or {@code null} otherwise.
+   */
+  public ExtensionInfo findImmutableExtensionByName(final String fullName) {
+    return immutableExtensionsByName.get(fullName);
+  }
+
+  /**
+   * Find an extension for mutable APIs by fully-qualified field name, in the proto namespace. i.e.
+   * {@code result.descriptor.fullName()} will match {@code fullName} if a match is found.
+   *
+   * @return Information about the extension if found, or {@code null} otherwise.
+   */
+  public ExtensionInfo findMutableExtensionByName(final String fullName) {
+    return mutableExtensionsByName.get(fullName);
+  }
+
+  /** Deprecated. Use {@link #findImmutableExtensionByNumber( Descriptors.Descriptor, int)} */
+  @Deprecated
+  public ExtensionInfo findExtensionByNumber(
+      final Descriptor containingType, final int fieldNumber) {
+    return findImmutableExtensionByNumber(containingType, fieldNumber);
+  }
+
+  /**
+   * Find an extension by containing type and field number for immutable APIs.
+   *
+   * @return Information about the extension if found, or {@code null} otherwise.
+   */
+  public ExtensionInfo findImmutableExtensionByNumber(
+      final Descriptor containingType, final int fieldNumber) {
+    return immutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber));
+  }
+
+  /**
+   * Find an extension by containing type and field number for mutable APIs.
+   *
+   * @return Information about the extension if found, or {@code null} otherwise.
+   */
+  public ExtensionInfo findMutableExtensionByNumber(
+      final Descriptor containingType, final int fieldNumber) {
+    return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber));
+  }
+
+  /**
+   * Find all extensions for mutable APIs by fully-qualified name of extended class. Note that this
+   * method is more computationally expensive than getting a single extension by name or number.
+   *
+   * @return Information about the extensions found, or {@code null} if there are none.
+   */
+  public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) {
+    HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
+    for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) {
+      if (pair.descriptor.getFullName().equals(fullName)) {
+        extensions.add(mutableExtensionsByNumber.get(pair));
+      }
     }
-
-    /**
-     * Find an extension by containing type and field number for mutable APIs.
-     *
-     * @return Information about the extension if found, or {@code null} otherwise.
-     */
-    public ExtensionInfo findMutableExtensionByNumber(
-        final Descriptor containingType, final int fieldNumber) {
-        return mutableExtensionsByNumber.get(new DescriptorIntPair(containingType, fieldNumber));
+    return extensions;
+  }
+
+  /**
+   * Find all extensions for immutable APIs by fully-qualified name of extended class. Note that
+   * this method is more computationally expensive than getting a single extension by name or
+   * number.
+   *
+   * @return Information about the extensions found, or {@code null} if there are none.
+   */
+  public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) {
+    HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
+    for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) {
+      if (pair.descriptor.getFullName().equals(fullName)) {
+        extensions.add(immutableExtensionsByNumber.get(pair));
+      }
     }
-
-    /**
-     * Find all extensions for mutable APIs by fully-qualified name of extended class. Note that this
-     * method is more computationally expensive than getting a single extension by name or number.
-     *
-     * @return Information about the extensions found, or {@code null} if there are none.
-     */
-    public Set<ExtensionInfo> getAllMutableExtensionsByExtendedType(final String fullName) {
-        HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
-        for (DescriptorIntPair pair : mutableExtensionsByNumber.keySet()) {
-            if (pair.descriptor.getFullName().equals(fullName)) {
-                extensions.add(mutableExtensionsByNumber.get(pair));
-            }
-        }
-        return extensions;
-    }
-
-    /**
-     * Find all extensions for immutable APIs by fully-qualified name of extended class. Note that
-     * this method is more computationally expensive than getting a single extension by name or
-     * number.
-     *
-     * @return Information about the extensions found, or {@code null} if there are none.
-     */
-    public Set<ExtensionInfo> getAllImmutableExtensionsByExtendedType(final String fullName) {
-        HashSet<ExtensionInfo> extensions = new HashSet<ExtensionInfo>();
-        for (DescriptorIntPair pair : immutableExtensionsByNumber.keySet()) {
-            if (pair.descriptor.getFullName().equals(fullName)) {
-                extensions.add(immutableExtensionsByNumber.get(pair));
-            }
-        }
-        return extensions;
+    return extensions;
+  }
+
+  /** Add an extension from a generated file to the registry. */
+  public void add(final Extension<?, ?> extension) {
+    if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE
+        && extension.getExtensionType() != Extension.ExtensionType.MUTABLE) {
+      // do not support other extension types. ignore
+      return;
     }
-
-    /** Add an extension from a generated file to the registry. */
-    public void add(final Extension<?, ?> extension) {
-        if (extension.getExtensionType() != Extension.ExtensionType.IMMUTABLE
-            && extension.getExtensionType() != Extension.ExtensionType.MUTABLE) {
-            // do not support other extension types. ignore
-            return;
-        }
-        add(newExtensionInfo(extension), extension.getExtensionType());
+    add(newExtensionInfo(extension), extension.getExtensionType());
+  }
+
+  /** Add an extension from a generated file to the registry. */
+  public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) {
+    add((Extension<?, ?>) extension);
+  }
+
+  static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) {
+    if (extension.getDescriptor().getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
+      if (extension.getMessageDefaultInstance() == null) {
+        throw new IllegalStateException(
+            "Registered message-type extension had null default instance: "
+                + extension.getDescriptor().getFullName());
+      }
+      return new ExtensionInfo(
+          extension.getDescriptor(), (Message) extension.getMessageDefaultInstance());
+    } else {
+      return new ExtensionInfo(extension.getDescriptor(), null);
     }
-
-    /** Add an extension from a generated file to the registry. */
-    public void add(final GeneratedMessage.GeneratedExtension<?, ?> extension) {
-        add((Extension<?, ?>) extension);
+  }
+
+  /** Add a non-message-type extension to the registry by descriptor. */
+  public void add(final FieldDescriptor type) {
+    if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
+      throw new IllegalArgumentException(
+          "ExtensionRegistry.add() must be provided a default instance when "
+              + "adding an embedded message extension.");
     }
-
-    static ExtensionInfo newExtensionInfo(final Extension<?, ?> extension) {
-        if (extension.getDescriptor().getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
-            if (extension.getMessageDefaultInstance() == null) {
-                throw new IllegalStateException(
-                    "Registered message-type extension had null default instance: "
-                        + extension.getDescriptor().getFullName());
-            }
-            return new ExtensionInfo(
-                extension.getDescriptor(), (Message) extension.getMessageDefaultInstance());
-        } else {
-            return new ExtensionInfo(extension.getDescriptor(), null);
-        }
+    ExtensionInfo info = new ExtensionInfo(type, null);
+    add(info, Extension.ExtensionType.IMMUTABLE);
+    add(info, Extension.ExtensionType.MUTABLE);
+  }
+
+  /** Add a message-type extension to the registry by descriptor. */
+  public void add(final FieldDescriptor type, final Message defaultInstance) {
+    if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
+      throw new IllegalArgumentException(
+          "ExtensionRegistry.add() provided a default instance for a non-message extension.");
     }
-
-    /** Add a non-message-type extension to the registry by descriptor. */
-    public void add(final FieldDescriptor type) {
-        if (type.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
-            throw new IllegalArgumentException(
-                "ExtensionRegistry.add() must be provided a default instance when "
-                    + "adding an embedded message extension.");
-        }
-        ExtensionInfo info = new ExtensionInfo(type, null);
-        add(info, Extension.ExtensionType.IMMUTABLE);
-        add(info, Extension.ExtensionType.MUTABLE);
+    add(new ExtensionInfo(type, defaultInstance), Extension.ExtensionType.IMMUTABLE);
+  }
+
+  // =================================================================
+  // Private stuff.
+
+  private ExtensionRegistry() {
+    this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>();
+    this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>();
+    this.immutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>();
+    this.mutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>();
+  }
+
+  private ExtensionRegistry(ExtensionRegistry other) {
+    super(other);
+    this.immutableExtensionsByName = Collections.unmodifiableMap(other.immutableExtensionsByName);
+    this.mutableExtensionsByName = Collections.unmodifiableMap(other.mutableExtensionsByName);
+    this.immutableExtensionsByNumber =
+        Collections.unmodifiableMap(other.immutableExtensionsByNumber);
+    this.mutableExtensionsByNumber = Collections.unmodifiableMap(other.mutableExtensionsByNumber);
+  }
+
+  private final Map<String, ExtensionInfo> immutableExtensionsByName;
+  private final Map<String, ExtensionInfo> mutableExtensionsByName;
+  private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber;
+  private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber;
+
+  ExtensionRegistry(boolean empty) {
+    super(EMPTY_REGISTRY_LITE);
+    this.immutableExtensionsByName = Collections.emptyMap();
+    this.mutableExtensionsByName = Collections.emptyMap();
+    this.immutableExtensionsByNumber = Collections.emptyMap();
+    this.mutableExtensionsByNumber = Collections.emptyMap();
+  }
+
+  static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true);
+
+  private void add(final ExtensionInfo extension, final Extension.ExtensionType extensionType) {
+    if (!extension.descriptor.isExtension()) {
+      throw new IllegalArgumentException(
+          "ExtensionRegistry.add() was given a FieldDescriptor for a regular "
+              + "(non-extension) field.");
     }
 
-    /** Add a message-type extension to the registry by descriptor. */
-    public void add(final FieldDescriptor type, final Message defaultInstance) {
-        if (type.getJavaType() != FieldDescriptor.JavaType.MESSAGE) {
-            throw new IllegalArgumentException(
-                "ExtensionRegistry.add() provided a default instance for a non-message extension.");
-        }
-        add(new ExtensionInfo(type, defaultInstance), Extension.ExtensionType.IMMUTABLE);
+    Map<String, ExtensionInfo> extensionsByName;
+    Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber;
+    switch (extensionType) {
+      case IMMUTABLE:
+        extensionsByName = immutableExtensionsByName;
+        extensionsByNumber = immutableExtensionsByNumber;
+        break;
+      case MUTABLE:
+        extensionsByName = mutableExtensionsByName;
+        extensionsByNumber = mutableExtensionsByNumber;
+        break;
+      default:
+        // Ignore the unknown supported type.
+        return;
     }
 
-    // =================================================================
-    // Private stuff.
-
-    private ExtensionRegistry() {
-        this.immutableExtensionsByName = new HashMap<String, ExtensionInfo>();
-        this.mutableExtensionsByName = new HashMap<String, ExtensionInfo>();
-        this.immutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>();
-        this.mutableExtensionsByNumber = new HashMap<DescriptorIntPair, ExtensionInfo>();
-    }
-
-    private ExtensionRegistry(ExtensionRegistry other) {
-        super(other);
-        this.immutableExtensionsByName = Collections.unmodifiableMap(other.immutableExtensionsByName);
-        this.mutableExtensionsByName = Collections.unmodifiableMap(other.mutableExtensionsByName);
-        this.immutableExtensionsByNumber =
-            Collections.unmodifiableMap(other.immutableExtensionsByNumber);
-        this.mutableExtensionsByNumber = Collections.unmodifiableMap(other.mutableExtensionsByNumber);
-    }
-
-    private final Map<String, ExtensionInfo> immutableExtensionsByName;
-    private final Map<String, ExtensionInfo> mutableExtensionsByName;
-    private final Map<DescriptorIntPair, ExtensionInfo> immutableExtensionsByNumber;
-    private final Map<DescriptorIntPair, ExtensionInfo> mutableExtensionsByNumber;
-
-    ExtensionRegistry(boolean empty) {
-        super(EMPTY_REGISTRY_LITE);
-        this.immutableExtensionsByName = Collections.emptyMap();
-        this.mutableExtensionsByName = Collections.emptyMap();
-        this.immutableExtensionsByNumber = Collections.emptyMap();
-        this.mutableExtensionsByNumber = Collections.emptyMap();
-    }
-
-    static final ExtensionRegistry EMPTY_REGISTRY = new ExtensionRegistry(true);
-
-    private void add(final ExtensionInfo extension, final Extension.ExtensionType extensionType) {
-        if (!extension.descriptor.isExtension()) {
-            throw new IllegalArgumentException(
-                "ExtensionRegistry.add() was given a FieldDescriptor for a regular "
-                    + "(non-extension) field.");
-        }
-
-        Map<String, ExtensionInfo> extensionsByName;
-        Map<DescriptorIntPair, ExtensionInfo> extensionsByNumber;
-        switch (extensionType) {
-            case IMMUTABLE:
-                extensionsByName = immutableExtensionsByName;
-                extensionsByNumber = immutableExtensionsByNumber;
-                break;
-            case MUTABLE:
-                extensionsByName = mutableExtensionsByName;
-                extensionsByNumber = mutableExtensionsByNumber;
-                break;
-            default:
-                // Ignore the unknown supported type.
-                return;
-        }
-
-        extensionsByName.put(extension.descriptor.getFullName(), extension);
-        extensionsByNumber.put(
-            new DescriptorIntPair(
-                extension.descriptor.getContainingType(), extension.descriptor.getNumber()),
-            extension);
-
-        final FieldDescriptor field = extension.descriptor;
-        if (field.getContainingType().getOptions().getMessageSetWireFormat()
-            && field.getType() == FieldDescriptor.Type.MESSAGE
-            && field.isOptional()
-            && field.getExtensionScope() == field.getMessageType()) {
-            // This is an extension of a MessageSet type defined within the extension
-            // type's own scope. For backwards-compatibility, allow it to be looked
-            // up by type name.
-            extensionsByName.put(field.getMessageType().getFullName(), extension);
-        }
+    extensionsByName.put(extension.descriptor.getFullName(), extension);
+    extensionsByNumber.put(
+        new DescriptorIntPair(
+            extension.descriptor.getContainingType(), extension.descriptor.getNumber()),
+        extension);
+
+    final FieldDescriptor field = extension.descriptor;
+    if (field.getContainingType().getOptions().getMessageSetWireFormat()
+        && field.getType() == FieldDescriptor.Type.MESSAGE
+        && field.isOptional()
+        && field.getExtensionScope() == field.getMessageType()) {
+      // This is an extension of a MessageSet type defined within the extension
+      // type's own scope. For backwards-compatibility, allow it to be looked
+      // up by type name.
+      extensionsByName.put(field.getMessageType().getFullName(), extension);
     }
+  }
+
+  /**
+   * DataHub modification of hashcode/equals based on full name. The upstream project uses the
+   * descriptor and in our use of the registry results in objects that are practically identical
+   * except for the `jsonName` field. This is a difference generated by internal components and is
+   * not under our control.
+   *
+   * <p>A (GenericDescriptor, int) pair, used as a map key.
+   */
+  private static final class DescriptorIntPair {
+    private final String fullName;
+    private final Descriptor descriptor;
+    private final int number;
+
+    DescriptorIntPair(final Descriptor descriptor, final int number) {
+      this.descriptor = descriptor;
+      this.fullName = descriptor.getFullName();
+      this.number = number;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      if (o == null || getClass() != o.getClass()) {
+        return false;
+      }
+
+      DescriptorIntPair that = (DescriptorIntPair) o;
+
+      if (number != that.number) {
+        return false;
+      }
+      return fullName.equals(that.fullName);
+    }
 
-    /**
-     *
-     * DataHub modification of hashcode/equals based on full name. The upstream
-     * project uses the descriptor and in our use of the registry results
-     * in objects that are practically identical except for the `jsonName` field.
-     * This is a difference generated by internal components and is not under
-     * our control.
-     *
-     * A (GenericDescriptor, int) pair, used as a map key.
-     *
-     * */
-    private static final class DescriptorIntPair {
-        private final String fullName;
-        private final Descriptor descriptor;
-        private final int number;
-
-        DescriptorIntPair(final Descriptor descriptor, final int number) {
-            this.descriptor = descriptor;
-            this.fullName = descriptor.getFullName();
-            this.number = number;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-            if (o == null || getClass() != o.getClass()) {
-                return false;
-            }
-
-            DescriptorIntPair that = (DescriptorIntPair) o;
-
-            if (number != that.number) {
-                return false;
-            }
-            return fullName.equals(that.fullName);
-        }
-
-        @Override
-        public int hashCode() {
-            int result = fullName.hashCode();
-            result = 31 * result + number;
-            return result;
-        }
+    @Override
+    public int hashCode() {
+      int result = fullName.hashCode();
+      result = 31 * result + number;
+      return result;
     }
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java
index c0a6a2eaa410c..e4030e12574f0 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/DirectoryWalker.java
@@ -25,51 +25,52 @@ public DirectoryWalker(String directory, String[] excludePatterns) {
       this.excludeMatchers.add(FileSystems.getDefault().getPathMatcher("glob:" + excludePattern));
     }
   }
-
 }
 
   public Stream<Path> walkFiles() throws IOException {
     final Path baseDir = this.rootDirectory;
     final ArrayList<Path> files = new ArrayList<>();
-    Files.walkFileTree(this.rootDirectory, new FileVisitor<Path>() {
-      @Override
-      public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
-        return FileVisitResult.CONTINUE;
-      }
+    Files.walkFileTree(
+        this.rootDirectory,
+        new FileVisitor<Path>() {
+          @Override
+          public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
+              throws IOException {
+            return FileVisitResult.CONTINUE;
+          }
 
-      @Override
-      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-        boolean excluded = false;
-        Path relativePath = baseDir.relativize(file);
-        if (!includeMatcher.matches(relativePath)) {
-          excluded = true;
-        } else {
-          for (PathMatcher matcher : excludeMatchers) {
-            if (matcher.matches(relativePath)) {
-              excluded = true;
+          @Override
+          public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
+              throws IOException {
+            boolean excluded = false;
+            Path relativePath = baseDir.relativize(file);
+            if (!includeMatcher.matches(relativePath)) {
+              excluded = true;
+            } else {
+              for (PathMatcher matcher : excludeMatchers) {
+                if (matcher.matches(relativePath)) {
+                  excluded = true;
+                }
+              }
             }
-          }
-        }
-        if (!excluded) {
-          files.add(file);
-        }
-        return FileVisitResult.CONTINUE;
-      }
+            if (!excluded) {
+              files.add(file);
+            }
+            return FileVisitResult.CONTINUE;
+          }
 
-      @Override
-      public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
-        return FileVisitResult.CONTINUE;
-      }
+          @Override
+          public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
+            return FileVisitResult.CONTINUE;
+          }
 
-      @Override
-      public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
-        return FileVisitResult.CONTINUE;
-      }
-    });
+          @Override
+          public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
+            return FileVisitResult.CONTINUE;
+          }
+        });
     return files.stream();
   }
-
-
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java
index dc49457e3e6e1..dcc95222fabf2 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/Proto2DataHub.java
@@ -1,14 +1,13 @@
 package datahub.protobuf;
 
-import com.linkedin.common.FabricType;
 import com.linkedin.common.AuditStamp;
+import com.linkedin.common.FabricType;
 import com.linkedin.common.urn.CorpuserUrn;
 import com.linkedin.common.urn.DataPlatformUrn;
 import datahub.client.Emitter;
 import datahub.client.file.FileEmitter;
 import datahub.client.file.FileEmitterConfig;
 import datahub.client.rest.RestEmitter;
-
 import java.io.FileInputStream;
 import java.io.InputStream;
 import java.nio.file.Files;
@@ -25,350 +24,396 @@
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 
-
-/**
- * Rudimentary application
- */
+/** Rudimentary application */
 public class Proto2DataHub {
 
-    private static final Option OPTION_DATAHUB_PLATFORM =
-        Option.builder()
-            .longOpt("platform")
-            .hasArg()
-            .desc("[Optional] The data platform to produce schemas for. e.g. kafka, snowflake, etc. (defaults to kafka)")
-            .build();
-
-    private static final Option OPTION_DATAHUB_API = Option.builder()
-        .longOpt("datahub_api")
-        .hasArg()
-        .desc("[Optional] The API endpoint for DataHub GMS. (defaults to https://localhost:8080)")
-        .build();
-
-    private static final Option OPTION_DATAHUB_TOKEN = Option.builder()
-        .longOpt("datahub_token")
-        .hasArg()
-        .desc("[Optional] The authentication token for DataHub API access. (defaults to empty)")
-        .build();
-
-    private static final Option OPTION_DESCRIPTOR = Option.builder()
-        .longOpt("descriptor")
-        .hasArg()
-        .desc("[Required] The generated protobuf descriptor file. "
-            + "Typically a single .dsc file for the repo or a .protoc file (1:1 with each src file)")
-        .required()
-        .build();
-
-    private static final Option OPTION_FILE = Option.builder()
-        .longOpt("file")
-        .hasArg()
-        .desc("[Optional if using --directory] The protobuf source file. Typically a .proto file.")
-        .build();
-
-    private static final Option OPTION_DIR = Option.builder()
-        .longOpt("directory")
-        .hasArg()
-        .desc("[Optional if using --file] The root directory containing protobuf source files.")
-        .build();
-
-    private static final Option OPTION_EXCLUDE_PATTERN = Option.builder()
-        .longOpt("exclude")
-        .valueSeparator(',')
-        .hasArgs()
-        .desc("[Optional] Exclude patterns to avoid processing all source files, separated by ,. Typically used with --directory option. "
-            + "Follows glob patterns: e.g. --exclude \"build/**,generated/**\" will exclude all files in the build "
-            + "and generated directories under the rootDirectory given by the --directory option")
-        .build();
-
-    private static final Option OPTION_DATAHUB_USER = Option.builder()
-        .longOpt("datahub_user")
-        .hasArg()
-        .desc("[Optional] The datahub user to attribute this ingestion to. (defaults to ..)")
-        .build();
-
-    private static final Option OPTION_ENV = Option.builder()
-        .longOpt("env")
-        .hasArg()
-        .desc("[Optional] The environment to attach all entities to. Typically, DEV, PROD etc. (defaults to DEV)")
-        .build();
-
-    private static final Option OPTION_GITHUB_ORG = Option.builder()
-        .longOpt("github_org")
-        .hasArg()
-        .desc("[Optional] The GitHub organization that this schema repository belongs to. "
-            + "We will translate comments in your protoc files like @datahub-project/data-team "
-            + "to GitHub team urls like: https://github.com/orgs/datahub-project/teams/data-team")
-        .build();
-
-    private static final Option OPTION_SLACK_ID = Option.builder()
-        .longOpt("slack_id")
-        .hasArg()
-        .desc("[Optional] The Slack team id if your protobuf files contain comments with references to channel names. "
-            + "We will translate comments like #data-eng in your protobuf file to slack urls like: "
-            + "https://slack.com/app_redirect?channel=data-eng&team=T1234 following the "
-            + "documentation at (https://api.slack.com/reference/deep-linking#deep-linking-into-your-slack-app__opening-a-channel-by-name-or-id) "
-            + "The easiest way to find your Slack team id is to open your workspace in your browser. It should look "
-            + "something like: https://app.slack.com/client/TUMKD5EGJ/... In this case, the team-id is TUMKD5EGJ.")
-        .build();
-
-    private static final Option OPTION_TRANSPORT = Option.builder()
-        .longOpt("transport")
-        .hasArg()
-        .desc("[Optional] What transport to use to communicate with DataHub. Options are: rest (default), kafka and file.")
-        .build();
-
-    private static final Option OPTION_FILENAME = Option.builder()
-        .longOpt("filename")
-        .hasArg()
-        .desc("[Required if using transport file] Filename to write output to.")
-        .build();
-
-    private static final Option OPTION_HELP = Option.builder()
-        .longOpt("help")
-        .desc("Print this help message")
-        .build();
-
-    private static final Option OPTION_SUBTYPE = Option.builder()
-        .longOpt("subtype")
-        .desc("[Optional] A custom subtype to attach to all entities produced. e.g. event, schema, topic etc."
-            + "(Default is schema)")
-        .build();
-
-    enum TransportOptions {
-        REST,
-        KAFKA,
-        FILE
+  private static final Option OPTION_DATAHUB_PLATFORM =
+      Option.builder()
+          .longOpt("platform")
+          .hasArg()
+          .desc(
+              "[Optional] The data platform to produce schemas for. e.g. kafka, snowflake, etc. (defaults to kafka)")
+          .build();
+
+  private static final Option OPTION_DATAHUB_API =
+      Option.builder()
+          .longOpt("datahub_api")
+          .hasArg()
+          .desc("[Optional] The API endpoint for DataHub GMS. (defaults to https://localhost:8080)")
+          .build();
+
+  private static final Option OPTION_DATAHUB_TOKEN =
+      Option.builder()
+          .longOpt("datahub_token")
+          .hasArg()
+          .desc("[Optional] The authentication token for DataHub API access. (defaults to empty)")
+          .build();
+
+  private static final Option OPTION_DESCRIPTOR =
+      Option.builder()
+          .longOpt("descriptor")
+          .hasArg()
+          .desc(
+              "[Required] The generated protobuf descriptor file. "
+                  + "Typically a single .dsc file for the repo or a .protoc file (1:1 with each src file)")
+          .required()
+          .build();
+
+  private static final Option OPTION_FILE =
+      Option.builder()
+          .longOpt("file")
+          .hasArg()
+          .desc(
+              "[Optional if using --directory] The protobuf source file. Typically a .proto file.")
+          .build();
+
+  private static final Option OPTION_DIR =
+      Option.builder()
+          .longOpt("directory")
+          .hasArg()
+          .desc("[Optional if using --file] The root directory containing protobuf source files.")
+          .build();
+
+  private static final Option OPTION_EXCLUDE_PATTERN =
+      Option.builder()
+          .longOpt("exclude")
+          .valueSeparator(',')
+          .hasArgs()
+          .desc(
+              "[Optional] Exclude patterns to avoid processing all source files, separated by ,. Typically used with --directory option. "
                  + "Follows glob patterns: e.g. --exclude \"build/**,generated/**\" will exclude all files in the build "
                  + "and generated directories under the rootDirectory given by the --directory option")
+          .build();
+
+  private static final Option OPTION_DATAHUB_USER =
+      Option.builder()
+          .longOpt("datahub_user")
+          .hasArg()
+          .desc("[Optional] The datahub user to attribute this ingestion to. (defaults to ..)")
+          .build();
+
+  private static final Option OPTION_ENV =
+      Option.builder()
+          .longOpt("env")
+          .hasArg()
+          .desc(
+              "[Optional] The environment to attach all entities to. Typically, DEV, PROD etc. (defaults to DEV)")
+          .build();
+
+  private static final Option OPTION_GITHUB_ORG =
+      Option.builder()
+          .longOpt("github_org")
+          .hasArg()
+          .desc(
+              "[Optional] The GitHub organization that this schema repository belongs to. "
+                  + "We will translate comments in your protoc files like @datahub-project/data-team "
+                  + "to GitHub team urls like: https://github.com/orgs/datahub-project/teams/data-team")
+          .build();
+
+  private static final Option OPTION_SLACK_ID =
+      Option.builder()
+          .longOpt("slack_id")
+          .hasArg()
+          .desc(
+              "[Optional] The Slack team id if your protobuf files contain comments with references to channel names. "
+                  + "We will translate comments like #data-eng in your protobuf file to slack urls like: "
+                  + "https://slack.com/app_redirect?channel=data-eng&team=T1234 following the "
+                  + "documentation at (https://api.slack.com/reference/deep-linking#deep-linking-into-your-slack-app__opening-a-channel-by-name-or-id) "
+                  + "The easiest way to find your Slack team id is to open your workspace in your browser. It should look "
+                  + "something like: https://app.slack.com/client/TUMKD5EGJ/... In this case, the team-id is TUMKD5EGJ.")
+          .build();
+
+  private static final Option OPTION_TRANSPORT =
+      Option.builder()
+          .longOpt("transport")
+          .hasArg()
+          .desc(
+              "[Optional] What transport to use to communicate with DataHub. Options are: rest (default), kafka and file.")
+          .build();
+
+  private static final Option OPTION_FILENAME =
+      Option.builder()
+          .longOpt("filename")
+          .hasArg()
+          .desc("[Required if using transport file] Filename to write output to.")
+          .build();
+
+  private static final Option OPTION_HELP =
+      Option.builder().longOpt("help").desc("Print this help message").build();
+
+  private static final Option OPTION_SUBTYPE =
+      Option.builder()
+          .longOpt("subtype")
+          .desc(
+              "[Optional] A custom subtype to attach to all entities produced. e.g. event, schema, topic etc."
+                  + "(Default is schema)")
+          .build();
+
+  enum TransportOptions {
+    REST,
+    KAFKA,
+    FILE
+  }
+
+  static class AppConfig {
+
+    private final String datahubUser;
+    private final FabricType fabricType;
+    private final String datahubAPI;
+    private final String datahubToken;
+    private final String githubOrg;
+    private final String slackId;
+    private final String dataPlatform;
+    private final String protoc;
+    private final String inputFile;
+    private final String inputDir;
+    private final TransportOptions transport;
+    private final String filename;
+    private final String subType;
+    private final String[] excludePatterns;
+
+    AppConfig(CommandLine cli) {
+      Map<String, String> env = System.getenv();
+      datahubAPI =
+          cli.getOptionValue(
+              OPTION_DATAHUB_API, env.getOrDefault("DATAHUB_API", "http://localhost:8080"));
+      datahubToken =
+          cli.getOptionValue(OPTION_DATAHUB_TOKEN, env.getOrDefault("DATAHUB_TOKEN", ""));
+      datahubUser =
+          cli.getOptionValue(OPTION_DATAHUB_USER, env.getOrDefault("DATAHUB_USER", "datahub"));
+      fabricType =
+          FabricType.valueOf(
+              cli.getOptionValue(OPTION_ENV, env.getOrDefault("DATAHUB_ENV", "DEV"))
+                  .toUpperCase(Locale.ROOT));
+      githubOrg =
+          cli.getOptionValue(OPTION_GITHUB_ORG, env.getOrDefault("DATAHUB_GITHUBORG", null));
+      slackId = cli.getOptionValue(OPTION_SLACK_ID, env.getOrDefault("DATAHUB_SLACKID", null));
+      dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT);
+      protoc = cli.getOptionValue(OPTION_DESCRIPTOR);
+      inputFile = cli.getOptionValue(OPTION_FILE, null);
+      transport =
+          TransportOptions.valueOf(
+              cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT));
+      filename = cli.getOptionValue(OPTION_FILENAME, null);
+      subType = cli.getOptionValue(OPTION_SUBTYPE, "schema").toLowerCase(Locale.ROOT);
+      inputDir = cli.getOptionValue(OPTION_DIR, null);
+      excludePatterns = cli.getOptionValues(OPTION_EXCLUDE_PATTERN);
     }
 
-    static class AppConfig {
-
-        private final String datahubUser;
-        private final FabricType fabricType;
-        private final String datahubAPI;
-        private final String datahubToken;
-        private final String githubOrg;
-        private final String slackId;
-        private final String dataPlatform;
-        private final String protoc;
-        private final String inputFile;
-        private final String inputDir;
-        private final TransportOptions transport;
-        private final String filename;
-        private final String subType;
-        private final String[] excludePatterns;
-
-
-        AppConfig(CommandLine cli) {
-            Map<String, String> env = System.getenv();
-            datahubAPI = cli.getOptionValue(OPTION_DATAHUB_API, env.getOrDefault("DATAHUB_API", "http://localhost:8080"));
-            datahubToken = cli.getOptionValue(OPTION_DATAHUB_TOKEN, env.getOrDefault("DATAHUB_TOKEN", ""));
-            datahubUser = cli.getOptionValue(OPTION_DATAHUB_USER, env.getOrDefault("DATAHUB_USER", "datahub"));
-            fabricType = FabricType.valueOf(
-                cli.getOptionValue(OPTION_ENV, env.getOrDefault("DATAHUB_ENV", "DEV")).toUpperCase(Locale.ROOT));
-            githubOrg = cli.getOptionValue(OPTION_GITHUB_ORG, env.getOrDefault("DATAHUB_GITHUBORG", null));
-            slackId = cli.getOptionValue(OPTION_SLACK_ID, env.getOrDefault("DATAHUB_SLACKID", null));
-            dataPlatform = cli.getOptionValue(OPTION_DATAHUB_PLATFORM, "kafka").toLowerCase(Locale.ROOT);
-            protoc = cli.getOptionValue(OPTION_DESCRIPTOR);
-            inputFile = cli.getOptionValue(OPTION_FILE, null);
-            transport = TransportOptions.valueOf(cli.getOptionValue(OPTION_TRANSPORT, "rest").toUpperCase(Locale.ROOT));
-            filename = cli.getOptionValue(OPTION_FILENAME, null);
-            subType = cli.getOptionValue(OPTION_SUBTYPE, "schema").toLowerCase(Locale.ROOT);
-            inputDir = cli.getOptionValue(OPTION_DIR, null);
-            excludePatterns = cli.getOptionValues(OPTION_EXCLUDE_PATTERN);
+    private AppConfig validate() throws Exception {
+      switch (transport) {
+        case FILE:
+          if (filename == null) {
+            throw new Exception("Transport file is being used, but a filename was not provided");
+          }
+          break;
+        default:
+          // do nothing
+      }
+      if (this.protoc != null) {
+        Path path = Path.of(this.protoc);
+        if (!Files.exists(path)) {
+          throw new Exception(
+              String.format("Proto-descriptor file %s does not exist", this.protoc));
         }
-
-        private AppConfig validate() throws Exception {
-            switch (transport) {
-                case FILE:
-                    if (filename == null) {
-                        throw new Exception("Transport file is being used, but a filename was not provided");
-                    }
-                    break;
-                default:
-                    // do nothing
-            }
-            if (this.protoc != null) {
-                Path path = Path.of(this.protoc);
-                if (!Files.exists(path)) {
-                    throw new Exception(String.format("Proto-descriptor file %s does not exist", this.protoc));
-                }
-                if (!Files.isRegularFile(path)) {
-                    throw new Exception(String.format("Proto-descriptor file %s is not a regular file", this.protoc));
-                }
-            }
-            if ((this.inputFile == null) && (this.inputDir == null)) {
-                throw new Exception("Must provide either an input file or an input directory to read from");
-            }
-            if (this.slackId != null) {
-                if (!this.slackId.startsWith("T")) {
-                    throw new Exception(String.format("Slack team id %s should start with the letter T. "
-                        + "The easiest way to find your Slack team id is to open your workspace in your browser. "
-                        + "It should look something like: https://app.slack.com/client/TUMKD5EGJ/... "
-                        + "In this case, the team-id is TUMKD5EGJ.", this.slackId));
-                }
-            }
-            return this;
+        if (!Files.isRegularFile(path)) {
+          throw new Exception(
+              String.format("Proto-descriptor file %s is not a regular file", this.protoc));
         }
-
+      }
+      if ((this.inputFile == null) && (this.inputDir == null)) {
+        throw new Exception("Must provide either an input file or an input directory to read from");
+      }
+      if (this.slackId != null) {
+        if (!this.slackId.startsWith("T")) {
+          throw new Exception(
+              String.format(
+                  "Slack team id %s should start with the letter T. "
+                      + "The easiest way to find your Slack team id is to open your workspace in your browser. "
+                      + "It should look something like: https://app.slack.com/client/TUMKD5EGJ/... "
+                      + "In this case, the team-id is TUMKD5EGJ.",
+                  this.slackId));
+        }
+      }
+      return this;
     }
+  }
 
-    private Proto2DataHub() {
-
-    }
+  private Proto2DataHub() {}
 
-    public static void main(String[] args) throws Exception {
-        Options options = new Options();
-
-        options.addOption(OPTION_DATAHUB_PLATFORM)
-            .addOption(OPTION_DATAHUB_API)
-            .addOption(OPTION_DATAHUB_TOKEN)
-            .addOption(OPTION_DESCRIPTOR)
-            .addOption(OPTION_FILE)
-            .addOption(OPTION_DIR)
-            .addOption(OPTION_EXCLUDE_PATTERN)
-            .addOption(OPTION_DATAHUB_USER)
-            .addOption(OPTION_GITHUB_ORG)
-            .addOption(OPTION_ENV)
-            .addOption(OPTION_SLACK_ID)
-            .addOption(OPTION_TRANSPORT)
-            .addOption(OPTION_FILENAME)
-            .addOption(OPTION_SUBTYPE)
-            .addOption(OPTION_HELP);
-
-        Options firstPassOptions = new Options()
-            .addOption(OPTION_HELP);
-
-        // create the parser
-        CommandLineParser parser = new DefaultParser();
-        CommandLine cli = null;
-        cli = parser.parse(firstPassOptions, args, true);
-        if (cli.hasOption(OPTION_HELP)) {
-            printUsageAndExit(options, 0);
-        }
+  public static void main(String[] args) throws Exception {
+    Options options = new Options();
+
+    options
+        .addOption(OPTION_DATAHUB_PLATFORM)
+        .addOption(OPTION_DATAHUB_API)
+        .addOption(OPTION_DATAHUB_TOKEN)
+        .addOption(OPTION_DESCRIPTOR)
+        .addOption(OPTION_FILE)
+        .addOption(OPTION_DIR)
+        .addOption(OPTION_EXCLUDE_PATTERN)
+        .addOption(OPTION_DATAHUB_USER)
+        .addOption(OPTION_GITHUB_ORG)
+        .addOption(OPTION_ENV)
+        .addOption(OPTION_SLACK_ID)
+        .addOption(OPTION_TRANSPORT)
+        .addOption(OPTION_FILENAME)
+        .addOption(OPTION_SUBTYPE)
+        .addOption(OPTION_HELP);
+
+    Options firstPassOptions = new Options().addOption(OPTION_HELP);
+
+    // create the parser
+    CommandLineParser parser = new DefaultParser();
+    CommandLine cli = null;
+    cli = parser.parse(firstPassOptions, args, true);
+    if (cli.hasOption(OPTION_HELP)) {
+      printUsageAndExit(options, 0);
+    }
 
-        try {
-            // parse the real command line arguments
-            cli = parser.parse(options, args);
-        } catch (Exception exp) {
-            // oops, something went wrong
-            // we try old-style format before giving up
-            try {
-                String[] translatedArgs = convertOldStyleArgsIfPossible(args);
-                if (translatedArgs != null) {
-                    cli = parser.parse(options, translatedArgs);
-                } else {
-                    System.err.println("Parsing failed. Reason: " + exp.getMessage());
-                    printUsageAndExit(options, 1);
-                }
-            } catch (Exception secondExp) {
-                System.err.println("Parsing failed. Reason: " + secondExp.getMessage());
-                printUsageAndExit(options, 1);
-            }
-        }
+    try {
+      // parse the real command line arguments
+      cli = parser.parse(options, args);
+    } catch (Exception exp) {
+      // oops, something went wrong
+      // we try old-style format before giving up
+      try {
+        String[] translatedArgs = convertOldStyleArgsIfPossible(args);
+        if (translatedArgs != null) {
+          cli = parser.parse(options, translatedArgs);
+        } else {
+          System.err.println("Parsing failed. Reason: " + exp.getMessage());
+          printUsageAndExit(options, 1);
+        }
+      } catch (Exception secondExp) {
+        System.err.println("Parsing failed. Reason: " + secondExp.getMessage());
+        printUsageAndExit(options, 1);
+      }
+    }
 
-        AppConfig config = new AppConfig(cli).validate();
-        Emitter emitter = null;
-        AtomicInteger totalEvents = new AtomicInteger();
-
-        switch (config.transport) {
-            case REST: {
-                emitter = RestEmitter
-                    .create(b -> b.server(config.datahubAPI).token(config.datahubToken));
-            } break;
-            case KAFKA: {
-                throw new UnsupportedOperationException("Kafka transport is not supported yet.");
-            }
-            case FILE: {
-                emitter = new FileEmitter(FileEmitterConfig.builder().fileName(config.filename).build());
-            }
-            break;
-            default: {
-                throw new UnsupportedOperationException(String
-                    .format("%s transport is not supported yet.", config.transport));
-            }
+    AppConfig config = new AppConfig(cli).validate();
+    Emitter emitter = null;
+    AtomicInteger totalEvents = new AtomicInteger();
+
+    switch (config.transport) {
+      case REST:
+        {
+          emitter = RestEmitter.create(b -> b.server(config.datahubAPI).token(config.datahubToken));
+        }
+        break;
+      case KAFKA:
+        {
+          throw new UnsupportedOperationException("Kafka transport is not supported yet.");
+        }
+      case FILE:
+        {
+          emitter = new FileEmitter(FileEmitterConfig.builder().fileName(config.filename).build());
+        }
+        break;
+      default:
+        {
+          throw new UnsupportedOperationException(
+              String.format("%s transport is not supported yet.", config.transport));
         }
+    }
 
-        AuditStamp auditStamp = new AuditStamp()
-            .setTime(System.currentTimeMillis())
-            .setActor(new CorpuserUrn(config.datahubUser));
+    AuditStamp auditStamp =
+        new AuditStamp()
+            .setTime(System.currentTimeMillis())
+            .setActor(new CorpuserUrn(config.datahubUser));
 
-        InputStream protocStream = new FileInputStream(config.protoc);
+    InputStream protocStream = new FileInputStream(config.protoc);
 
-        Stream<Path> filePathStream = Stream.empty();
-        if (config.inputFile != null) {
-            filePathStream = Stream.of(Path.of(config.inputFile));
-        } else {
-            DirectoryWalker walker = new DirectoryWalker(config.inputDir, config.excludePatterns);
-            filePathStream = walker.walkFiles();
-        }
+    Stream<Path> filePathStream = Stream.empty();
+    if (config.inputFile != null) {
+      filePathStream = Stream.of(Path.of(config.inputFile));
+    } else {
+      DirectoryWalker walker = new DirectoryWalker(config.inputDir, config.excludePatterns);
+      filePathStream = walker.walkFiles();
+    }
 
-        Emitter finalEmitter = emitter;
-        AtomicInteger exitCode = new AtomicInteger(0);
-        AtomicInteger totalFiles = new AtomicInteger(0);
-
-        try {
-            filePathStream.forEach(filePath -> {
-                totalFiles.incrementAndGet();
-                try {
-                    String textSchema = Files.readString(filePath);
-
-                    ProtobufDataset dataset = ProtobufDataset.builder()
-                        .setDataPlatformUrn(new DataPlatformUrn(config.dataPlatform))
-                        .setProtocIn(new FileInputStream(config.protoc))
-                        .setFilename(filePath.toString())
-                        .setSchema(textSchema)
-                        .setAuditStamp(auditStamp)
-                        .setFabricType(config.fabricType)
-                        .setGithubOrganization(config.githubOrg)
-                        .setSlackTeamId(config.slackId)
-                        .setSubType(config.subType)
-                        .build();
-
-                    dataset.getAllMetadataChangeProposals().flatMap(Collection::stream).forEach(mcpw -> {
+    Emitter finalEmitter = emitter;
+    AtomicInteger exitCode = new AtomicInteger(0);
+    AtomicInteger totalFiles = new AtomicInteger(0);
+
+    try {
+      filePathStream.forEach(
+          filePath -> {
+            totalFiles.incrementAndGet();
+            try {
+              String textSchema = Files.readString(filePath);
+
+              ProtobufDataset dataset =
+                  ProtobufDataset.builder()
+                      .setDataPlatformUrn(new DataPlatformUrn(config.dataPlatform))
+                      .setProtocIn(new FileInputStream(config.protoc))
+                      .setFilename(filePath.toString())
+                      .setSchema(textSchema)
+                      .setAuditStamp(auditStamp)
+                      .setFabricType(config.fabricType)
+                      .setGithubOrganization(config.githubOrg)
+                      .setSlackTeamId(config.slackId)
+                      .setSubType(config.subType)
+                      .build();
+
+              dataset
+                  .getAllMetadataChangeProposals()
+                  .flatMap(Collection::stream)
+                  .forEach(
+                      mcpw -> {
                         try {
-                            finalEmitter.emit(mcpw, null).get();
-                            totalEvents.getAndIncrement();
+                          finalEmitter.emit(mcpw, null).get();
+                          totalEvents.getAndIncrement();
                         } catch (Exception e) {
-                            throw new RuntimeException(e);
+                          throw new RuntimeException(e);
                         }
-                    });
-                } catch (Exception e) {
-                    if (e.getMessage() != null && e.getMessage().equals("Cannot autodetect protobuf Message.")) {
-                        System.err.printf("WARN: Top-level schema not found in %s, no dataset emitted%n", args[1]);
-                    } else {
-                        e.printStackTrace();
-                        System.err.println(String.format("‼️ Failed to emit to DataHub over %s. Num events emitted so far %d",
-                            config.transport, totalEvents.get()));
-                        exitCode.set(1);
-                    }
-                }
-            });
-        } finally {
-            if (emitter != null) {
-                emitter.close();
+                      });
+            } catch (Exception e) {
+              if (e.getMessage() != null
+                  && e.getMessage().equals("Cannot autodetect protobuf Message.")) {
+                System.err.printf(
+                    "WARN: Top-level schema not found in %s, no dataset emitted%n", args[1]);
+              } else {
+                e.printStackTrace();
+                System.err.println(
+                    String.format(
+                        "‼️ Failed to emit to DataHub over %s. Num events emitted so far %d",
+                        config.transport, totalEvents.get()));
+                exitCode.set(1);
+              }
             }
+          });
+    } finally {
+      if (emitter != null) {
+        emitter.close();
+      }
     }
     if (exitCode.get() == 0) {
-        System.out.println(
-            String.format("✅ Successfully emitted %d events for %d files to DataHub %s", totalEvents.get(), totalFiles.get(), config.transport));
+      System.out.println(
+          String.format(
+              "✅ Successfully emitted %d events for %d files to DataHub %s",
+              totalEvents.get(), totalFiles.get(), config.transport));
     } else {
-        System.out.println(
-            String.format("‼️ Emitted %d events for %d files to DataHub %s", totalEvents.get(), totalFiles.get(), config.transport));
+      System.out.println(
+          String.format(
+              "‼️ Emitted %d events for %d files to DataHub %s",
+              totalEvents.get(), totalFiles.get(), config.transport));
     }
     System.exit(exitCode.get());
-}
+  }
 
-    private static String[] convertOldStyleArgsIfPossible(String[] args) {
-        if (args.length == 2) {
-            String[] translatedArgs = {"--descriptor", args[0], "--file", args[1]};
-            return translatedArgs;
-        } else {
-            return null;
-        }
+  private static String[] convertOldStyleArgsIfPossible(String[] args) {
+    if (args.length == 2) {
+      String[] translatedArgs = {"--descriptor", args[0], "--file", args[1]};
+      return translatedArgs;
+    } else {
+      return null;
     }
+  }
 
-    private static void printUsageAndExit(Options options, int exitCode) {
-        HelpFormatter helpFormatter = new HelpFormatter();
-        helpFormatter.printHelp(Proto2DataHub.class.getSimpleName(), options);
-        System.exit(exitCode);
-    }
+  private static void printUsageAndExit(Options options, int exitCode) {
+    HelpFormatter helpFormatter = new HelpFormatter();
+    helpFormatter.printHelp(Proto2DataHub.class.getSimpleName(), options);
+    System.exit(exitCode);
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java
index 312b3785ac791..e0c27ebea18bc 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufDataset.java
@@ -15,6 +15,7 @@
 import com.linkedin.schema.SchemaFieldArray;
 import com.linkedin.schema.SchemaMetadata;
 import com.linkedin.util.Pair;
+import datahub.event.MetadataChangeProposalWrapper;
 import datahub.protobuf.model.ProtobufGraph;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
@@ -26,270 +27,282 @@
 import datahub.protobuf.visitors.dataset.PropertyVisitor;
 import datahub.protobuf.visitors.dataset.TagAssociationVisitor;
 import datahub.protobuf.visitors.dataset.TermAssociationVisitor;
-import datahub.protobuf.visitors.field.SchemaFieldVisitor;
-import datahub.event.MetadataChangeProposalWrapper;
 import datahub.protobuf.visitors.field.ProtobufExtensionFieldVisitor;
+import datahub.protobuf.visitors.field.SchemaFieldVisitor;
 import datahub.protobuf.visitors.tags.TagVisitor;
-
-import javax.annotation.Nullable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Base64;
 import java.util.Collection;
 import java.util.Comparator;
-import java.util.Optional;
 import java.util.List;
+import java.util.Optional;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
+import javax.annotation.Nullable;
 
 public class ProtobufDataset {
 
-    public static ProtobufDataset.Builder builder() {
-        return new Builder();
-    }
-
-    public static class Builder {
-        private DataPlatformUrn dataPlatformUrn;
-        private DatasetUrn datasetUrn;
-        private FabricType fabricType;
-        private AuditStamp auditStamp;
-        private byte[] protocBytes;
-        private String messageName;
-        private String filename;
-        private String schema;
-        private String githubOrganization;
-        private String slackTeamId;
-        private String subType;
-
-        public Builder setGithubOrganization(@Nullable String githubOrganization) {
-            this.githubOrganization = githubOrganization;
-            return this;
-        }
-
-        public Builder setSlackTeamId(@Nullable String slackTeamId) {
-            this.slackTeamId = slackTeamId;
-            return this;
-        }
-
-        public Builder setProtocIn(InputStream protocIn) throws IOException {
-            return setProtocBytes(protocIn.readAllBytes());
-        }
-
-        public Builder setDataPlatformUrn(@Nullable DataPlatformUrn dataPlatformUrn) {
-            this.dataPlatformUrn = dataPlatformUrn;
-            return this;
-        }
-
-        public Builder setDatasetUrn(@Nullable DatasetUrn datasetUrn) {
-            this.datasetUrn = datasetUrn;
-            return this;
-        }
-
-        public Builder setProtocBytes(byte[] protocBytes) {
-            this.protocBytes = protocBytes;
-            return this;
-        }
-
-        public Builder setFabricType(FabricType fabricType) {
-            this.fabricType = fabricType;
-            return this;
-        }
-
-        public Builder setAuditStamp(AuditStamp auditStamp) {
-            this.auditStamp = auditStamp;
-            return this;
-        }
-
-        public Builder setMessageName(@Nullable String messageName) {
-            this.messageName = messageName;
-            return this;
-        }
-
-        public Builder setFilename(@Nullable String filename) {
-            this.filename = filename;
-            return this;
-        }
-
-        public Builder setSchema(@Nullable String schema) {
-            this.schema = schema;
-            return this;
-        }
-
-        public Builder setSubType(@Nullable String subType) {
-            this.subType = subType;
-            return this;
-        }
-
-        public ProtobufDataset build() throws IOException {
-            FileDescriptorSet fileSet = FileDescriptorSet.parseFrom(protocBytes);
-
-            return new ProtobufDataset(
-                this,
-                Optional.ofNullable(dataPlatformUrn).orElse(new DataPlatformUrn("kafka")),
-                datasetUrn,
-                new ProtobufGraph(fileSet, messageName, filename), schema, auditStamp, fabricType)
-                .setMetadataChangeProposalVisitors(
-                    List.of(
-                        new TagVisitor()
-                    )
-                )
-                .setFieldVisitor(new ProtobufExtensionFieldVisitor())
-                .setDatasetVisitor(DatasetVisitor.builder()
-                    .protocBase64(Base64.getEncoder().encodeToString(protocBytes))
-                    .datasetPropertyVisitors(
-                        List.of(
-                            new KafkaTopicPropertyVisitor(),
-                            new PropertyVisitor()
-                        )
-                    )
-                    .institutionalMemoryMetadataVisitors(
-                        List.of(
-                            new InstitutionalMemoryVisitor(slackTeamId, githubOrganization)
-                        )
-                    )
-                    .tagAssociationVisitors(
-                        List.of(
-                            new TagAssociationVisitor()
-                        )
-                    )
-                    .termAssociationVisitors(
-                        List.of(
-                            new TermAssociationVisitor()
-                        )
-                    )
-                    .ownershipVisitors(
-                        List.of(
-                            new OwnershipVisitor()
-                        )
-                    )
-                    .domainVisitors(
-                        List.of(
-                            new DomainVisitor()
-                        )
-                    )
-                    .build()
-                )
-                .setSubType(subType);
-        }
-    }
+  public static ProtobufDataset.Builder builder() {
+    return new Builder();
+  }
+
+  public static class Builder {
+    private DataPlatformUrn dataPlatformUrn;
+    private DatasetUrn datasetUrn;
+    private FabricType fabricType;
+    private AuditStamp auditStamp;
+    private byte[] protocBytes;
+    private String messageName;
+    private String filename;
+    private String schema;
+    private String githubOrganization;
+    private String slackTeamId;
+    private String subType;
+
+    public Builder setGithubOrganization(@Nullable String githubOrganization) {
+      this.githubOrganization = githubOrganization;
+      return this;
+    }
 
-    private final DatasetUrn datasetUrn;
-    private final Optional<String> schemaSource;
-    private final ProtobufGraph graph;
-    private final AuditStamp auditStamp;
-    private Optional<String> subType;
-    private final VisitContext.VisitContextBuilder contextBuilder;
-    private final ProtobufDataset.Builder builder;
-
-    private DatasetVisitor datasetVisitor;
-    private ProtobufModelVisitor> fieldVisitor;
-    private List>> mcpwVisitors;
-
-    public ProtobufDataset(DataPlatformUrn dataPlatformUrn, DatasetUrn datasetUrn, ProtobufGraph graph, String schema,
-        AuditStamp auditStamp, FabricType fabricType) {
-        this(null, dataPlatformUrn, datasetUrn, graph, schema, auditStamp, fabricType);
-    }
-
-    public ProtobufDataset(ProtobufDataset.Builder builder, DataPlatformUrn dataPlatformUrn, DatasetUrn datasetUrn, ProtobufGraph graph,
-        String schema, AuditStamp auditStamp, FabricType fabricType) {
-        this.builder = builder;
-        this.schemaSource = Optional.ofNullable(schema);
-        this.auditStamp = auditStamp;
-        this.graph = graph;
-        this.subType = Optional.empty();
-
-        // Default - non-protobuf extension
-        fieldVisitor = new SchemaFieldVisitor();
-        mcpwVisitors = List.of();
-
-        this.datasetUrn = datasetUrn != null ?
datasetUrn : new DatasetUrn(dataPlatformUrn, this.graph.getFullName(), fabricType); - this.contextBuilder = VisitContext.builder().datasetUrn(this.datasetUrn).auditStamp(this.auditStamp); + public Builder setProtocIn(InputStream protocIn) throws IOException { + return setProtocBytes(protocIn.readAllBytes()); } - public ProtobufDataset setMetadataChangeProposalVisitors(List>> visitors) { - this.mcpwVisitors = visitors; - return this; + public Builder setDataPlatformUrn(@Nullable DataPlatformUrn dataPlatformUrn) { + this.dataPlatformUrn = dataPlatformUrn; + return this; } - public ProtobufDataset setDatasetVisitor(DatasetVisitor datasetVisitor) { - this.datasetVisitor = datasetVisitor; - return this; + public Builder setDatasetUrn(@Nullable DatasetUrn datasetUrn) { + this.datasetUrn = datasetUrn; + return this; } - public ProtobufDataset setFieldVisitor(ProtobufModelVisitor> visitor) { - this.fieldVisitor = visitor; - return this; + public Builder setProtocBytes(byte[] protocBytes) { + this.protocBytes = protocBytes; + return this; } - public ProtobufDataset setSubType(String subType) { - this.subType = Optional.ofNullable(subType); - return this; + public Builder setFabricType(FabricType fabricType) { + this.fabricType = fabricType; + return this; } - public ProtobufDataset.Builder toBuilder() { - return builder; + public Builder setAuditStamp(AuditStamp auditStamp) { + this.auditStamp = auditStamp; + return this; } - public ProtobufGraph getGraph() { - return graph; + public Builder setMessageName(@Nullable String messageName) { + this.messageName = messageName; + return this; } - public AuditStamp getAuditStamp() { - return auditStamp; + public Builder setFilename(@Nullable String filename) { + this.filename = filename; + return this; } - public DatasetUrn getDatasetUrn() { - return datasetUrn; + public Builder setSchema(@Nullable String schema) { + this.schema = schema; + return this; } - public Stream>> getAllMetadataChangeProposals() { - return Stream.of(getVisitorMCPs(), getDatasetMCPs()); + public Builder setSubType(@Nullable String subType) { + this.subType = subType; + return this; } - public List> getVisitorMCPs() { - return graph.accept(contextBuilder, mcpwVisitors).collect(Collectors.toList()); + public ProtobufDataset build() throws IOException { + FileDescriptorSet fileSet = FileDescriptorSet.parseFrom(protocBytes); + + return new ProtobufDataset( + this, + Optional.ofNullable(dataPlatformUrn).orElse(new DataPlatformUrn("kafka")), + datasetUrn, + new ProtobufGraph(fileSet, messageName, filename), + schema, + auditStamp, + fabricType) + .setMetadataChangeProposalVisitors(List.of(new TagVisitor())) + .setFieldVisitor(new ProtobufExtensionFieldVisitor()) + .setDatasetVisitor( + DatasetVisitor.builder() + .protocBase64(Base64.getEncoder().encodeToString(protocBytes)) + .datasetPropertyVisitors( + List.of(new KafkaTopicPropertyVisitor(), new PropertyVisitor())) + .institutionalMemoryMetadataVisitors( + List.of(new InstitutionalMemoryVisitor(slackTeamId, githubOrganization))) + .tagAssociationVisitors(List.of(new TagAssociationVisitor())) + .termAssociationVisitors(List.of(new TermAssociationVisitor())) + .ownershipVisitors(List.of(new OwnershipVisitor())) + .domainVisitors(List.of(new DomainVisitor())) + .build()) + .setSubType(subType); } - - public List> getDatasetMCPs() { - Stream> mcpStream = - Stream.concat(this.graph.accept(contextBuilder, List.of(datasetVisitor)), + } + + private final DatasetUrn datasetUrn; + private final Optional schemaSource; + private final 
ProtobufGraph graph; + private final AuditStamp auditStamp; + private Optional subType; + private final VisitContext.VisitContextBuilder contextBuilder; + private final ProtobufDataset.Builder builder; + + private DatasetVisitor datasetVisitor; + private ProtobufModelVisitor> fieldVisitor; + private List>> + mcpwVisitors; + + public ProtobufDataset( + DataPlatformUrn dataPlatformUrn, + DatasetUrn datasetUrn, + ProtobufGraph graph, + String schema, + AuditStamp auditStamp, + FabricType fabricType) { + this(null, dataPlatformUrn, datasetUrn, graph, schema, auditStamp, fabricType); + } + + public ProtobufDataset( + ProtobufDataset.Builder builder, + DataPlatformUrn dataPlatformUrn, + DatasetUrn datasetUrn, + ProtobufGraph graph, + String schema, + AuditStamp auditStamp, + FabricType fabricType) { + this.builder = builder; + this.schemaSource = Optional.ofNullable(schema); + this.auditStamp = auditStamp; + this.graph = graph; + this.subType = Optional.empty(); + + // Default - non-protobuf extension + fieldVisitor = new SchemaFieldVisitor(); + mcpwVisitors = List.of(); + + this.datasetUrn = + datasetUrn != null + ? datasetUrn + : new DatasetUrn(dataPlatformUrn, this.graph.getFullName(), fabricType); + this.contextBuilder = + VisitContext.builder().datasetUrn(this.datasetUrn).auditStamp(this.auditStamp); + } + + public ProtobufDataset setMetadataChangeProposalVisitors( + List>> + visitors) { + this.mcpwVisitors = visitors; + return this; + } + + public ProtobufDataset setDatasetVisitor(DatasetVisitor datasetVisitor) { + this.datasetVisitor = datasetVisitor; + return this; + } + + public ProtobufDataset setFieldVisitor(ProtobufModelVisitor> visitor) { + this.fieldVisitor = visitor; + return this; + } + + public ProtobufDataset setSubType(String subType) { + this.subType = Optional.ofNullable(subType); + return this; + } + + public ProtobufDataset.Builder toBuilder() { + return builder; + } + + public ProtobufGraph getGraph() { + return graph; + } + + public AuditStamp getAuditStamp() { + return auditStamp; + } + + public DatasetUrn getDatasetUrn() { + return datasetUrn; + } + + public Stream>> + getAllMetadataChangeProposals() { + return Stream.of(getVisitorMCPs(), getDatasetMCPs()); + } + + public List> getVisitorMCPs() { + return graph.accept(contextBuilder, mcpwVisitors).collect(Collectors.toList()); + } + + public List> getDatasetMCPs() { + Stream> mcpStream = + Stream.concat( + this.graph.accept(contextBuilder, List.of(datasetVisitor)), Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - getSchemaMetadata(), "schemaMetadata"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - new Status().setRemoved(false), "status"))); - - if (this.subType.isPresent()) { - SubTypes subTypes = new SubTypes().setTypeNames(new StringArray(this.subType.get())); - mcpStream = Stream.concat(mcpStream, - Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn.toString(), ChangeType.UPSERT, - subTypes, "subTypes"))); - } - return mcpStream.collect(Collectors.toList()); - } - - public SchemaMetadata getSchemaMetadata() { - SchemaMetadata.PlatformSchema platformSchema = new SchemaMetadata.PlatformSchema(); - schemaSource.ifPresent(schemaStr -> platformSchema.setKafkaSchema(new KafkaSchema().setDocumentSchema(schemaStr))); - - List schemaFields = graph.accept(contextBuilder, List.of(fieldVisitor)) - 
.sorted(COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList()); - - return new SchemaMetadata() - .setSchemaName(graph.getFullName()) - .setPlatform(datasetUrn.getPlatformEntity()) - .setCreated(auditStamp) - .setLastModified(auditStamp) - .setVersion(graph.getMajorVersion()) - .setHash(graph.getHash()) - .setPlatformSchema(platformSchema) - .setFields(new SchemaFieldArray(schemaFields)); + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + getSchemaMetadata(), + "schemaMetadata"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + new Status().setRemoved(false), + "status"))); + + if (this.subType.isPresent()) { + SubTypes subTypes = new SubTypes().setTypeNames(new StringArray(this.subType.get())); + mcpStream = + Stream.concat( + mcpStream, + Stream.of( + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn.toString(), + ChangeType.UPSERT, + subTypes, + "subTypes"))); } - - public static final Comparator> COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT = Comparator.comparing(Pair::getSecond); - public static final Comparator> COMPARE_BY_FIELD_PATH = Comparator - .comparing(p -> p.getFirst().getFieldPath()); + return mcpStream.collect(Collectors.toList()); + } + + public SchemaMetadata getSchemaMetadata() { + SchemaMetadata.PlatformSchema platformSchema = new SchemaMetadata.PlatformSchema(); + schemaSource.ifPresent( + schemaStr -> platformSchema.setKafkaSchema(new KafkaSchema().setDocumentSchema(schemaStr))); + + List schemaFields = + graph + .accept(contextBuilder, List.of(fieldVisitor)) + .sorted(COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); + + return new SchemaMetadata() + .setSchemaName(graph.getFullName()) + .setPlatform(datasetUrn.getPlatformEntity()) + .setCreated(auditStamp) + .setLastModified(auditStamp) + .setVersion(graph.getMajorVersion()) + .setHash(graph.getHash()) + .setPlatformSchema(platformSchema) + .setFields(new SchemaFieldArray(schemaFields)); + } + + public static final Comparator> COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT = + Comparator.comparing(Pair::getSecond); + public static final Comparator> COMPARE_BY_FIELD_PATH = + Comparator.comparing(p -> p.getFirst().getFieldPath()); } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java index 5f5cfaa15cf41..ef5bc52aaee7a 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/ProtobufUtils.java @@ -5,7 +5,6 @@ import com.google.protobuf.Descriptors; import com.google.protobuf.ExtensionRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; @@ -20,176 +19,211 @@ import java.util.stream.Stream; public class ProtobufUtils { - private ProtobufUtils() { } - - public static String collapseLocationComments(DescriptorProtos.SourceCodeInfo.Location location) { - String orig = Stream.concat(location.getLeadingDetachedCommentsList().stream(), - Stream.of(location.getLeadingComments(), location.getTrailingComments())) - .filter(Objects::nonNull) - .flatMap(line 
-> Arrays.stream(line.split("\n"))) - .map(line -> line.replaceFirst("^[*/ ]+", "")) - .collect(Collectors.joining("\n")) - .trim(); - - /* - * Sometimes DataHub doesn't like these strings. Not sure if its DataHub - * or protobuf issue: https://github.com/protocolbuffers/protobuf/issues/4691 - * - * We essentially smash utf8 chars to ascii here - */ - return new String(orig.getBytes(StandardCharsets.ISO_8859_1)); - } + private ProtobufUtils() {} + + public static String collapseLocationComments(DescriptorProtos.SourceCodeInfo.Location location) { + String orig = + Stream.concat( + location.getLeadingDetachedCommentsList().stream(), + Stream.of(location.getLeadingComments(), location.getTrailingComments())) + .filter(Objects::nonNull) + .flatMap(line -> Arrays.stream(line.split("\n"))) + .map(line -> line.replaceFirst("^[*/ ]+", "")) + .collect(Collectors.joining("\n")) + .trim(); /* - * Reflection used to prevent an exception deep inside the protobuf library due to a getter method - * mutating the json name field and causing an equality check to fail between an instance that has and has not - * had the getter called. - * - * https://github.com/protocolbuffers/protobuf/blob/main/java/core/src/main/java/com/google/protobuf/Descriptors.java#L1105 - * - * java.lang.IllegalArgumentException: FieldDescriptors can only be compared to other FieldDescriptors for fields of the same message type. - * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1344) - * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1057) - * at java.base/java.util.TreeMap.put(TreeMap.java:566) - * at java.base/java.util.AbstractMap.putAll(AbstractMap.java:281) - * at java.base/java.util.TreeMap.putAll(TreeMap.java:325) - * at com.google.protobuf.GeneratedMessageV3$ExtendableMessage.getAllFields(GeneratedMessageV3.java:1240) + * Sometimes DataHub doesn't like these strings. Not sure if its DataHub + * or protobuf issue: https://github.com/protocolbuffers/protobuf/issues/4691 * + * We essentially smash utf8 chars to ascii here */ - private static final Method FIELD_OPT_EXT_FIELDS_METHOD; - private static final Method FIELD_OPT_ALL_FIELD_METHOD; - private static final Method MSG_OPT_EXT_FIELDS_METHOD; - private static final Method MSG_OPT_ALL_FIELD_METHOD; - static { - try { - FIELD_OPT_EXT_FIELDS_METHOD = DescriptorProtos.FieldOptions.class.getSuperclass() - .getDeclaredMethod("getExtensionFields"); - FIELD_OPT_EXT_FIELDS_METHOD.setAccessible(true); - - FIELD_OPT_ALL_FIELD_METHOD = DescriptorProtos.FieldOptions.class.getSuperclass().getSuperclass() - .getDeclaredMethod("getAllFieldsMutable", boolean.class); - FIELD_OPT_ALL_FIELD_METHOD.setAccessible(true); - - MSG_OPT_EXT_FIELDS_METHOD = DescriptorProtos.MessageOptions.class.getSuperclass() - .getDeclaredMethod("getExtensionFields"); - MSG_OPT_EXT_FIELDS_METHOD.setAccessible(true); - - MSG_OPT_ALL_FIELD_METHOD = DescriptorProtos.MessageOptions.class.getSuperclass().getSuperclass() - .getDeclaredMethod("getAllFieldsMutable", boolean.class); - MSG_OPT_ALL_FIELD_METHOD.setAccessible(true); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } + return new String(orig.getBytes(StandardCharsets.ISO_8859_1)); + } + + /* + * Reflection used to prevent an exception deep inside the protobuf library due to a getter method + * mutating the json name field and causing an equality check to fail between an instance that has and has not + * had the getter called. 
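+ * (Concretely, per the stack trace below: the first call to the getter fills
+ * in the json name field on the descriptor, so a descriptor that has been
+ * read through the getter no longer compares equal to a fresh copy that has
+ * not, and TreeMap.put's compareTo check then rejects mixing the two.)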
+ * + * https://github.com/protocolbuffers/protobuf/blob/main/java/core/src/main/java/com/google/protobuf/Descriptors.java#L1105 + * + * java.lang.IllegalArgumentException: FieldDescriptors can only be compared to other FieldDescriptors for fields of the same message type. + * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1344) + * at com.google.protobuf.Descriptors$FieldDescriptor.compareTo(Descriptors.java:1057) + * at java.base/java.util.TreeMap.put(TreeMap.java:566) + * at java.base/java.util.AbstractMap.putAll(AbstractMap.java:281) + * at java.base/java.util.TreeMap.putAll(TreeMap.java:325) + * at com.google.protobuf.GeneratedMessageV3$ExtendableMessage.getAllFields(GeneratedMessageV3.java:1240) + * + */ + private static final Method FIELD_OPT_EXT_FIELDS_METHOD; + private static final Method FIELD_OPT_ALL_FIELD_METHOD; + private static final Method MSG_OPT_EXT_FIELDS_METHOD; + private static final Method MSG_OPT_ALL_FIELD_METHOD; + + static { + try { + FIELD_OPT_EXT_FIELDS_METHOD = + DescriptorProtos.FieldOptions.class + .getSuperclass() + .getDeclaredMethod("getExtensionFields"); + FIELD_OPT_EXT_FIELDS_METHOD.setAccessible(true); + + FIELD_OPT_ALL_FIELD_METHOD = + DescriptorProtos.FieldOptions.class + .getSuperclass() + .getSuperclass() + .getDeclaredMethod("getAllFieldsMutable", boolean.class); + FIELD_OPT_ALL_FIELD_METHOD.setAccessible(true); + + MSG_OPT_EXT_FIELDS_METHOD = + DescriptorProtos.MessageOptions.class + .getSuperclass() + .getDeclaredMethod("getExtensionFields"); + MSG_OPT_EXT_FIELDS_METHOD.setAccessible(true); + + MSG_OPT_ALL_FIELD_METHOD = + DescriptorProtos.MessageOptions.class + .getSuperclass() + .getSuperclass() + .getDeclaredMethod("getAllFieldsMutable", boolean.class); + MSG_OPT_ALL_FIELD_METHOD.setAccessible(true); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); } - - public static List> getFieldOptions(DescriptorProtos.FieldDescriptorProto fieldProto) { - try { - LinkedList> options = new LinkedList<>(); - - options.addAll(((Map) FIELD_OPT_EXT_FIELDS_METHOD.invoke(fieldProto.getOptions())) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - options.addAll(((Map) FIELD_OPT_ALL_FIELD_METHOD.invoke(fieldProto.getOptions(), false)) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - return options; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } + } + + public static List> getFieldOptions( + DescriptorProtos.FieldDescriptorProto fieldProto) { + try { + LinkedList> options = new LinkedList<>(); + + options.addAll( + ((Map) + FIELD_OPT_EXT_FIELDS_METHOD.invoke(fieldProto.getOptions())) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + options.addAll( + ((Map) + FIELD_OPT_ALL_FIELD_METHOD.invoke(fieldProto.getOptions(), false)) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + return options; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - - public static List> getMessageOptions(DescriptorProtos.DescriptorProto messageProto) { - try { - LinkedList> options = new LinkedList<>(); - - options.addAll(((Map) MSG_OPT_EXT_FIELDS_METHOD.invoke(messageProto.getOptions())) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - 
options.addAll(((Map) MSG_OPT_ALL_FIELD_METHOD.invoke(messageProto.getOptions(), - false)) - .entrySet() - .stream() - .map(e -> Pair.of(e.getKey(), e.getValue())) - .collect(Collectors.toList())); - - return options; - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } + } + + public static List> getMessageOptions( + DescriptorProtos.DescriptorProto messageProto) { + try { + LinkedList> options = new LinkedList<>(); + + options.addAll( + ((Map) + MSG_OPT_EXT_FIELDS_METHOD.invoke(messageProto.getOptions())) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + options.addAll( + ((Map) + MSG_OPT_ALL_FIELD_METHOD.invoke(messageProto.getOptions(), false)) + .entrySet().stream() + .map(e -> Pair.of(e.getKey(), e.getValue())) + .collect(Collectors.toList())); + + return options; + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - - public static ExtensionRegistry buildRegistry(DescriptorProtos.FileDescriptorSet fileSet) { - ExtensionRegistry registry = ExtensionRegistry.newInstance(); - Map descriptorProtoMap = fileSet.getFileList().stream() - .collect(Collectors.toMap(DescriptorProtos.FileDescriptorProto::getName, Function.identity())); - Map descriptorCache = new HashMap<>(); - - fileSet.getFileList().forEach(fdp -> { - try { - Descriptors.FileDescriptor file = descriptorFromProto(fdp, descriptorProtoMap, descriptorCache); - Stream.concat(file.getExtensions().stream(), file.getMessageTypes().stream().flatMap(msg -> msg.getExtensions().stream())) - .forEach(ext -> addToRegistry(fdp, ext, registry)); - } catch (Descriptors.DescriptorValidationException e) { + } + + public static ExtensionRegistry buildRegistry(DescriptorProtos.FileDescriptorSet fileSet) { + ExtensionRegistry registry = ExtensionRegistry.newInstance(); + Map descriptorProtoMap = + fileSet.getFileList().stream() + .collect( + Collectors.toMap( + DescriptorProtos.FileDescriptorProto::getName, Function.identity())); + Map descriptorCache = new HashMap<>(); + + fileSet + .getFileList() + .forEach( + fdp -> { + try { + Descriptors.FileDescriptor file = + descriptorFromProto(fdp, descriptorProtoMap, descriptorCache); + Stream.concat( + file.getExtensions().stream(), + file.getMessageTypes().stream() + .flatMap(msg -> msg.getExtensions().stream())) + .forEach(ext -> addToRegistry(fdp, ext, registry)); + } catch (Descriptors.DescriptorValidationException e) { e.printStackTrace(); - } - }); - return registry; + } + }); + return registry; + } + + private static void addToRegistry( + DescriptorProtos.FileDescriptorProto fileDescriptorProto, + Descriptors.FieldDescriptor fieldDescriptor, + ExtensionRegistry registry) { + if (fieldDescriptor.getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) { + registry.add(fieldDescriptor); + } else { + fileDescriptorProto.getMessageTypeList().stream() + .filter(typ -> typ.getName().equals(fieldDescriptor.getMessageType().getName())) + .findFirst() + .ifPresent( + messageType -> + registry.add(fieldDescriptor, messageType.getDefaultInstanceForType())); + fieldDescriptor.getMessageType().getFields().stream() + .filter(Descriptors.FieldDescriptor::isExtension) + .forEach(f -> addToRegistry(fileDescriptorProto, f, registry)); } - - private static void addToRegistry(DescriptorProtos.FileDescriptorProto fileDescriptorProto, - Descriptors.FieldDescriptor fieldDescriptor, ExtensionRegistry registry) { - if (fieldDescriptor.getJavaType() != 
Descriptors.FieldDescriptor.JavaType.MESSAGE) { - registry.add(fieldDescriptor); - } else { - fileDescriptorProto.getMessageTypeList().stream() - .filter(typ -> typ.getName().equals(fieldDescriptor.getMessageType().getName())) - .findFirst().ifPresent(messageType -> registry.add(fieldDescriptor, messageType.getDefaultInstanceForType())); - fieldDescriptor.getMessageType().getFields() - .stream().filter(Descriptors.FieldDescriptor::isExtension) - .forEach(f -> addToRegistry(fileDescriptorProto, f, registry)); - } + } + + /** + * Recursively constructs file descriptors for all dependencies of the supplied proto and returns + * a {@link Descriptors.FileDescriptor} for the supplied proto itself. For maximal efficiency, + * reuse the descriptorCache argument across calls. + */ + private static Descriptors.FileDescriptor descriptorFromProto( + DescriptorProtos.FileDescriptorProto descriptorProto, + Map descriptorProtoIndex, + Map descriptorCache) + throws Descriptors.DescriptorValidationException { + // First, check the cache. + String descriptorName = descriptorProto.getName(); + if (descriptorCache.containsKey(descriptorName)) { + return descriptorCache.get(descriptorName); } - /** - * Recursively constructs file descriptors for all dependencies of the supplied proto and returns - * a {@link Descriptors.FileDescriptor} for the supplied proto itself. For maximal efficiency, reuse the - * descriptorCache argument across calls. - */ - private static Descriptors.FileDescriptor descriptorFromProto( - DescriptorProtos.FileDescriptorProto descriptorProto, - Map descriptorProtoIndex, - Map descriptorCache) throws Descriptors.DescriptorValidationException { - // First, check the cache. - String descriptorName = descriptorProto.getName(); - if (descriptorCache.containsKey(descriptorName)) { - return descriptorCache.get(descriptorName); - } - - // Then, fetch all the required dependencies recursively. - ImmutableList.Builder dependencies = ImmutableList.builder(); - for (String dependencyName : descriptorProto.getDependencyList()) { - if (!descriptorProtoIndex.containsKey(dependencyName)) { - throw new IllegalArgumentException("Could not find dependency: " + dependencyName); - } - DescriptorProtos.FileDescriptorProto dependencyProto = descriptorProtoIndex.get(dependencyName); - dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache)); - } - - // Finally, construct the actual descriptor. - Descriptors.FileDescriptor[] empty = new Descriptors.FileDescriptor[0]; - Descriptors.FileDescriptor descript = Descriptors.FileDescriptor.buildFrom(descriptorProto, dependencies.build().toArray(empty), false); - descriptorCache.put(descript.getName(), descript); - return descript; + // Then, fetch all the required dependencies recursively. + ImmutableList.Builder dependencies = ImmutableList.builder(); + for (String dependencyName : descriptorProto.getDependencyList()) { + if (!descriptorProtoIndex.containsKey(dependencyName)) { + throw new IllegalArgumentException("Could not find dependency: " + dependencyName); + } + DescriptorProtos.FileDescriptorProto dependencyProto = + descriptorProtoIndex.get(dependencyName); + dependencies.add(descriptorFromProto(dependencyProto, descriptorProtoIndex, descriptorCache)); } + // Finally, construct the actual descriptor. 
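+    // Note: buildFrom is handed the recursively built dependencies, and the
+    // result is memoized in descriptorCache before returning. Descriptor
+    // files import one another as a DAG, so without this cache a dependency
+    // shared by several import paths would be rebuilt once per path; with
+    // it, each FileDescriptorProto is built exactly once and then reused.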
+ Descriptors.FileDescriptor[] empty = new Descriptors.FileDescriptor[0]; + Descriptors.FileDescriptor descript = + Descriptors.FileDescriptor.buildFrom( + descriptorProto, dependencies.build().toArray(empty), false); + descriptorCache.put(descript.getName(), descript); + return descript; + } } - diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java index 7926ba0702762..49ecb7ec2aedf 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/FieldTypeEdge.java @@ -5,51 +5,48 @@ import org.jgrapht.graph.DefaultDirectedGraph; import org.jgrapht.graph.DefaultEdge; - @Builder @Getter public class FieldTypeEdge extends DefaultEdge { - @Builder.Default - private final String type = ""; - @Builder.Default - private final boolean isMessageType = false; - private final transient ProtobufElement edgeSource; - private final transient ProtobufElement edgeTarget; - - public FieldTypeEdge inGraph(DefaultDirectedGraph g) { - g.addEdge(edgeSource, edgeTarget, this); - return this; + @Builder.Default private final String type = ""; + @Builder.Default private final boolean isMessageType = false; + private final transient ProtobufElement edgeSource; + private final transient ProtobufElement edgeTarget; + + public FieldTypeEdge inGraph(DefaultDirectedGraph g) { + g.addEdge(edgeSource, edgeTarget, this); + return this; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - FieldTypeEdge that = (FieldTypeEdge) o; + FieldTypeEdge that = (FieldTypeEdge) o; - if (isMessageType() != that.isMessageType()) { - return false; - } - if (!getType().equals(that.getType())) { - return false; - } - if (!getEdgeSource().equals(that.getEdgeSource())) { - return false; - } - return getEdgeTarget().equals(that.getEdgeTarget()); + if (isMessageType() != that.isMessageType()) { + return false; } - - @Override - public int hashCode() { - int result = getType().hashCode(); - result = 31 * result + (isMessageType() ? 1 : 0); - result = 31 * result + getEdgeSource().hashCode(); - result = 31 * result + getEdgeTarget().hashCode(); - return result; + if (!getType().equals(that.getType())) { + return false; + } + if (!getEdgeSource().equals(that.getEdgeSource())) { + return false; } + return getEdgeTarget().equals(that.getEdgeTarget()); + } + + @Override + public int hashCode() { + int result = getType().hashCode(); + result = 31 * result + (isMessageType() ? 
1 : 0); + result = 31 * result + getEdgeSource().hashCode(); + result = 31 * result + getEdgeTarget().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java index 91c76fe16b73f..e47e804763ecf 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufElement.java @@ -5,28 +5,33 @@ import com.google.protobuf.DescriptorProtos.SourceCodeInfo; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.List; import java.util.stream.Stream; - public interface ProtobufElement { - String name(); - String fullName(); - String nativeType(); - String comment(); - String fieldPathType(); - - FileDescriptorProto fileProto(); - DescriptorProto messageProto(); - - default Stream messageLocations() { - List fileLocations = fileProto().getSourceCodeInfo().getLocationList(); - return fileLocations.stream() - .filter(loc -> loc.getPathCount() > 1 - && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER - && messageProto() == fileProto().getMessageType(loc.getPath(1))); - } - - Stream accept(ProtobufModelVisitor v, VisitContext context); + String name(); + + String fullName(); + + String nativeType(); + + String comment(); + + String fieldPathType(); + + FileDescriptorProto fileProto(); + + DescriptorProto messageProto(); + + default Stream messageLocations() { + List fileLocations = fileProto().getSourceCodeInfo().getLocationList(); + return fileLocations.stream() + .filter( + loc -> + loc.getPathCount() > 1 + && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER + && messageProto() == fileProto().getMessageType(loc.getPath(1))); + } + + Stream accept(ProtobufModelVisitor v, VisitContext context); } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java index ff894112d0d51..3d4e170939455 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufEnum.java @@ -6,82 +6,81 @@ import com.linkedin.schema.EnumType; import com.linkedin.schema.SchemaFieldDataType; import datahub.protobuf.ProtobufUtils; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; -import java.util.stream.Collectors; - - @Getter public class ProtobufEnum extends ProtobufMessage { - private final EnumDescriptorProto enumProto; - - @Builder(builderMethodName = "enumBuilder") - public ProtobufEnum(FileDescriptorProto fileProto, - DescriptorProto messageProto, - EnumDescriptorProto enumProto) { - super(messageProto, null, fileProto); - this.enumProto = enumProto; - } - - @Override - public String name() { - return enumProto.getName(); - } - - @Override - public String fieldPathType() { - return "[type=enum]"; + private final EnumDescriptorProto enumProto; + + @Builder(builderMethodName = "enumBuilder") + public ProtobufEnum( + FileDescriptorProto fileProto, DescriptorProto messageProto, EnumDescriptorProto enumProto) { + super(messageProto, null, fileProto); + this.enumProto = enumProto; + } + + 
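// For illustration, a self-contained sketch (hypothetical class, not part of
// the change under review) of the mapping the overrides below implement: a
// protobuf enum surfaces in DataHub as an EnumType schema field whose field
// path carries the "[type=enum]" segment.

import com.linkedin.schema.EnumType;
import com.linkedin.schema.SchemaFieldDataType;

class EnumMappingSketch {
  public static void main(String[] args) {
    // Same construction ProtobufEnum.schemaFieldDataType() uses:
    SchemaFieldDataType dataType =
        new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType()));
    // Same segment ProtobufEnum.fieldPathType() contributes to a field path:
    String pathSegment = "[type=enum]";
    System.out.println(dataType.getType() + " -> " + pathSegment);
  }
}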
@Override + public String name() { + return enumProto.getName(); + } + + @Override + public String fieldPathType() { + return "[type=enum]"; + } + + @Override + public String nativeType() { + return "enum"; + } + + @Override + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())); + } + + @Override + public String comment() { + return messageLocations() + .filter( + loc -> + loc.getPathCount() > 3 + && loc.getPath(2) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER + && enumProto == messageProto().getEnumType(loc.getPath(3))) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } + + @Override + public String toString() { + return String.format("ProtobufEnum[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String nativeType() { - return "enum"; + if (o == null || getClass() != o.getClass()) { + return false; } - - @Override - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())); + if (!super.equals(o)) { + return false; } - @Override - public String comment() { - return messageLocations() - .filter(loc -> loc.getPathCount() > 3 - && loc.getPath(2) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER - && enumProto == messageProto().getEnumType(loc.getPath(3))) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); - } - - @Override - public String toString() { - return String.format("ProtobufEnum[%s]", fullName()); - } + ProtobufEnum that = (ProtobufEnum) o; - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } + return getEnumProto().equals(that.getEnumProto()); + } - ProtobufEnum that = (ProtobufEnum) o; - - return getEnumProto().equals(that.getEnumProto()); - } - - @Override - public int hashCode() { - int result = super.hashCode(); - result = 31 * result + getEnumProto().hashCode(); - return result; - } + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + getEnumProto().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java index d890c373f1299..5bb41017488f3 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufField.java @@ -18,152 +18,159 @@ import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; - import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Getter; @Builder(toBuilder = true) @Getter @AllArgsConstructor public class ProtobufField implements ProtobufElement { - private final ProtobufMessage protobufMessage; - private final 
FieldDescriptorProto fieldProto; - private final String nativeType; - private final String fieldPathType; - private final Boolean isMessageType; - private final SchemaFieldDataType schemaFieldDataType; - private final Boolean isNestedType; - - public OneofDescriptorProto oneOfProto() { - if (fieldProto.hasOneofIndex()) { - return protobufMessage.messageProto().getOneofDecl(fieldProto.getOneofIndex()); - } - return null; - } - - @Override - public FileDescriptorProto fileProto() { - return protobufMessage.fileProto(); - } - - @Override - public DescriptorProto messageProto() { - return protobufMessage.messageProto(); - } - - public String parentMessageName() { - return protobufMessage.fullName(); - } - - @Override - public String name() { - return fieldProto.getName(); - } - - @Override - public String fullName() { - return String.join(".", parentMessageName(), name()); - } - - public String getNativeType() { - return nativeType(); - } - - public int getNumber() { - return fieldProto.getNumber(); + private final ProtobufMessage protobufMessage; + private final FieldDescriptorProto fieldProto; + private final String nativeType; + private final String fieldPathType; + private final Boolean isMessageType; + private final SchemaFieldDataType schemaFieldDataType; + private final Boolean isNestedType; + + public OneofDescriptorProto oneOfProto() { + if (fieldProto.hasOneofIndex()) { + return protobufMessage.messageProto().getOneofDecl(fieldProto.getOneofIndex()); } - - @Override - public String nativeType() { - return Optional.ofNullable(nativeType).orElseGet(() -> { - if (fieldProto.getTypeName().isEmpty()) { + return null; + } + + @Override + public FileDescriptorProto fileProto() { + return protobufMessage.fileProto(); + } + + @Override + public DescriptorProto messageProto() { + return protobufMessage.messageProto(); + } + + public String parentMessageName() { + return protobufMessage.fullName(); + } + + @Override + public String name() { + return fieldProto.getName(); + } + + @Override + public String fullName() { + return String.join(".", parentMessageName(), name()); + } + + public String getNativeType() { + return nativeType(); + } + + public int getNumber() { + return fieldProto.getNumber(); + } + + @Override + public String nativeType() { + return Optional.ofNullable(nativeType) + .orElseGet( + () -> { + if (fieldProto.getTypeName().isEmpty()) { return fieldProto.getType().name().split("_")[1].toLowerCase(); - } else { + } else { return fieldProto.getTypeName().replaceFirst("^[.]", ""); - } - }); - } - - @Override - public String fieldPathType() { - return Optional.ofNullable(fieldPathType).orElseGet(() -> { - final String pathType; - - switch (fieldProto.getType()) { + } + }); + } + + @Override + public String fieldPathType() { + return Optional.ofNullable(fieldPathType) + .orElseGet( + () -> { + final String pathType; + + switch (fieldProto.getType()) { case TYPE_DOUBLE: - pathType = "double"; - break; + pathType = "double"; + break; case TYPE_FLOAT: - pathType = "float"; - break; + pathType = "float"; + break; case TYPE_SFIXED64: case TYPE_FIXED64: case TYPE_UINT64: case TYPE_INT64: case TYPE_SINT64: - pathType = "long"; - break; + pathType = "long"; + break; case TYPE_FIXED32: case TYPE_SFIXED32: case TYPE_INT32: case TYPE_UINT32: case TYPE_SINT32: - pathType = "int"; - break; + pathType = "int"; + break; case TYPE_BYTES: - pathType = "bytes"; - break; + pathType = "bytes"; + break; case TYPE_ENUM: - pathType = "enum"; - break; + pathType = "enum"; + break; case TYPE_BOOL: - 
pathType = "boolean"; - break; + pathType = "boolean"; + break; case TYPE_STRING: - pathType = "string"; - break; + pathType = "string"; + break; case TYPE_GROUP: case TYPE_MESSAGE: - pathType = nativeType().replace(".", "_"); - break; + pathType = nativeType().replace(".", "_"); + break; default: - throw new IllegalStateException(String.format("Unexpected FieldDescriptorProto => FieldPathType %s", fieldProto.getType())); - } + throw new IllegalStateException( + String.format( + "Unexpected FieldDescriptorProto => FieldPathType %s", + fieldProto.getType())); + } - StringArray fieldPath = new StringArray(); + StringArray fieldPath = new StringArray(); - if (schemaFieldDataType().getType().isArrayType()) { + if (schemaFieldDataType().getType().isArrayType()) { fieldPath.add("[type=array]"); - } + } - fieldPath.add(String.format("[type=%s]", pathType)); + fieldPath.add(String.format("[type=%s]", pathType)); - return String.join(".", fieldPath); - }); - } + return String.join(".", fieldPath); + }); + } - public boolean isMessage() { - return Optional.ofNullable(isMessageType).orElseGet(() -> - fieldProto.getType().equals(FieldDescriptorProto.Type.TYPE_MESSAGE)); - } + public boolean isMessage() { + return Optional.ofNullable(isMessageType) + .orElseGet(() -> fieldProto.getType().equals(FieldDescriptorProto.Type.TYPE_MESSAGE)); + } - public int sortWeight() { - return messageProto().getFieldList().indexOf(fieldProto) + 1; - } + public int sortWeight() { + return messageProto().getFieldList().indexOf(fieldProto) + 1; + } - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return Optional.ofNullable(schemaFieldDataType).orElseGet(() -> { - final SchemaFieldDataType.Type fieldType; + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return Optional.ofNullable(schemaFieldDataType) + .orElseGet( + () -> { + final SchemaFieldDataType.Type fieldType; - switch (fieldProto.getType()) { + switch (fieldProto.getType()) { case TYPE_DOUBLE: case TYPE_FLOAT: case TYPE_INT64: @@ -172,139 +179,150 @@ public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { case TYPE_UINT32: case TYPE_SINT32: case TYPE_SINT64: - fieldType = SchemaFieldDataType.Type.create(new NumberType()); - break; + fieldType = SchemaFieldDataType.Type.create(new NumberType()); + break; case TYPE_GROUP: case TYPE_MESSAGE: - fieldType = SchemaFieldDataType.Type.create(new RecordType()); - break; + fieldType = SchemaFieldDataType.Type.create(new RecordType()); + break; case TYPE_BYTES: - fieldType = SchemaFieldDataType.Type.create(new BytesType()); - break; + fieldType = SchemaFieldDataType.Type.create(new BytesType()); + break; case TYPE_ENUM: - fieldType = SchemaFieldDataType.Type.create(new EnumType()); - break; + fieldType = SchemaFieldDataType.Type.create(new EnumType()); + break; case TYPE_BOOL: - fieldType = SchemaFieldDataType.Type.create(new BooleanType()); - break; + fieldType = SchemaFieldDataType.Type.create(new BooleanType()); + break; case TYPE_STRING: - fieldType = SchemaFieldDataType.Type.create(new StringType()); - break; + fieldType = SchemaFieldDataType.Type.create(new StringType()); + break; case TYPE_FIXED64: case TYPE_FIXED32: case TYPE_SFIXED32: case TYPE_SFIXED64: - fieldType = SchemaFieldDataType.Type.create(new FixedType()); - break; + fieldType = SchemaFieldDataType.Type.create(new FixedType()); + break; default: - throw new IllegalStateException(String.format("Unexpected FieldDescriptorProto => SchemaFieldDataType: %s", 
fieldProto.getType())); - } - - if (fieldProto.getLabel().equals(FieldDescriptorProto.Label.LABEL_REPEATED)) { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType() - .setNestedType(new StringArray()))); - } - - return new SchemaFieldDataType().setType(fieldType); - }); - } - - @Override - public Stream messageLocations() { - List fileLocations = fileProto().getSourceCodeInfo().getLocationList(); - return fileLocations.stream() - .filter(loc -> loc.getPathCount() > 1 - && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER); + throw new IllegalStateException( + String.format( + "Unexpected FieldDescriptorProto => SchemaFieldDataType: %s", + fieldProto.getType())); + } + + if (fieldProto.getLabel().equals(FieldDescriptorProto.Label.LABEL_REPEATED)) { + return new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray()))); + } + + return new SchemaFieldDataType().setType(fieldType); + }); + } + + @Override + public Stream messageLocations() { + List fileLocations = fileProto().getSourceCodeInfo().getLocationList(); + return fileLocations.stream() + .filter( + loc -> + loc.getPathCount() > 1 + && loc.getPath(0) == FileDescriptorProto.MESSAGE_TYPE_FIELD_NUMBER); + } + + @Override + public String comment() { + return messageLocations() + .filter(location -> location.getPathCount() > 3) + .filter( + location -> + !ProtobufUtils.collapseLocationComments(location).isEmpty() + && !isEnumType(location.getPathList())) + .filter( + location -> { + List pathList = location.getPathList(); + DescriptorProto messageType = fileProto().getMessageType(pathList.get(1)); + + if (!isNestedType + && location.getPath(2) == DescriptorProto.FIELD_FIELD_NUMBER + && fieldProto == messageType.getField(location.getPath(3))) { + return true; + } else if (isNestedType + && location.getPath(2) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER + && fieldProto == getNestedTypeFields(pathList, messageType)) { + return true; + } + return false; + }) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } + + private FieldDescriptorProto getNestedTypeFields( + List pathList, DescriptorProto messageType) { + int pathSize = pathList.size(); + List nestedValues = new ArrayList<>(pathSize); + + for (int index = 0; index < pathSize; index++) { + if (index > 1 + && index % 2 == 0 + && pathList.get(index) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER) { + nestedValues.add(pathList.get(index + 1)); + } } - @Override - public String comment() { - return messageLocations() - .filter(location -> location.getPathCount() > 3) - .filter(location -> !ProtobufUtils.collapseLocationComments(location).isEmpty() - && !isEnumType(location.getPathList())) - .filter(location -> { - List pathList = location.getPathList(); - DescriptorProto messageType = fileProto().getMessageType(pathList.get(1)); - - if (!isNestedType - && location.getPath(2) == DescriptorProto.FIELD_FIELD_NUMBER - && fieldProto == messageType.getField(location.getPath(3))) { - return true; - } else if (isNestedType - && location.getPath(2) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER - && fieldProto == getNestedTypeFields(pathList, messageType)) { - return true; - } - return false; - }) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); + for (Integer value : nestedValues) { + messageType = messageType.getNestedType(value); } - private FieldDescriptorProto getNestedTypeFields(List pathList, 
DescriptorProto messageType) { - int pathSize = pathList.size(); - List nestedValues = new ArrayList<>(pathSize); - - for (int index = 0; index < pathSize; index++) { - if (index > 1 - && index % 2 == 0 - && pathList.get(index) == DescriptorProto.NESTED_TYPE_FIELD_NUMBER) { - nestedValues.add(pathList.get(index + 1)); - } - } - - for (Integer value : nestedValues) { - messageType = messageType.getNestedType(value); - } - - if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER - && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER - && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) { - return messageType.getField(pathList.get(pathSize - 1)); - } else { - return null; - } + if (pathList.get(pathSize - 2) == DescriptorProto.FIELD_FIELD_NUMBER + && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_RANGE_FIELD_NUMBER + && pathList.get(pathSize - 1) != DescriptorProto.RESERVED_NAME_FIELD_NUMBER) { + return messageType.getField(pathList.get(pathSize - 1)); + } else { + return null; } - - private boolean isEnumType(List pathList) { - for (int index = 0; index < pathList.size(); index++) { - if (index > 1 - && index % 2 == 0 - && pathList.get(index) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER) { - return true; - } - } - return false; + } + + private boolean isEnumType(List pathList) { + for (int index = 0; index < pathList.size(); index++) { + if (index > 1 + && index % 2 == 0 + && pathList.get(index) == DescriptorProto.ENUM_TYPE_FIELD_NUMBER) { + return true; + } } - - @Override - public Stream accept(ProtobufModelVisitor visitor, VisitContext context) { - return visitor.visitField(this, context); + return false; + } + + @Override + public Stream accept(ProtobufModelVisitor visitor, VisitContext context) { + return visitor.visitField(this, context); + } + + @Override + public String toString() { + return String.format("ProtobufField[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return String.format("ProtobufField[%s]", fullName()); + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } + ProtobufElement that = (ProtobufElement) o; - ProtobufElement that = (ProtobufElement) o; + return fullName().equals(that.fullName()); + } - return fullName().equals(that.fullName()); - } - - @Override - public int hashCode() { - return fullName().hashCode(); - } + @Override + public int hashCode() { + return fullName().hashCode(); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java index ae2319af85988..2f8c885de0e96 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufGraph.java @@ -6,10 +6,6 @@ import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import org.jgrapht.GraphPath; -import org.jgrapht.alg.shortestpath.AllDirectedPaths; -import org.jgrapht.graph.DefaultDirectedGraph; - import java.util.Collection; import java.util.HashSet; import java.util.List; @@ -18,374 +14,476 
@@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - +import org.jgrapht.GraphPath; +import org.jgrapht.alg.shortestpath.AllDirectedPaths; +import org.jgrapht.graph.DefaultDirectedGraph; public class ProtobufGraph extends DefaultDirectedGraph { - private final transient ProtobufMessage rootProtobufMessage; - private final transient AllDirectedPaths directedPaths; - private final transient ExtensionRegistry registry; - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) throws InvalidProtocolBufferException { - this(fileSet, null, null, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName) throws InvalidProtocolBufferException { - this(fileSet, messageName, null, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName, String relativeFilename) throws InvalidProtocolBufferException { - this(fileSet, messageName, relativeFilename, true); - } - - public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName, String filename, - boolean flattenGoogleWrapped) throws InvalidProtocolBufferException { - super(FieldTypeEdge.class); - this.registry = ProtobufUtils.buildRegistry(fileSet); - DescriptorProtos.FileDescriptorSet fileSetExtended = DescriptorProtos.FileDescriptorSet - .parseFrom(fileSet.toByteArray(), this.registry); - buildProtobufGraph(fileSetExtended); - if (flattenGoogleWrapped) { - flattenGoogleWrapped(); - } - - if (messageName != null) { - this.rootProtobufMessage = findMessage(messageName); - } else { - DescriptorProtos.FileDescriptorProto lastFile = fileSetExtended.getFileList() - .stream().filter(f -> filename != null && filename.endsWith(f.getName())) - .findFirst().orElse(fileSetExtended.getFile(fileSetExtended.getFileCount() - 1)); - - if (filename != null) { - this.rootProtobufMessage = autodetectRootMessage(lastFile) - .orElse(autodetectSingleMessage(lastFile) - .orElse(autodetectLocalFileRootMessage(lastFile) - .orElseThrow(() -> new IllegalArgumentException("Cannot autodetect protobuf Message.")))); - } else { - this.rootProtobufMessage = autodetectRootMessage(lastFile) - .orElseThrow(() -> new IllegalArgumentException("Cannot autodetect root protobuf Message.")); - } - } - - this.directedPaths = new AllDirectedPaths<>(this); - } - - public List> getAllPaths(ProtobufElement a, ProtobufElement b) { - return directedPaths.getAllPaths(a, b, true, null); - } - - public ExtensionRegistry getRegistry() { - return registry; - } - - public String getFullName() { - return rootProtobufMessage.fullName(); - } - - public int getMajorVersion() { - return rootProtobufMessage.majorVersion(); - } - - public String getComment() { - return rootProtobufMessage.comment(); - } - - public ProtobufMessage root() { - return rootProtobufMessage; - } - - - public > Stream accept(VisitContext.VisitContextBuilder contextBuilder, Collection visitors) { - VisitContext context = Optional.ofNullable(contextBuilder).orElse(VisitContext.builder()).graph(this).build(); - return accept(context, visitors); - } - - public > Stream accept(VisitContext context, Collection visitors) { - return Stream.concat( - visitors.stream().flatMap(visitor -> visitor.visitGraph(context)), - vertexSet().stream().flatMap(vertex -> visitors.stream().flatMap(visitor -> vertex.accept(visitor, context))) - ); - } - - protected Optional autodetectRootMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() 
- .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && incomingEdgesOf(v).isEmpty() - && outgoingEdgesOf(v).stream() - .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) - .allMatch(e -> e.getEdgeSource().equals(v))) // all the incoming edges on the child vertices should be self - .map(v -> (ProtobufMessage) v) - .findFirst(); + private final transient ProtobufMessage rootProtobufMessage; + private final transient AllDirectedPaths directedPaths; + private final transient ExtensionRegistry registry; + + public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) + throws InvalidProtocolBufferException { + this(fileSet, null, null, true); + } + + public ProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet, String messageName) + throws InvalidProtocolBufferException { + this(fileSet, messageName, null, true); + } + + public ProtobufGraph( + DescriptorProtos.FileDescriptorSet fileSet, String messageName, String relativeFilename) + throws InvalidProtocolBufferException { + this(fileSet, messageName, relativeFilename, true); + } + + public ProtobufGraph( + DescriptorProtos.FileDescriptorSet fileSet, + String messageName, + String filename, + boolean flattenGoogleWrapped) + throws InvalidProtocolBufferException { + super(FieldTypeEdge.class); + this.registry = ProtobufUtils.buildRegistry(fileSet); + DescriptorProtos.FileDescriptorSet fileSetExtended = + DescriptorProtos.FileDescriptorSet.parseFrom(fileSet.toByteArray(), this.registry); + buildProtobufGraph(fileSetExtended); + if (flattenGoogleWrapped) { + flattenGoogleWrapped(); } - protected Optional autodetectSingleMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && targetFile.getMessageTypeCount() == 1) - .map(v -> (ProtobufMessage) v) - .findFirst(); + if (messageName != null) { + this.rootProtobufMessage = findMessage(messageName); + } else { + DescriptorProtos.FileDescriptorProto lastFile = + fileSetExtended.getFileList().stream() + .filter(f -> filename != null && filename.endsWith(f.getName())) + .findFirst() + .orElse(fileSetExtended.getFile(fileSetExtended.getFileCount() - 1)); + + if (filename != null) { + this.rootProtobufMessage = + autodetectRootMessage(lastFile) + .orElse( + autodetectSingleMessage(lastFile) + .orElse( + autodetectLocalFileRootMessage(lastFile) + .orElseThrow( + () -> + new IllegalArgumentException( + "Cannot autodetect protobuf Message.")))); + } else { + this.rootProtobufMessage = + autodetectRootMessage(lastFile) + .orElseThrow( + () -> new IllegalArgumentException("Cannot autodetect root protobuf Message.")); + } } - protected Optional autodetectLocalFileRootMessage(DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { - return vertexSet().stream() - .filter(v -> // incoming edges of fields - targetFile.equals(v.fileProto()) - && v instanceof ProtobufMessage - && incomingEdgesOf(v).stream().noneMatch(e -> e.getEdgeSource().fileProto().equals(targetFile)) - && outgoingEdgesOf(v).stream() // all the incoming edges on the child vertices should be self within target file - .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) - .allMatch(e -> !e.getEdgeSource().fileProto().equals(targetFile) || e.getEdgeSource().equals(v))) - .map(v -> (ProtobufMessage) v) - .findFirst(); - } + this.directedPaths = new 
AllDirectedPaths<>(this); + } + + public List> getAllPaths( + ProtobufElement a, ProtobufElement b) { + return directedPaths.getAllPaths(a, b, true, null); + } + + public ExtensionRegistry getRegistry() { + return registry; + } + + public String getFullName() { + return rootProtobufMessage.fullName(); + } + + public int getMajorVersion() { + return rootProtobufMessage.majorVersion(); + } + + public String getComment() { + return rootProtobufMessage.comment(); + } + + public ProtobufMessage root() { + return rootProtobufMessage; + } + + public > Stream accept( + VisitContext.VisitContextBuilder contextBuilder, Collection visitors) { + VisitContext context = + Optional.ofNullable(contextBuilder).orElse(VisitContext.builder()).graph(this).build(); + return accept(context, visitors); + } + + public > Stream accept( + VisitContext context, Collection visitors) { + return Stream.concat( + visitors.stream().flatMap(visitor -> visitor.visitGraph(context)), + vertexSet().stream() + .flatMap( + vertex -> visitors.stream().flatMap(visitor -> vertex.accept(visitor, context)))); + } + + protected Optional autodetectRootMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && incomingEdgesOf(v).isEmpty() + && outgoingEdgesOf(v).stream() + .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) + .allMatch( + e -> + e.getEdgeSource() + .equals( + v))) // all the incoming edges on the child vertices should + // be self + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + protected Optional autodetectSingleMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && targetFile.getMessageTypeCount() == 1) + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + protected Optional autodetectLocalFileRootMessage( + DescriptorProtos.FileDescriptorProto targetFile) throws IllegalArgumentException { + return vertexSet().stream() + .filter( + v -> // incoming edges of fields + targetFile.equals(v.fileProto()) + && v instanceof ProtobufMessage + && incomingEdgesOf(v).stream() + .noneMatch(e -> e.getEdgeSource().fileProto().equals(targetFile)) + && outgoingEdgesOf(v) + .stream() // all the incoming edges on the child vertices should be self + // within target file + .flatMap(e -> incomingEdgesOf(e.getEdgeTarget()).stream()) + .allMatch( + e -> + !e.getEdgeSource().fileProto().equals(targetFile) + || e.getEdgeSource().equals(v))) + .map(v -> (ProtobufMessage) v) + .findFirst(); + } + + public ProtobufMessage findMessage(String messageName) throws IllegalArgumentException { + return (ProtobufMessage) + vertexSet().stream() + .filter(v -> v instanceof ProtobufMessage && messageName.equals(v.fullName())) + .findFirst() + .orElseThrow( + () -> + new IllegalArgumentException( + String.format("Cannot find protobuf Message %s", messageName))); + } + + private void buildProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) { + // Attach non-nested fields to messages + fileSet + .getFileList() + .forEach( + fileProto -> + fileProto + .getMessageTypeList() + .forEach( + messageProto -> { + ProtobufMessage messageVertex = + ProtobufMessage.builder() + .fileProto(fileProto) + .messageProto(messageProto) + .build(); + addVertex(messageVertex); + + // 
Handle nested fields + addNestedMessage(fileProto, messageProto); + + // Add enum types + addEnum(fileProto, messageProto); + + // handle normal fields and oneofs + messageProto + .getFieldList() + .forEach( + fieldProto -> { + ProtobufField fieldVertex = + ProtobufField.builder() + .protobufMessage(messageVertex) + .fieldProto(fieldProto) + .isNestedType(false) + .build(); + + // Add field vertex + addVertex(fieldVertex); + + if (fieldVertex.oneOfProto() != null) { + // Handle oneOf + addOneOf(messageVertex, fieldVertex); + } else { + // Add schema to field edge + linkMessageToField(messageVertex, fieldVertex); + } + }); + })); + + // attach field paths to root message + Map> fieldMap = + vertexSet().stream() + .filter( + v -> + v instanceof ProtobufField + && incomingEdgesOf(v).stream() + .noneMatch(e -> e.getEdgeSource() instanceof ProtobufOneOfField)) + .map(v -> (ProtobufField) v) + .collect(Collectors.groupingBy(ProtobufField::parentMessageName)); + + edgeSet().stream().filter(FieldTypeEdge::isMessageType).collect(Collectors.toSet()).stream() + .map(e -> (ProtobufField) e.getEdgeTarget()) + .forEach(f -> attachNestedMessageFields(fieldMap, f)); + } + + private void addEnum( + DescriptorProtos.FileDescriptorProto fileProto, + DescriptorProtos.DescriptorProto messageProto) { + messageProto + .getEnumTypeList() + .forEach( + enumProto -> { + ProtobufEnum enumVertex = + ProtobufEnum.enumBuilder() + .fileProto(fileProto) + .messageProto(messageProto) + .enumProto(enumProto) + .build(); + addVertex(enumVertex); + }); + } - public ProtobufMessage findMessage(String messageName) throws IllegalArgumentException { - return (ProtobufMessage) vertexSet().stream() - .filter(v -> v instanceof ProtobufMessage && messageName.equals(v.fullName())) - .findFirst().orElseThrow(() -> new IllegalArgumentException(String.format("Cannot find protobuf Message %s", messageName))); + private void addNestedMessage( + DescriptorProtos.FileDescriptorProto fileProto, + DescriptorProtos.DescriptorProto messageProto) { + if (messageProto.getNestedTypeCount() < 1) { + return; } - private void buildProtobufGraph(DescriptorProtos.FileDescriptorSet fileSet) { - // Attach non-nested fields to messages - fileSet.getFileList().forEach(fileProto -> - fileProto.getMessageTypeList().forEach(messageProto -> { - - ProtobufMessage messageVertex = ProtobufMessage.builder() - .fileProto(fileProto) - .messageProto(messageProto) - .build(); - addVertex(messageVertex); - - // Handle nested fields - addNestedMessage(fileProto, messageProto); - - // Add enum types - addEnum(fileProto, messageProto); - - // handle normal fields and oneofs - messageProto.getFieldList().forEach(fieldProto -> { - ProtobufField fieldVertex = ProtobufField.builder() - .protobufMessage(messageVertex) - .fieldProto(fieldProto) - .isNestedType(false) + messageProto + .getNestedTypeList() + .forEach( + nestedMessageProto -> { + ProtobufMessage nestedMessageVertex = + ProtobufMessage.builder() + .fileProto(fileProto) + .parentMessageProto(messageProto) + .messageProto(nestedMessageProto) + .build(); + addVertex(nestedMessageVertex); + + nestedMessageProto + .getFieldList() + .forEach( + nestedFieldProto -> { + ProtobufField field = + ProtobufField.builder() + .protobufMessage(nestedMessageVertex) + .fieldProto(nestedFieldProto) + .isNestedType(true) .build(); // Add field vertex - addVertex(fieldVertex); - - if (fieldVertex.oneOfProto() != null) { - // Handle oneOf - addOneOf(messageVertex, fieldVertex); - } else { - // Add schema to field edge - 
linkMessageToField(messageVertex, fieldVertex); + addVertex(field); + + // Add schema to field edge + if (!field.isMessage()) { + FieldTypeEdge.builder() + .edgeSource(nestedMessageVertex) + .edgeTarget(field) + .type(field.fieldPathType()) + .build() + .inGraph(this); } - }); - }) - ); - - // attach field paths to root message - Map> fieldMap = vertexSet().stream() - .filter(v -> v instanceof ProtobufField && incomingEdgesOf(v).stream().noneMatch(e -> e.getEdgeSource() instanceof ProtobufOneOfField)) - .map(v -> (ProtobufField) v) - .collect(Collectors.groupingBy(ProtobufField::parentMessageName)); - - edgeSet().stream().filter(FieldTypeEdge::isMessageType).collect(Collectors.toSet()) - .stream().map(e -> (ProtobufField) e.getEdgeTarget()) - .forEach(f -> attachNestedMessageFields(fieldMap, f)); - } - + }); - private void addEnum(DescriptorProtos.FileDescriptorProto fileProto, DescriptorProtos.DescriptorProto messageProto) { - messageProto.getEnumTypeList().forEach(enumProto -> { - ProtobufEnum enumVertex = ProtobufEnum.enumBuilder() - .fileProto(fileProto) - .messageProto(messageProto) - .enumProto(enumProto) - .build(); - addVertex(enumVertex); - }); - } - - private void addNestedMessage(DescriptorProtos.FileDescriptorProto fileProto, DescriptorProtos.DescriptorProto messageProto) { - if (messageProto.getNestedTypeCount() < 1) { - return; - } - - messageProto.getNestedTypeList().forEach(nestedMessageProto -> { - ProtobufMessage nestedMessageVertex = ProtobufMessage.builder() - .fileProto(fileProto) - .parentMessageProto(messageProto) - .messageProto(nestedMessageProto) - .build(); - addVertex(nestedMessageVertex); - - nestedMessageProto.getFieldList().forEach(nestedFieldProto -> { - ProtobufField field = ProtobufField.builder() - .protobufMessage(nestedMessageVertex) - .fieldProto(nestedFieldProto) - .isNestedType(true) - .build(); - - // Add field vertex - addVertex(field); - - // Add schema to field edge - if (!field.isMessage()) { - FieldTypeEdge.builder() - .edgeSource(nestedMessageVertex) - .edgeTarget(field) - .type(field.fieldPathType()) - .build().inGraph(this); - } + addNestedMessage(fileProto, nestedMessageProto); + }); + } + + private Stream addOneOf(ProtobufMessage messageVertex, ProtobufField fieldVertex) { + // Handle oneOf + ProtobufField oneOfVertex = + ProtobufOneOfField.oneOfBuilder() + .protobufMessage(messageVertex) + .fieldProto(fieldVertex.getFieldProto()) + .build(); + addVertex(oneOfVertex); + + FieldTypeEdge.builder() + .edgeSource(messageVertex) + .edgeTarget(oneOfVertex) + .type(oneOfVertex.fieldPathType()) + .build() + .inGraph(this); + + // Add oneOf field to field edge + FieldTypeEdge.builder() + .edgeSource(oneOfVertex) + .edgeTarget(fieldVertex) + .type(fieldVertex.fieldPathType()) + .isMessageType(fieldVertex.isMessage()) + .build() + .inGraph(this); + + return Stream.of(oneOfVertex); + } + + private Stream linkMessageToField( + ProtobufMessage messageVertex, ProtobufField fieldVertex) { + FieldTypeEdge.builder() + .edgeSource(messageVertex) + .edgeTarget(fieldVertex) + .type(fieldVertex.fieldPathType()) + .isMessageType(fieldVertex.isMessage()) + .build() + .inGraph(this); + + return Stream.of(fieldVertex); + } + + private void attachNestedMessageFields( + Map> fieldMap, ProtobufField messageField) { + fieldMap + .getOrDefault(messageField.nativeType(), List.of()) + .forEach( + target -> { + FieldTypeEdge.builder() + .edgeSource(messageField) + .edgeTarget(target) + .type(target.fieldPathType()) + .isMessageType(target.isMessage()) + .build() + 
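As a reading aid for addOneOf above, an illustrative picture (not from this patch) of how a oneof is modeled: the member field hangs off a synthetic union vertex rather than directly off the message.

// Hypothetical proto (package acme):
//   message Contact {
//     oneof channel {
//       string email = 1;
//     }
//   }
//
// Resulting vertices and edges, conceptually:
//   Contact --[type=union]--> channel --[type=string]--> email
//
// so a v2 field path rendered later by VisitContext.getFieldPath() would
// look roughly like:
//   [version=2.0].[type=acme_Contact].[type=union].channel.[type=string].email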
.inGraph(this); + }); + } + + private static final Set GOOGLE_WRAPPERS = + Set.of("google/protobuf/wrappers.proto", "google/protobuf/timestamp.proto"); + + private void flattenGoogleWrapped() { + HashSet removeVertices = new HashSet<>(); + HashSet removeEdges = new HashSet<>(); + HashSet addVertices = new HashSet<>(); + HashSet addEdges = new HashSet<>(); + + Set googleWrapped = + vertexSet().stream() + .filter( + v -> + v instanceof ProtobufMessage + && GOOGLE_WRAPPERS.contains(v.fileProto().getName())) + .collect(Collectors.toSet()); + removeVertices.addAll(googleWrapped); + + Set wrappedPrimitiveFields = + googleWrapped.stream() + .flatMap(wrapped -> outgoingEdgesOf(wrapped).stream()) + .map(FieldTypeEdge::getEdgeTarget) + .map(ProtobufField.class::cast) + .collect(Collectors.toSet()); + removeVertices.addAll(wrappedPrimitiveFields); + + wrappedPrimitiveFields.stream() + .filter(fld -> fld.getNumber() == 1) + .forEach( + primitiveField -> { + // remove incoming old edges to primitive + removeEdges.addAll(incomingEdgesOf(primitiveField)); + + Set originatingFields = + incomingEdgesOf(primitiveField).stream() + .map(FieldTypeEdge::getEdgeSource) + .filter(edgeSource -> !googleWrapped.contains(edgeSource)) + .map(ProtobufField.class::cast) + .collect(Collectors.toSet()); + removeVertices.addAll(originatingFields); + + originatingFields.forEach( + originatingField -> { + // Replacement Field + ProtobufElement fieldVertex = + originatingField.toBuilder() + .fieldPathType(primitiveField.fieldPathType()) + .schemaFieldDataType(primitiveField.schemaFieldDataType()) + .isMessageType(false) + .build(); + addVertices.add(fieldVertex); + + // link source field parent directly to primitive + Set incomingEdges = incomingEdgesOf(originatingField); + removeEdges.addAll(incomingEdgesOf(originatingField)); + addEdges.addAll( + incomingEdges.stream() + .map( + oldEdge -> + // Replace old edge with new edge to primitive + FieldTypeEdge.builder() + .edgeSource(oldEdge.getEdgeSource()) + .edgeTarget(fieldVertex) + .type(primitiveField.fieldPathType()) + .isMessageType(false) // known primitive + .build()) + .collect(Collectors.toSet())); + }); + + // remove old fields + removeVertices.addAll(originatingFields); }); - addNestedMessage(fileProto, nestedMessageProto); - }); - } - - private Stream addOneOf(ProtobufMessage messageVertex, ProtobufField fieldVertex) { - // Handle oneOf - ProtobufField oneOfVertex = ProtobufOneOfField.oneOfBuilder() - .protobufMessage(messageVertex) - .fieldProto(fieldVertex.getFieldProto()) - .build(); - addVertex(oneOfVertex); - - FieldTypeEdge.builder() - .edgeSource(messageVertex) - .edgeTarget(oneOfVertex) - .type(oneOfVertex.fieldPathType()) - .build().inGraph(this); - - // Add oneOf field to field edge - FieldTypeEdge.builder() - .edgeSource(oneOfVertex) - .edgeTarget(fieldVertex) - .type(fieldVertex.fieldPathType()) - .isMessageType(fieldVertex.isMessage()) - .build().inGraph(this); - - return Stream.of(oneOfVertex); + // Remove edges + removeAllEdges(removeEdges); + // Remove vertices + removeAllVertices(removeVertices); + // Add vertices + addVertices.forEach(this::addVertex); + // Add edges + addEdges.forEach(e -> e.inGraph(this)); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - private Stream linkMessageToField(ProtobufMessage messageVertex, ProtobufField fieldVertex) { - FieldTypeEdge.builder() - .edgeSource(messageVertex) - .edgeTarget(fieldVertex) - .type(fieldVertex.fieldPathType()) - 
.isMessageType(fieldVertex.isMessage()) - .build().inGraph(this); - - return Stream.of(fieldVertex); + if (o == null || getClass() != o.getClass()) { + return false; } - - private void attachNestedMessageFields(Map> fieldMap, ProtobufField messageField) { - fieldMap.getOrDefault(messageField.nativeType(), List.of()).forEach(target -> { - FieldTypeEdge.builder() - .edgeSource(messageField) - .edgeTarget(target) - .type(target.fieldPathType()) - .isMessageType(target.isMessage()) - .build().inGraph(this); - }); + if (!super.equals(o)) { + return false; } - private static final Set GOOGLE_WRAPPERS = Set.of("google/protobuf/wrappers.proto", "google/protobuf/timestamp.proto"); - private void flattenGoogleWrapped() { - HashSet removeVertices = new HashSet<>(); - HashSet removeEdges = new HashSet<>(); - HashSet addVertices = new HashSet<>(); - HashSet addEdges = new HashSet<>(); + ProtobufGraph that = (ProtobufGraph) o; - Set googleWrapped = vertexSet().stream() - .filter(v -> v instanceof ProtobufMessage - && GOOGLE_WRAPPERS.contains(v.fileProto().getName())) - .collect(Collectors.toSet()); - removeVertices.addAll(googleWrapped); - - Set wrappedPrimitiveFields = googleWrapped.stream() - .flatMap(wrapped -> outgoingEdgesOf(wrapped).stream()) - .map(FieldTypeEdge::getEdgeTarget) - .map(ProtobufField.class::cast) - .collect(Collectors.toSet()); - removeVertices.addAll(wrappedPrimitiveFields); - - wrappedPrimitiveFields.stream().filter(fld -> fld.getNumber() == 1).forEach(primitiveField -> { - // remove incoming old edges to primitive - removeEdges.addAll(incomingEdgesOf(primitiveField)); - - Set originatingFields = incomingEdgesOf(primitiveField).stream() - .map(FieldTypeEdge::getEdgeSource) - .filter(edgeSource -> !googleWrapped.contains(edgeSource)) - .map(ProtobufField.class::cast) - .collect(Collectors.toSet()); - removeVertices.addAll(originatingFields); - - originatingFields.forEach(originatingField -> { - // Replacement Field - ProtobufElement fieldVertex = originatingField.toBuilder() - .fieldPathType(primitiveField.fieldPathType()) - .schemaFieldDataType(primitiveField.schemaFieldDataType()) - .isMessageType(false) - .build(); - addVertices.add(fieldVertex); - - // link source field parent directly to primitive - Set incomingEdges = incomingEdgesOf(originatingField); - removeEdges.addAll(incomingEdgesOf(originatingField)); - addEdges.addAll(incomingEdges.stream().map(oldEdge -> - // Replace old edge with new edge to primitive - FieldTypeEdge.builder() - .edgeSource(oldEdge.getEdgeSource()) - .edgeTarget(fieldVertex) - .type(primitiveField.fieldPathType()) - .isMessageType(false) // known primitive - .build()).collect(Collectors.toSet())); - }); - - // remove old fields - removeVertices.addAll(originatingFields); - }); - - // Remove edges - removeAllEdges(removeEdges); - // Remove vertices - removeAllVertices(removeVertices); - // Add vertices - addVertices.forEach(this::addVertex); - // Add edges - addEdges.forEach(e -> e.inGraph(this)); - } + return rootProtobufMessage.equals(that.rootProtobufMessage); + } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - if (!super.equals(o)) { - return false; - } - - ProtobufGraph that = (ProtobufGraph) o; - - return rootProtobufMessage.equals(that.rootProtobufMessage); - } + @Override + public int hashCode() { + int result = super.hashCode(); + result = 31 * result + rootProtobufMessage.hashCode(); + return result; + } - @Override - public int 
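flattenGoogleWrapped collapses google.protobuf wrapper messages into plain fields; a hedged sketch of the observable difference, reusing the four-argument constructor and the fileSet from the earlier sketch:

// Hypothetical proto field: google.protobuf.StringValue nickname = 1;
ProtobufGraph flattened = new ProtobufGraph(fileSet, null, null, true);
ProtobufGraph raw = new ProtobufGraph(fileSet, null, null, false);
// With flattening (the default in the convenience constructors), "nickname"
// appears as a single primitive, non-message field; without it, the
// StringValue wrapper message and its inner "value = 1" field remain
// separate vertices joined by extra edges.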
hashCode() { - int result = super.hashCode(); - result = 31 * result + rootProtobufMessage.hashCode(); - return result; - } - - public String getHash() { - return String.valueOf(super.hashCode()); - } + public String getHash() { + return String.valueOf(super.hashCode()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java index 6b46b11231623..62f02a47a6c86 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufMessage.java @@ -4,119 +4,117 @@ import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.MapType; import com.linkedin.schema.RecordType; -import lombok.AllArgsConstructor; -import lombok.Builder; -import java.util.Arrays; -import java.util.stream.Stream; import com.linkedin.schema.SchemaFieldDataType; - import datahub.protobuf.ProtobufUtils; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - - +import java.util.Arrays; +import java.util.stream.Stream; +import lombok.AllArgsConstructor; +import lombok.Builder; @Builder @AllArgsConstructor public class ProtobufMessage implements ProtobufElement { - private final DescriptorProto messageProto; - private final DescriptorProto parentMessageProto; - private final FileDescriptorProto fileProto; - - @Override - public String name() { - return messageProto.getName(); - } - - @Override - public String fullName() { - if (parentMessageProto != null) { - return String.join(".", fileProto.getPackage(), parentMessageProto.getName(), name()); - } - return String.join(".", fileProto.getPackage(), name()); + private final DescriptorProto messageProto; + private final DescriptorProto parentMessageProto; + private final FileDescriptorProto fileProto; + + @Override + public String name() { + return messageProto.getName(); + } + + @Override + public String fullName() { + if (parentMessageProto != null) { + return String.join(".", fileProto.getPackage(), parentMessageProto.getName(), name()); } - - @Override - public String nativeType() { - return fullName(); - } - - @Override - public String fieldPathType() { - return String.format("[type=%s]", nativeType().replace(".", "_")); - } - - @Override - public FileDescriptorProto fileProto() { - return fileProto; - } - - @Override - public DescriptorProto messageProto() { - return messageProto; + return String.join(".", fileProto.getPackage(), name()); + } + + @Override + public String nativeType() { + return fullName(); + } + + @Override + public String fieldPathType() { + return String.format("[type=%s]", nativeType().replace(".", "_")); + } + + @Override + public FileDescriptorProto fileProto() { + return fileProto; + } + + @Override + public DescriptorProto messageProto() { + return messageProto; + } + + public SchemaFieldDataType schemaFieldDataType() { + if (parentMessageProto != null && messageProto.getName().equals("MapFieldEntry")) { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())); } - - public SchemaFieldDataType schemaFieldDataType() { - if (parentMessageProto != null && messageProto.getName().equals("MapFieldEntry")) { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())); - } - return new 
SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())); + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())); + } + + public int majorVersion() { + return Integer.parseInt( + Arrays.stream(fileProto.getName().split("/")) + .filter(p -> p.matches("^v[0-9]+$")) + .findFirst() + .map(p -> p.replace("v", "")) + .orElse("1")); + } + + @Override + public String comment() { + return messageLocations().map(ProtobufUtils::collapseLocationComments).findFirst().orElse(""); + } + + @Override + public Stream accept(ProtobufModelVisitor visitor, VisitContext context) { + return visitor.visitMessage(this, context); + } + + @Override + public String toString() { + return String.format("ProtobufMessage[%s]", fullName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - public int majorVersion() { - return Integer.parseInt(Arrays.stream(fileProto.getName().split("/")) - .filter(p -> p.matches("^v[0-9]+$")) - .findFirst() - .map(p -> p.replace("v", "")) - .orElse("1")); + if (o == null || getClass() != o.getClass()) { + return false; } - @Override - public String comment() { - return messageLocations() - .map(ProtobufUtils::collapseLocationComments) - .findFirst().orElse(""); - } + ProtobufMessage that = (ProtobufMessage) o; - @Override - public Stream accept(ProtobufModelVisitor visitor, VisitContext context) { - return visitor.visitMessage(this, context); + if (!fullName().equals(that.fullName())) { + return false; } - - @Override - public String toString() { - return String.format("ProtobufMessage[%s]", fullName()); + if (!messageProto.equals(that.messageProto)) { + return false; } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - ProtobufMessage that = (ProtobufMessage) o; - - if (!fullName().equals(that.fullName())) { - return false; - } - if (!messageProto.equals(that.messageProto)) { - return false; - } - if (parentMessageProto != null ? !parentMessageProto.equals(that.parentMessageProto) : that.parentMessageProto != null) { - return false; - } - return fileProto.equals(that.fileProto); - } - - @Override - public int hashCode() { - int result = messageProto.hashCode(); - result = 31 * result + (parentMessageProto != null ? parentMessageProto.hashCode() : 0); - result = 31 * result + fileProto.hashCode(); - result = 31 * result + fullName().hashCode(); - return result; + if (parentMessageProto != null + ? !parentMessageProto.equals(that.parentMessageProto) + : that.parentMessageProto != null) { + return false; } + return fileProto.equals(that.fileProto); + } + + @Override + public int hashCode() { + int result = messageProto.hashCode(); + result = 31 * result + (parentMessageProto != null ? 
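A small worked example for majorVersion() and fullName() above (builder values are hypothetical): the first path segment matching v<digits> determines the version, defaulting to 1.

DescriptorProtos.FileDescriptorProto fp =
    DescriptorProtos.FileDescriptorProto.newBuilder()
        .setName("acme/orders/v2/order.proto")
        .setPackage("acme.orders.v2")
        .build();
DescriptorProtos.DescriptorProto mp =
    DescriptorProtos.DescriptorProto.newBuilder().setName("Order").build();

ProtobufMessage msg =
    ProtobufMessage.builder().fileProto(fp).messageProto(mp).build();
// "v2" path segment -> 2; a path without any v<digits> segment yields 1.
assert msg.majorVersion() == 2;
assert msg.fullName().equals("acme.orders.v2.Order");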
parentMessageProto.hashCode() : 0); + result = 31 * result + fileProto.hashCode(); + result = 31 * result + fullName().hashCode(); + return result; + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java index 514d84b1cff2a..08c157f4c9c71 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/model/ProtobufOneOfField.java @@ -5,61 +5,60 @@ import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.UnionType; import datahub.protobuf.ProtobufUtils; +import java.util.stream.Collectors; import lombok.Builder; import lombok.Getter; -import java.util.stream.Collectors; - - @Getter public class ProtobufOneOfField extends ProtobufField { - public static final String NATIVE_TYPE = "oneof"; - public static final String FIELD_PATH_TYPE = "[type=union]"; + public static final String NATIVE_TYPE = "oneof"; + public static final String FIELD_PATH_TYPE = "[type=union]"; - @Builder(builderMethodName = "oneOfBuilder") - public ProtobufOneOfField(ProtobufMessage protobufMessage, - FieldDescriptorProto fieldProto) { - super(protobufMessage, fieldProto, null, null, null, null, null); - } + @Builder(builderMethodName = "oneOfBuilder") + public ProtobufOneOfField(ProtobufMessage protobufMessage, FieldDescriptorProto fieldProto) { + super(protobufMessage, fieldProto, null, null, null, null, null); + } - @Override - public String name() { - return oneOfProto().getName(); - } + @Override + public String name() { + return oneOfProto().getName(); + } - @Override - public String fieldPathType() { - return FIELD_PATH_TYPE; - } + @Override + public String fieldPathType() { + return FIELD_PATH_TYPE; + } - @Override - public String nativeType() { - return NATIVE_TYPE; - } + @Override + public String nativeType() { + return NATIVE_TYPE; + } - @Override - public boolean isMessage() { - return false; - } + @Override + public boolean isMessage() { + return false; + } - @Override - public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { - return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())); - } + @Override + public SchemaFieldDataType schemaFieldDataType() throws IllegalStateException { + return new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())); + } - @Override - public String comment() { - return messageLocations() - .filter(loc -> loc.getPathCount() > 3 - && loc.getPath(2) == DescriptorProto.ONEOF_DECL_FIELD_NUMBER - && oneOfProto() == messageProto().getOneofDecl(loc.getPath(3))) - .map(ProtobufUtils::collapseLocationComments) - .collect(Collectors.joining("\n")) - .trim(); - } + @Override + public String comment() { + return messageLocations() + .filter( + loc -> + loc.getPathCount() > 3 + && loc.getPath(2) == DescriptorProto.ONEOF_DECL_FIELD_NUMBER + && oneOfProto() == messageProto().getOneofDecl(loc.getPath(3))) + .map(ProtobufUtils::collapseLocationComments) + .collect(Collectors.joining("\n")) + .trim(); + } - @Override - public String toString() { - return String.format("ProtobufOneOf[%s]", fullName()); - } + @Override + public String toString() { + return String.format("ProtobufOneOf[%s]", fullName()); + } } diff --git 
a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java index c14217fb9add2..085516a025e0c 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufExtensionUtil.java @@ -9,7 +9,6 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.tag.TagProperties; import com.linkedin.util.Pair; - import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -21,140 +20,200 @@ public class ProtobufExtensionUtil { - private ProtobufExtensionUtil() { } + private ProtobufExtensionUtil() {} - public static DescriptorProtos.FieldDescriptorProto extendProto(DescriptorProtos.FieldDescriptorProto proto, ExtensionRegistry registry) { - try { - return DescriptorProtos.FieldDescriptorProto.parseFrom(proto.toByteArray(), registry); - } catch (InvalidProtocolBufferException e) { - throw new RuntimeException(e); - } + public static DescriptorProtos.FieldDescriptorProto extendProto( + DescriptorProtos.FieldDescriptorProto proto, ExtensionRegistry registry) { + try { + return DescriptorProtos.FieldDescriptorProto.parseFrom(proto.toByteArray(), registry); + } catch (InvalidProtocolBufferException e) { + throw new RuntimeException(e); } + } - public enum DataHubMetadataType { - PROPERTY, TAG, TAG_LIST, TERM, OWNER, DOMAIN, DEPRECATION; + public enum DataHubMetadataType { + PROPERTY, + TAG, + TAG_LIST, + TERM, + OWNER, + DOMAIN, + DEPRECATION; - public static final String PROTOBUF_TYPE = "DataHubMetadataType"; - } + public static final String PROTOBUF_TYPE = "DataHubMetadataType"; + } - public static List> filterByDataHubType(List> options, - ExtensionRegistry registry, DataHubMetadataType filterType) { - return options.stream() - .filter(entry -> { - DescriptorProtos.FieldDescriptorProto extendedProtoOptions = extendProto(entry.getKey().toProto(), registry); - Optional dataHubMetadataType = extendedProtoOptions.getOptions().getAllFields().entrySet().stream() - .filter(extEntry -> extEntry.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.ENUM) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection) extEntry.getValue()).stream(); - } else { - return Stream.of((Descriptors.EnumValueDescriptor) extEntry.getValue()); - } - }) - .filter(enumDesc -> enumDesc.getType().getFullName().endsWith("." 
+ DataHubMetadataType.PROTOBUF_TYPE)) - .map(enumDesc -> DataHubMetadataType.valueOf(enumDesc.getName())) - .filter(dhmt -> dhmt.equals(filterType)) - .findFirst(); + public static List> filterByDataHubType( + List> options, + ExtensionRegistry registry, + DataHubMetadataType filterType) { + return options.stream() + .filter( + entry -> { + DescriptorProtos.FieldDescriptorProto extendedProtoOptions = + extendProto(entry.getKey().toProto(), registry); + Optional dataHubMetadataType = + extendedProtoOptions.getOptions().getAllFields().entrySet().stream() + .filter( + extEntry -> + extEntry.getKey().getJavaType() + == Descriptors.FieldDescriptor.JavaType.ENUM) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection) + extEntry.getValue()) + .stream(); + } else { + return Stream.of( + (Descriptors.EnumValueDescriptor) extEntry.getValue()); + } + }) + .filter( + enumDesc -> + enumDesc + .getType() + .getFullName() + .endsWith("." + DataHubMetadataType.PROTOBUF_TYPE)) + .map(enumDesc -> DataHubMetadataType.valueOf(enumDesc.getName())) + .filter(dhmt -> dhmt.equals(filterType)) + .findFirst(); - return filterType.equals(dataHubMetadataType.orElse(DataHubMetadataType.PROPERTY)); - }).collect(Collectors.toList()); - } + return filterType.equals(dataHubMetadataType.orElse(DataHubMetadataType.PROPERTY)); + }) + .collect(Collectors.toList()); + } - public static Stream> getProperties(Descriptors.FieldDescriptor field, DescriptorProtos.DescriptorProto value) { - return value.getUnknownFields().asMap().entrySet().stream().map(unknown -> { - Descriptors.FieldDescriptor fieldDesc = field.getMessageType().findFieldByNumber(unknown.getKey()); - String fieldValue = unknown.getValue().getLengthDelimitedList().stream().map(ByteString::toStringUtf8).collect(Collectors.joining("")); - return Map.entry(String.join(".", field.getFullName(), fieldDesc.getName()), fieldValue); - }); - } + public static Stream> getProperties( + Descriptors.FieldDescriptor field, DescriptorProtos.DescriptorProto value) { + return value.getUnknownFields().asMap().entrySet().stream() + .map( + unknown -> { + Descriptors.FieldDescriptor fieldDesc = + field.getMessageType().findFieldByNumber(unknown.getKey()); + String fieldValue = + unknown.getValue().getLengthDelimitedList().stream() + .map(ByteString::toStringUtf8) + .collect(Collectors.joining("")); + return Map.entry( + String.join(".", field.getFullName(), fieldDesc.getName()), fieldValue); + }); + } - public static Stream extractTagPropertiesFromOptions(List> options, ExtensionRegistry registry) { - Stream tags = filterByDataHubType(options, registry, DataHubMetadataType.TAG).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection) extEntry.getValue()).stream().map(v -> Pair.of(extEntry.getKey(), v)); - } else { - return Stream.of(extEntry); - } + public static Stream extractTagPropertiesFromOptions( + List> options, ExtensionRegistry registry) { + Stream tags = + filterByDataHubType(options, registry, DataHubMetadataType.TAG).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection) extEntry.getValue()) + .stream().map(v -> Pair.of(extEntry.getKey(), v)); + } else { + return Stream.of(extEntry); + } }) - .map(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return new TagProperties() - .setName(String.format("%s.%s", entry.getKey().getName(), 
entry.getValue().toString())) - .setDescription(entry.getKey().getFullName()); - case BOOLEAN: - if ((boolean) entry.getValue()) { - return new TagProperties() - .setName(entry.getKey().getName()) - .setDescription(String.format("%s is true.", entry.getKey().getFullName())); - } - return null; - case ENUM: - Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) entry.getValue(); - String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); - String others = entry.getKey().getEnumType().getValues().stream() - .map(Descriptors.EnumValueDescriptor::getName).collect(Collectors.joining(", ")); - return new TagProperties() - .setName(name) - .setDescription(String.format("Enum %s of {%s}", name, others)); - default: - return null; - } - }).filter(Objects::nonNull); - - Stream tagListTags = filterByDataHubType(options, registry, DataHubMetadataType.TAG_LIST).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return Arrays.stream(entry.getValue().toString().split(",")) - .map(t -> new TagProperties() - .setName(t.trim()) - .setDescription(entry.getKey().getFullName())); - default: - return Stream.empty(); - } - }).filter(Objects::nonNull); + .map( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return new TagProperties() + .setName( + String.format( + "%s.%s", entry.getKey().getName(), entry.getValue().toString())) + .setDescription(entry.getKey().getFullName()); + case BOOLEAN: + if ((boolean) entry.getValue()) { + return new TagProperties() + .setName(entry.getKey().getName()) + .setDescription( + String.format("%s is true.", entry.getKey().getFullName())); + } + return null; + case ENUM: + Descriptors.EnumValueDescriptor desc = + (Descriptors.EnumValueDescriptor) entry.getValue(); + String name = + String.format("%s.%s", desc.getType().getName(), desc.getName()); + String others = + entry.getKey().getEnumType().getValues().stream() + .map(Descriptors.EnumValueDescriptor::getName) + .collect(Collectors.joining(", ")); + return new TagProperties() + .setName(name) + .setDescription(String.format("Enum %s of {%s}", name, others)); + default: + return null; + } + }) + .filter(Objects::nonNull); - Stream deprecationTag; - if (options.stream().anyMatch(opt -> opt.getKey().getFullName().endsWith(".deprecated") - && opt.getKey().getFullName().startsWith("google.protobuf.") - && opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.BOOLEAN - && (Boolean) opt.getValue())) { - deprecationTag = Stream.of(new TagProperties().setName("deprecated").setColorHex("#FF0000")); - } else { - deprecationTag = Stream.empty(); - } + Stream tagListTags = + filterByDataHubType(options, registry, DataHubMetadataType.TAG_LIST).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return Arrays.stream(entry.getValue().toString().split(",")) + .map( + t -> + new TagProperties() + .setName(t.trim()) + .setDescription(entry.getKey().getFullName())); + default: + return Stream.empty(); + } + }) + .filter(Objects::nonNull); - return Stream.of(tags, tagListTags, deprecationTag).reduce(Stream::concat).orElse(Stream.empty()); + Stream deprecationTag; + if (options.stream() + .anyMatch( + opt -> + opt.getKey().getFullName().endsWith(".deprecated") + && opt.getKey().getFullName().startsWith("google.protobuf.") + && opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.BOOLEAN + && 
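To summarize the switch above, the option's Java type decides the tag shape; illustrative examples with hypothetical option names:

// string  option meta.classification = "Sensitive" -> tag "classification.Sensitive"
// boolean option meta.pii = true                   -> tag "pii"
// enum    option meta.tier = GOLD (enum Tier)      -> tag "Tier.GOLD"
// TAG_LIST string "spark, kafka"                   -> tags "spark" and "kafka"
// built-in google.protobuf deprecated = true       -> tag "deprecated" (#FF0000)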
(Boolean) opt.getValue())) { + deprecationTag = Stream.of(new TagProperties().setName("deprecated").setColorHex("#FF0000")); + } else { + deprecationTag = Stream.empty(); } - public static Stream extractTermAssociationsFromOptions(List> fieldOptions, - ExtensionRegistry registry) { - return filterByDataHubType(fieldOptions, registry, DataHubMetadataType.TERM).stream() - .filter(e -> e.getKey().isExtension()) - .flatMap(extEntry -> { - if (extEntry.getKey().isRepeated()) { - return ((Collection) extEntry.getValue()).stream().map(v -> Pair.of(extEntry.getKey(), v)); - } else { - return Stream.of(extEntry); - } - }) - .map(entry -> { - switch (entry.getKey().getJavaType()) { - case STRING: - return new GlossaryTermAssociation() - .setUrn(new GlossaryTermUrn(entry.getValue().toString())); - case ENUM: - Descriptors.EnumValueDescriptor desc = (Descriptors.EnumValueDescriptor) entry.getValue(); - String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); - return new GlossaryTermAssociation() - .setUrn(new GlossaryTermUrn(name)); - default: - return null; - } - }).filter(Objects::nonNull); - } + return Stream.of(tags, tagListTags, deprecationTag) + .reduce(Stream::concat) + .orElse(Stream.empty()); + } + + public static Stream extractTermAssociationsFromOptions( + List> fieldOptions, ExtensionRegistry registry) { + return filterByDataHubType(fieldOptions, registry, DataHubMetadataType.TERM).stream() + .filter(e -> e.getKey().isExtension()) + .flatMap( + extEntry -> { + if (extEntry.getKey().isRepeated()) { + return ((Collection) extEntry.getValue()) + .stream().map(v -> Pair.of(extEntry.getKey(), v)); + } else { + return Stream.of(extEntry); + } + }) + .map( + entry -> { + switch (entry.getKey().getJavaType()) { + case STRING: + return new GlossaryTermAssociation() + .setUrn(new GlossaryTermUrn(entry.getValue().toString())); + case ENUM: + Descriptors.EnumValueDescriptor desc = + (Descriptors.EnumValueDescriptor) entry.getValue(); + String name = String.format("%s.%s", desc.getType().getName(), desc.getName()); + return new GlossaryTermAssociation().setUrn(new GlossaryTermUrn(name)); + default: + return null; + } + }) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java index b5c630302d946..336de520a96bd 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/ProtobufModelVisitor.java @@ -1,24 +1,24 @@ package datahub.protobuf.visitors; - import datahub.protobuf.model.ProtobufElement; import datahub.protobuf.model.ProtobufField; import datahub.protobuf.model.ProtobufMessage; - import java.util.stream.Stream; public interface ProtobufModelVisitor { - default Stream visitField(ProtobufField field, VisitContext context) { - return visitElement(field, context); - } - default Stream visitMessage(ProtobufMessage message, VisitContext context) { - return visitElement(message, context); - } + default Stream visitField(ProtobufField field, VisitContext context) { + return visitElement(field, context); + } + + default Stream visitMessage(ProtobufMessage message, VisitContext context) { + return visitElement(message, context); + } + + default Stream visitElement(ProtobufElement element, VisitContext context) { + return 
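The visitor interface dispatches by element type with empty-stream defaults, so an implementation only overrides what it cares about; a minimal hypothetical example:

import datahub.protobuf.model.ProtobufField;
import datahub.protobuf.visitors.ProtobufModelVisitor;
import datahub.protobuf.visitors.VisitContext;
import java.util.stream.Stream;

// Hypothetical visitor: emit the full name of every field in the graph.
public class FieldNameVisitor implements ProtobufModelVisitor<String> {
  @Override
  public Stream<String> visitField(ProtobufField field, VisitContext context) {
    return Stream.of(field.fullName());
  }
  // visitMessage/visitElement/visitGraph keep their empty defaults.
}
// Driven via the graph, e.g.:
//   graph.accept(contextBuilder, List.of(new FieldNameVisitor()))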
Stream.of(); + } - default Stream visitElement(ProtobufElement element, VisitContext context) { - return Stream.of(); - } - default Stream visitGraph(VisitContext context) { - return Stream.of(); - } + default Stream visitGraph(VisitContext context) { + return Stream.of(); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java index 51c92332d98a0..5718b0a8a2ae6 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/VisitContext.java @@ -7,55 +7,56 @@ import datahub.protobuf.model.ProtobufField; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.model.ProtobufMessage; -import lombok.Builder; -import lombok.Getter; -import org.jgrapht.GraphPath; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; +import lombok.Builder; +import lombok.Getter; +import org.jgrapht.GraphPath; @Builder @Getter public class VisitContext { - public static final String FIELD_PATH_VERSION = "[version=2.0]"; - - private final DatasetUrn datasetUrn; - private final ProtobufGraph graph; - private final AuditStamp auditStamp; - - public ProtobufMessage root() { - return graph.root(); - } - - public Stream> streamAllPaths(ProtobufField field) { - return graph.getAllPaths(root(), field).stream(); - } - - public String getFieldPath(GraphPath path) { - String fieldPathString = path.getEdgeList().stream() - .flatMap(e -> Stream.of(e.getType(), e.getEdgeTarget().name())) - .collect(Collectors.joining(".")); - return String.join(".", FIELD_PATH_VERSION, root().fieldPathType(), fieldPathString); - } - - // This is because order matters for the frontend. Both for matching the protobuf field order - // and also the nested struct's fieldPaths - public Double calculateSortOrder(GraphPath path, ProtobufField field) { - List weights = path.getEdgeList().stream() - .map(FieldTypeEdge::getEdgeTarget) - .filter(f -> f instanceof ProtobufField) - .map(f -> ((ProtobufField) f).sortWeight()) - .collect(Collectors.toList()); - - return IntStream.range(0, weights.size()) - .mapToDouble(i -> weights.get(i) * (1.0 / (i + 1))) - .reduce(Double::sum) - .orElse(0); - } - - public static class VisitContextBuilder { - - }; + public static final String FIELD_PATH_VERSION = "[version=2.0]"; + + private final DatasetUrn datasetUrn; + private final ProtobufGraph graph; + private final AuditStamp auditStamp; + + public ProtobufMessage root() { + return graph.root(); + } + + public Stream> streamAllPaths(ProtobufField field) { + return graph.getAllPaths(root(), field).stream(); + } + + public String getFieldPath(GraphPath path) { + String fieldPathString = + path.getEdgeList().stream() + .flatMap(e -> Stream.of(e.getType(), e.getEdgeTarget().name())) + .collect(Collectors.joining(".")); + return String.join(".", FIELD_PATH_VERSION, root().fieldPathType(), fieldPathString); + } + + // This is because order matters for the frontend. 
Both for matching the protobuf field order + // and also the nested struct's fieldPaths + public Double calculateSortOrder( + GraphPath path, ProtobufField field) { + List weights = + path.getEdgeList().stream() + .map(FieldTypeEdge::getEdgeTarget) + .filter(f -> f instanceof ProtobufField) + .map(f -> ((ProtobufField) f).sortWeight()) + .collect(Collectors.toList()); + + return IntStream.range(0, weights.size()) + .mapToDouble(i -> weights.get(i) * (1.0 / (i + 1))) + .reduce(Double::sum) + .orElse(0); + } + + public static class VisitContextBuilder {} + ; } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java index 80dc05d33e17d..1b03e13705910 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DatasetVisitor.java @@ -21,13 +21,10 @@ import com.linkedin.dataset.DatasetProperties; import com.linkedin.domain.Domains; import com.linkedin.events.metadata.ChangeType; +import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; -import datahub.event.MetadataChangeProposalWrapper; -import lombok.AllArgsConstructor; -import lombok.Builder; - import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -35,76 +32,145 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.AllArgsConstructor; +import lombok.Builder; @Builder @AllArgsConstructor -public class DatasetVisitor implements ProtobufModelVisitor> { - @Builder.Default - private final List> institutionalMemoryMetadataVisitors = List.of(); - @Builder.Default - private final List> datasetPropertyVisitors = List.of(); - @Builder.Default - private final List> tagAssociationVisitors = List.of(); - @Builder.Default - private final List> termAssociationVisitors = List.of(); - @Builder.Default - private final List> ownershipVisitors = List.of(); - @Builder.Default - private final List> domainVisitors = List.of(); - @Builder.Default - private final String protocBase64 = ""; - @Builder.Default - private final ProtobufModelVisitor descriptionVisitor = new DescriptionVisitor(); - @Builder.Default - private final ProtobufModelVisitor deprecationVisitor = new DeprecationVisitor(); +public class DatasetVisitor + implements ProtobufModelVisitor> { + @Builder.Default + private final List> + institutionalMemoryMetadataVisitors = List.of(); + + @Builder.Default + private final List> datasetPropertyVisitors = List.of(); + + @Builder.Default + private final List> tagAssociationVisitors = List.of(); + + @Builder.Default + private final List> termAssociationVisitors = + List.of(); + + @Builder.Default private final List> ownershipVisitors = List.of(); + + @Builder.Default + private final List> domainVisitors = List.of(); + + @Builder.Default private final String protocBase64 = ""; + + @Builder.Default + private final ProtobufModelVisitor descriptionVisitor = new DescriptionVisitor(); + + @Builder.Default + private final ProtobufModelVisitor deprecationVisitor = new DeprecationVisitor(); - @Override - public Stream> visitGraph(VisitContext context) { - final String datasetUrn = 
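calculateSortOrder above harmonically discounts weights along the path, so outer fields dominate ordering; a worked example with hypothetical weights:

// Edge-target sort weights along a path, outermost first: [2, 5, 1]
//   2 * 1/1 + 5 * 1/2 + 1 * 1/3 = 2.0 + 2.5 + 0.333... ≈ 4.83
// Swapping the outer weight to 1 gives 1.0 + 2.5 + 0.333... ≈ 3.83,
// so that field sorts earlier: protobuf field order wins at each nesting level.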
context.getDatasetUrn().toString(); - final ProtobufGraph g = context.getGraph(); + @Override + public Stream> visitGraph( + VisitContext context) { + final String datasetUrn = context.getDatasetUrn().toString(); + final ProtobufGraph g = context.getGraph(); - return Stream.of( - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new DatasetProperties() - .setName(context.getDatasetUrn() + return Stream.of( + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new DatasetProperties() + .setName( + context + .getDatasetUrn() .getDatasetNameEntity() - .substring(context.getDatasetUrn() - .getDatasetNameEntity() - .lastIndexOf(".") + 1)) - .setQualifiedName(context.getDatasetUrn().getDatasetNameEntity()) - .setDescription(g.accept(context, List.of(descriptionVisitor)).collect(Collectors.joining("\n"))) - .setCustomProperties(new StringMap( - Stream.concat( - Stream.of(Map.entry("protoc", protocBase64)), - g.accept(context, datasetPropertyVisitors).flatMap(props -> props.getCustomProperties().entrySet().stream())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - )), "datasetProperties"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, new InstitutionalMemory().setElements( + .substring( + context.getDatasetUrn().getDatasetNameEntity().lastIndexOf(".") + + 1)) + .setQualifiedName(context.getDatasetUrn().getDatasetNameEntity()) + .setDescription( + g.accept(context, List.of(descriptionVisitor)) + .collect(Collectors.joining("\n"))) + .setCustomProperties( + new StringMap( + Stream.concat( + Stream.of(Map.entry("protoc", protocBase64)), + g.accept(context, datasetPropertyVisitors) + .flatMap( + props -> + props.getCustomProperties().entrySet().stream())) + .collect( + Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))), + "datasetProperties"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new InstitutionalMemory() + .setElements( new InstitutionalMemoryMetadataArray( - g.accept(context, institutionalMemoryMetadataVisitors) - .map(inst -> inst.setCreateStamp(context.getAuditStamp())) - .collect(Collectors.toMap(InstitutionalMemoryMetadata::getUrl, Function.identity(), - (a1, a2) -> a1, LinkedHashMap::new)) - .values() - )), "institutionalMemory"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new GlobalTags().setTags(new TagAssociationArray( - g.accept(context, tagAssociationVisitors).collect(Collectors.toList()) - )), "globalTags"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - g.accept(context, termAssociationVisitors).collect(Collectors.toList()) - )).setAuditStamp(context.getAuditStamp()), "glossaryTerms"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new Ownership().setOwners(new OwnerArray( - g.accept(context, ownershipVisitors).collect(Collectors.toList()) - )).setLastModified(context.getAuditStamp()), "ownership"), - new MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - new Domains(new DataMap(Map.of("domains", - new UrnArray(g.accept(context, domainVisitors).collect(Collectors.toList())).data()))), "domains"), - g.accept(context, List.of(deprecationVisitor)).findFirst() - .map(dep -> new 
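A hedged sketch of wiring the builder above; the emitter and the base64 protoc payload are assumptions, not shown in this patch:

DatasetVisitor datasetVisitor =
    DatasetVisitor.builder()
        .protocBase64(base64DescriptorSet) // hypothetical precomputed payload
        .build();

// Emits one MetadataChangeProposalWrapper per aspect: datasetProperties,
// institutionalMemory, globalTags, glossaryTerms, ownership, domains, and,
// when the root message is deprecated, deprecation.
graph.accept(contextBuilder, List.of(datasetVisitor))
    .forEach(mcpw -> emitter.emit(mcpw)); // emitter is hypothetical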
MetadataChangeProposalWrapper<>(DatasetUrn.ENTITY_TYPE, datasetUrn, ChangeType.UPSERT, - dep, "deprecation")).orElse(null) - ).filter(Objects::nonNull); - } + g.accept(context, institutionalMemoryMetadataVisitors) + .map(inst -> inst.setCreateStamp(context.getAuditStamp())) + .collect( + Collectors.toMap( + InstitutionalMemoryMetadata::getUrl, + Function.identity(), + (a1, a2) -> a1, + LinkedHashMap::new)) + .values())), + "institutionalMemory"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new GlobalTags() + .setTags( + new TagAssociationArray( + g.accept(context, tagAssociationVisitors) + .collect(Collectors.toList()))), + "globalTags"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + g.accept(context, termAssociationVisitors) + .collect(Collectors.toList()))) + .setAuditStamp(context.getAuditStamp()), + "glossaryTerms"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new Ownership() + .setOwners( + new OwnerArray( + g.accept(context, ownershipVisitors).collect(Collectors.toList()))) + .setLastModified(context.getAuditStamp()), + "ownership"), + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + new Domains( + new DataMap( + Map.of( + "domains", + new UrnArray( + g.accept(context, domainVisitors).collect(Collectors.toList())) + .data()))), + "domains"), + g.accept(context, List.of(deprecationVisitor)) + .findFirst() + .map( + dep -> + new MetadataChangeProposalWrapper<>( + DatasetUrn.ENTITY_TYPE, + datasetUrn, + ChangeType.UPSERT, + dep, + "deprecation")) + .orElse(null)) + .filter(Objects::nonNull); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java index 612082e6a521b..46d17205e4219 100644 --- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java +++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DeprecationVisitor.java @@ -1,53 +1,60 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.ProtobufUtils.getMessageOptions; + import com.google.protobuf.Descriptors; import com.linkedin.common.Deprecation; import com.linkedin.util.Pair; import datahub.protobuf.visitors.ProtobufExtensionUtil; import datahub.protobuf.visitors.ProtobufModelVisitor; import datahub.protobuf.visitors.VisitContext; - import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; -import static datahub.protobuf.ProtobufUtils.getMessageOptions; - public class DeprecationVisitor implements ProtobufModelVisitor { - @Override - public Stream visitGraph(VisitContext context) { - if (context.root().messageProto().getOptions().getDeprecated()) { - List> deprecationOptions = ProtobufExtensionUtil - .filterByDataHubType(getMessageOptions(context.root().messageProto()), - context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.DEPRECATION); - - String decommissionNote = deprecationOptions.stream() - .filter(opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.STRING) - .flatMap(opt -> { - if 
-                            return ((Collection<?>) opt.getValue()).stream();
-                        } else {
-                            return Stream.of(opt.getValue());
-                        }
-                    })
-                    .map(Object::toString)
-                    .collect(Collectors.joining("\n"));
-
-            Optional<Long> decommissionTime = deprecationOptions.stream()
-                    .filter(opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.LONG)
-                    .map(opt -> (Long) opt.getValue())
-                    .findFirst();
-
-            return Stream.of(new Deprecation()
-                    .setDeprecated(true)
-                    .setNote(decommissionNote)
-                    .setDecommissionTime(decommissionTime.orElse(0L))
-                    .setActor(context.getAuditStamp().getActor()));
-        } else {
-            return Stream.empty();
-        }
+  @Override
+  public Stream<Deprecation> visitGraph(VisitContext context) {
+    if (context.root().messageProto().getOptions().getDeprecated()) {
+      List<Pair<Descriptors.FieldDescriptor, Object>> deprecationOptions =
+          ProtobufExtensionUtil.filterByDataHubType(
+              getMessageOptions(context.root().messageProto()),
+              context.getGraph().getRegistry(),
+              ProtobufExtensionUtil.DataHubMetadataType.DEPRECATION);
+
+      String decommissionNote =
+          deprecationOptions.stream()
+              .filter(
+                  opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.STRING)
+              .flatMap(
+                  opt -> {
+                    if (opt.getKey().isRepeated()) {
+                      return ((Collection<?>) opt.getValue()).stream();
+                    } else {
+                      return Stream.of(opt.getValue());
+                    }
+                  })
+              .map(Object::toString)
+              .collect(Collectors.joining("\n"));
+
+      Optional<Long> decommissionTime =
+          deprecationOptions.stream()
+              .filter(
+                  opt -> opt.getKey().getJavaType() == Descriptors.FieldDescriptor.JavaType.LONG)
+              .map(opt -> (Long) opt.getValue())
+              .findFirst();
+
+      return Stream.of(
+          new Deprecation()
+              .setDeprecated(true)
+              .setNote(decommissionNote)
+              .setDecommissionTime(decommissionTime.orElse(0L))
+              .setActor(context.getAuditStamp().getActor()));
+    } else {
+      return Stream.empty();
     }
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java
index 802c7e0c05408..4bd7dd96d0db9 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DescriptionVisitor.java
@@ -2,13 +2,12 @@
 
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.stream.Stream;
 
 public class DescriptionVisitor implements ProtobufModelVisitor<String> {
 
-    @Override
-    public Stream<String> visitGraph(VisitContext context) {
-        return Stream.of(context.root().comment());
-    }
+  @Override
+  public Stream<String> visitGraph(VisitContext context) {
+    return Stream.of(context.root().comment());
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java
index ac9d092c2392e..01908bb8c3b6d 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/DomainVisitor.java
@@ -1,23 +1,24 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+
 import com.linkedin.common.urn.Urn;
 import com.linkedin.util.Pair;
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.stream.Stream;
 
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-
 public class DomainVisitor implements ProtobufModelVisitor<Urn> {
 
-    @Override
-    public Stream<Urn> visitGraph(VisitContext context) {
-        return ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()),
-                context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.DOMAIN)
-                .stream().map(Pair::getValue).map(o ->
-                        Urn.createFromTuple("domain", ((String) o).toLowerCase())
-                );
-    }
+  @Override
+  public Stream<Urn> visitGraph(VisitContext context) {
+    return ProtobufExtensionUtil.filterByDataHubType(
+            getMessageOptions(context.root().messageProto()),
+            context.getGraph().getRegistry(),
+            ProtobufExtensionUtil.DataHubMetadataType.DOMAIN)
+        .stream()
+        .map(Pair::getValue)
+        .map(o -> Urn.createFromTuple("domain", ((String) o).toLowerCase()));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java
index b6f52fe01c109..c4a29b1b70f61 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitor.java
@@ -5,8 +5,6 @@
 import datahub.protobuf.model.ProtobufField;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
-import javax.annotation.Nullable;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Optional;
@@ -18,123 +16,153 @@
 import java.util.regex.Pattern;
 import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
+import javax.annotation.Nullable;
 
-public class InstitutionalMemoryVisitor implements ProtobufModelVisitor<InstitutionalMemoryMetadata> {
-    public static final String TEAM_DESC = "Github Team";
-    public static final String SLACK_CHAN_DESC = "Slack Channel";
-
-    private static final Pattern SLACK_CHANNEL_REGEX = Pattern.compile("(?si).*#([a-z0-9-]+).*");
-    private static final Pattern LINK_REGEX = Pattern.compile("(?s)(\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|])");
-    private final String githubOrganization;
-    private final Pattern githubTeamRegex;
-    private final String slackTeamId;
-
-    public InstitutionalMemoryVisitor(@Nullable String slackTeamId, @Nullable String githubOrganization) {
-        this.slackTeamId = slackTeamId;
-        this.githubOrganization = githubOrganization;
-        if (githubOrganization != null) {
-            this.githubTeamRegex = Pattern.compile(String.format("(?si).*@%s/([a-z-]+).*", githubOrganization));
-        } else {
-            this.githubTeamRegex = null;
-        }
+public class InstitutionalMemoryVisitor
+    implements ProtobufModelVisitor<InstitutionalMemoryMetadata> {
+  public static final String TEAM_DESC = "Github Team";
+  public static final String SLACK_CHAN_DESC = "Slack Channel";
+
+  private static final Pattern SLACK_CHANNEL_REGEX = Pattern.compile("(?si).*#([a-z0-9-]+).*");
+  private static final Pattern LINK_REGEX =
+      Pattern.compile(
+          "(?s)(\\b(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|])");
+  private final String githubOrganization;
+  private final Pattern githubTeamRegex;
+  private final String slackTeamId;
+
+  public InstitutionalMemoryVisitor(
+      @Nullable String slackTeamId, @Nullable String githubOrganization) {
+    this.slackTeamId = slackTeamId;
+    this.githubOrganization = githubOrganization;
+    if (githubOrganization != null) {
+      this.githubTeamRegex =
+          Pattern.compile(String.format("(?si).*@%s/([a-z-]+).*", githubOrganization));
+    } else {
+      this.githubTeamRegex = null;
     }
-
-    // https://slack.com/app_redirect?channel=fdn-analytics-data-catalog&team=T024F4EL1
-    private Optional<Url> slackLink(String text) {
-        return Optional.ofNullable(slackTeamId).map(teamId -> {
-            Matcher m = SLACK_CHANNEL_REGEX.matcher(text);
-            if (m.matches()) {
-                return new Url(String.format("https://slack.com/app_redirect?channel=%s&team=%s", m.group(1), slackTeamId));
-            } else {
+  }
+
+  // https://slack.com/app_redirect?channel=fdn-analytics-data-catalog&team=T024F4EL1
+  private Optional<Url> slackLink(String text) {
+    return Optional.ofNullable(slackTeamId)
+        .map(
+            teamId -> {
+              Matcher m = SLACK_CHANNEL_REGEX.matcher(text);
+              if (m.matches()) {
+                return new Url(
+                    String.format(
+                        "https://slack.com/app_redirect?channel=%s&team=%s",
+                        m.group(1), slackTeamId));
+              } else {
                 return null;
-            }
-        });
-    }
-
-    private Optional<Url> teamLink(String text) {
-        return Optional.ofNullable(githubTeamRegex).map(regex -> {
-            Matcher m = regex.matcher(text);
-            if (m.matches()) {
-                return new Url(String.format("https://github.com/orgs/%s/teams/%s", githubOrganization, m.group(1)));
-            } else {
+              }
+            });
+  }
+
+  private Optional<Url> teamLink(String text) {
+    return Optional.ofNullable(githubTeamRegex)
+        .map(
+            regex -> {
+              Matcher m = regex.matcher(text);
+              if (m.matches()) {
+                return new Url(
+                    String.format(
+                        "https://github.com/orgs/%s/teams/%s", githubOrganization, m.group(1)));
+              } else {
                 return null;
-            }
-        });
-    }
+              }
+            });
+  }
 
-    @Override
-    public Stream<InstitutionalMemoryMetadata> visitGraph(VisitContext context) {
-        List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>();
+  @Override
+  public Stream<InstitutionalMemoryMetadata> visitGraph(VisitContext context) {
+    List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>();
 
-        teamLink(context.root().comment()).ifPresent(url ->
-                institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata()
+    teamLink(context.root().comment())
+        .ifPresent(
+            url ->
+                institutionalMemoryMetadata.add(
+                    new InstitutionalMemoryMetadata()
                         .setCreateStamp(context.getAuditStamp())
                         .setDescription(TEAM_DESC)
                         .setUrl(url)));
-
-        slackLink(context.root().comment()).ifPresent(url ->
-                institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata()
+    slackLink(context.root().comment())
+        .ifPresent(
+            url ->
+                institutionalMemoryMetadata.add(
+                    new InstitutionalMemoryMetadata()
                         .setCreateStamp(context.getAuditStamp())
                         .setDescription(SLACK_CHAN_DESC)
                         .setUrl(url)));
 
     final int[] cnt = {0};
-        MatcherStream.findMatches(LINK_REGEX, context.root().comment()).forEach(match -> {
-            cnt[0] += 1;
-            institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata()
-                    .setCreateStamp(context.getAuditStamp())
-                    .setDescription(String.format("%s Reference %d", context.root().name(), cnt[0]))
-                    .setUrl(new Url(match.group(1))));
-        });
+    MatcherStream.findMatches(LINK_REGEX, context.root().comment())
+        .forEach(
+            match -> {
+              cnt[0] += 1;
+              institutionalMemoryMetadata.add(
+                  new InstitutionalMemoryMetadata()
+                      .setCreateStamp(context.getAuditStamp())
+                      .setDescription(
+                          String.format("%s Reference %d", context.root().name(), cnt[0]))
+                      .setUrl(new Url(match.group(1))));
+            });
 
-        return institutionalMemoryMetadata.stream();
-    }
+    return institutionalMemoryMetadata.stream();
+  }
 
-    @Override
-    public Stream<InstitutionalMemoryMetadata> visitField(ProtobufField field, VisitContext context) {
-        List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>();
+  @Override
+  public Stream<InstitutionalMemoryMetadata> visitField(ProtobufField field, VisitContext context) {
+    List<InstitutionalMemoryMetadata> institutionalMemoryMetadata = new LinkedList<>();
 
-        if (field.messageProto().equals(context.getGraph().root().messageProto())) {
-            final int[] cnt = {0};
-            MatcherStream.findMatches(LINK_REGEX, field.comment()).forEach(match -> {
+    if (field.messageProto().equals(context.getGraph().root().messageProto())) {
+      final int[] cnt = {0};
+      MatcherStream.findMatches(LINK_REGEX, field.comment())
+          .forEach(
+              match -> {
                 cnt[0] += 1;
-                institutionalMemoryMetadata.add(new InstitutionalMemoryMetadata()
+                institutionalMemoryMetadata.add(
+                    new InstitutionalMemoryMetadata()
                         .setCreateStamp(context.getAuditStamp())
-                        .setDescription(String.format("%s.%s Reference %d",
+                        .setDescription(
+                            String.format(
+                                "%s.%s Reference %d",
                                 field.getProtobufMessage().name(),
                                 field.getFieldProto().getName(),
                                 cnt[0]))
                         .setUrl(new Url(match.group(1))));
-            });
-        }
+              });
+    }
+
+    return institutionalMemoryMetadata.stream();
+  }
+
+  private static class MatcherStream {
+    private MatcherStream() {}
 
-        return institutionalMemoryMetadata.stream();
+    public static Stream<String> find(Pattern pattern, CharSequence input) {
+      return findMatches(pattern, input).map(MatchResult::group);
     }
 
-    private static class MatcherStream {
-        private MatcherStream() { }
-
-        public static Stream<String> find(Pattern pattern, CharSequence input) {
-            return findMatches(pattern, input).map(MatchResult::group);
-        }
-
-        public static Stream<MatchResult> findMatches(
-                Pattern pattern, CharSequence input) {
-            Matcher matcher = pattern.matcher(input);
-
-            Spliterator<MatchResult> spliterator = new Spliterators.AbstractSpliterator<MatchResult>(
-                    Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.NONNULL) {
-                @Override
-                public boolean tryAdvance(Consumer<? super MatchResult> action) {
-                    if (!matcher.find()) {
-                        return false;
-                    }
-                    action.accept(matcher.toMatchResult());
-                    return true;
-                } };
-
-            return StreamSupport.stream(spliterator, false);
-        }
+    public static Stream<MatchResult> findMatches(Pattern pattern, CharSequence input) {
+      Matcher matcher = pattern.matcher(input);
+
+      Spliterator<MatchResult> spliterator =
+          new Spliterators.AbstractSpliterator<MatchResult>(
+              Long.MAX_VALUE, Spliterator.ORDERED | Spliterator.NONNULL) {
+            @Override
+            public boolean tryAdvance(Consumer<? super MatchResult> action) {
+              if (!matcher.find()) {
+                return false;
+              }
+              action.accept(matcher.toMatchResult());
+              return true;
+            }
+          };
+
+      return StreamSupport.stream(spliterator, false);
     }
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java
index 57ec38611d47f..d2132316fdef3 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitor.java
@@ -4,28 +4,28 @@
 import com.linkedin.dataset.DatasetProperties;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.Map;
 import java.util.Optional;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Stream;
 
-
 public class KafkaTopicPropertyVisitor implements ProtobufModelVisitor<DatasetProperties> {
 
-    @Override
-    public Stream<DatasetProperties> visitGraph(VisitContext context) {
-        return getKafkaTopic(context.root().comment()).stream().map(kafkaTopic ->
-                new DatasetProperties()
-                        .setCustomProperties(new StringMap(Map.of("kafka_topic", kafkaTopic)))
-        );
-    }
+  @Override
+  public Stream<DatasetProperties> visitGraph(VisitContext context) {
+    return getKafkaTopic(context.root().comment()).stream()
+        .map(
+            kafkaTopic ->
+                new DatasetProperties()
+                    .setCustomProperties(new StringMap(Map.of("kafka_topic", kafkaTopic))));
+  }
 
-    private static final Pattern TOPIC_NAME_REGEX = Pattern.compile("(?si).*kafka.+topic.+[`]([a-z._-]+)[`].*");
+  private static final Pattern TOPIC_NAME_REGEX =
+      Pattern.compile("(?si).*kafka.+topic.+[`]([a-z._-]+)[`].*");
 
-    private static Optional<String> getKafkaTopic(String text) {
-        Matcher m = TOPIC_NAME_REGEX.matcher(text);
-        return m.matches() ? Optional.of(m.group(1)) : Optional.empty();
-    }
+  private static Optional<String> getKafkaTopic(String text) {
+    Matcher m = TOPIC_NAME_REGEX.matcher(text);
+    return m.matches() ? Optional.of(m.group(1)) : Optional.empty();
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java
index 7bb4d9860f72c..0a7081a35fa86 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/OwnershipVisitor.java
@@ -1,5 +1,7 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+
 import com.linkedin.common.Owner;
 import com.linkedin.common.OwnershipSource;
 import com.linkedin.common.OwnershipSourceType;
@@ -8,47 +10,55 @@
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.net.URISyntaxException;
 import java.util.Collection;
 import java.util.Map;
 import java.util.Objects;
 import java.util.stream.Stream;
 
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-
 public class OwnershipVisitor implements ProtobufModelVisitor<Owner> {
 
-    @Override
-    public Stream<Owner> visitGraph(VisitContext context) {
-        return ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry(),
-                ProtobufExtensionUtil.DataHubMetadataType.OWNER)
-                .stream()
-                .flatMap(extEntry -> {
-                    if (extEntry.getKey().isRepeated()) {
-                        return ((Collection<String>) extEntry.getValue()).stream().map(v -> Map.entry(extEntry.getKey(), v));
-                    } else {
-                        return Stream.of(Map.entry(extEntry.getKey(), (String) extEntry.getValue()));
-                    }
-                })
-                .map(entry -> {
-                    try {
-                        OwnershipType ownershipType;
-                        try {
-                            ownershipType = OwnershipType.valueOf(entry.getKey().getName().toUpperCase());
-                        } catch (IllegalArgumentException e) {
-                            ownershipType = OwnershipType.TECHNICAL_OWNER;
-                        }
+  @Override
+  public Stream<Owner> visitGraph(VisitContext context) {
+    return ProtobufExtensionUtil.filterByDataHubType(
+            getMessageOptions(context.root().messageProto()),
+            context.getGraph().getRegistry(),
+            ProtobufExtensionUtil.DataHubMetadataType.OWNER)
+        .stream()
+        .flatMap(
+            extEntry -> {
+              if (extEntry.getKey().isRepeated()) {
+                return ((Collection<String>) extEntry.getValue())
+                    .stream().map(v -> Map.entry(extEntry.getKey(), v));
+              } else {
+                return Stream.of(Map.entry(extEntry.getKey(), (String) extEntry.getValue()));
+              }
+            })
+        .map(
+            entry -> {
+              try {
+                OwnershipType ownershipType;
+                try {
+                  ownershipType = OwnershipType.valueOf(entry.getKey().getName().toUpperCase());
+                } catch (IllegalArgumentException e) {
+                  ownershipType = OwnershipType.TECHNICAL_OWNER;
+                }
 
-                        String[] id = entry.getValue().toLowerCase().split(":", 2);
-                        return new Owner()
-                                .setType(ownershipType)
-                                .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
-                                .setOwner(new Urn(id.length > 1 ? id[0].replaceFirst("corpgroup", "corpGroup") : "corpGroup", id[id.length - 1]));
-                    } catch (URISyntaxException e) {
-                        System.err.println(e.getMessage());
-                        return null;
-                    }
-                }).filter(Objects::nonNull);
-    }
+                String[] id = entry.getValue().toLowerCase().split(":", 2);
+                return new Owner()
+                    .setType(ownershipType)
+                    .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL))
+                    .setOwner(
+                        new Urn(
+                            id.length > 1
+                                ? id[0].replaceFirst("corpgroup", "corpGroup")
+                                : "corpGroup",
+                            id[id.length - 1]));
+              } catch (URISyntaxException e) {
+                System.err.println(e.getMessage());
+                return null;
+              }
+            })
+        .filter(Objects::nonNull);
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java
index 9abd903f242aa..113cf6f1a548f 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/PropertyVisitor.java
@@ -1,45 +1,56 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+import static datahub.protobuf.visitors.ProtobufExtensionUtil.getProperties;
+
 import com.google.gson.Gson;
 import com.google.protobuf.DescriptorProtos;
 import com.google.protobuf.Descriptors;
 import com.linkedin.data.template.StringMap;
 import com.linkedin.dataset.DatasetProperties;
-import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
+import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.Collection;
 import java.util.Map;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-import static datahub.protobuf.visitors.ProtobufExtensionUtil.getProperties;
-
-
 public class PropertyVisitor implements ProtobufModelVisitor<DatasetProperties> {
-    private static final Gson GSON = new Gson();
+  private static final Gson GSON = new Gson();
 
-    @Override
-    public Stream<DatasetProperties> visitGraph(VisitContext context) {
-        Map<String, String> properties = ProtobufExtensionUtil.filterByDataHubType(getMessageOptions(context.root().messageProto()),
-                context.getGraph().getRegistry(), ProtobufExtensionUtil.DataHubMetadataType.PROPERTY)
-                .stream().flatMap(fd -> {
-                    if (fd.getKey().getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) {
-                        if (fd.getKey().isRepeated()) {
-                            return Stream.of(Map.entry(fd.getKey().getName(), GSON.toJson(
-                                    ((Collection<?>) fd.getValue()).stream().map(Object::toString).collect(Collectors.toList()))));
-                        } else {
-                            return Stream.of(Map.entry(fd.getKey().getName(), fd.getValue().toString()));
-                        }
+  @Override
+  public Stream<DatasetProperties> visitGraph(VisitContext context) {
+    Map<String, String> properties =
+        ProtobufExtensionUtil.filterByDataHubType(
+                getMessageOptions(context.root().messageProto()),
+                context.getGraph().getRegistry(),
+                ProtobufExtensionUtil.DataHubMetadataType.PROPERTY)
+            .stream()
+            .flatMap(
+                fd -> {
+                  if (fd.getKey().getJavaType() != Descriptors.FieldDescriptor.JavaType.MESSAGE) {
+                    if (fd.getKey().isRepeated()) {
+                      return Stream.of(
+                          Map.entry(
+                              fd.getKey().getName(),
+                              GSON.toJson(
+                                  ((Collection<?>) fd.getValue())
+                                      .stream()
+                                      .map(Object::toString)
+                                      .collect(Collectors.toList()))));
                     } else {
-                        Descriptors.FieldDescriptor field = fd.getKey();
-                        DescriptorProtos.DescriptorProto value = (DescriptorProtos.DescriptorProto) fd.getValue();
-                        return getProperties(field, value);
+                      return Stream.of(Map.entry(fd.getKey().getName(), fd.getValue().toString()));
                     }
-                }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+                  } else {
+                    Descriptors.FieldDescriptor field = fd.getKey();
+                    DescriptorProtos.DescriptorProto value =
+                        (DescriptorProtos.DescriptorProto) fd.getValue();
+                    return getProperties(field, value);
+                  }
+                })
+            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
 
-        return Stream.of(new DatasetProperties().setCustomProperties(new StringMap(properties)));
-    }
+    return Stream.of(new DatasetProperties().setCustomProperties(new StringMap(properties)));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java
index f0ca32fbbc2f8..6874044215241 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TagAssociationVisitor.java
@@ -1,22 +1,20 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+
 import com.linkedin.common.TagAssociation;
 import com.linkedin.common.urn.TagUrn;
-import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
+import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.stream.Stream;
 
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-
-
 public class TagAssociationVisitor implements ProtobufModelVisitor<TagAssociation> {
 
-    @Override
-    public Stream<TagAssociation> visitGraph(VisitContext context) {
-        return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(context.root().messageProto()),
-                context.getGraph().getRegistry())
-                .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName())));
-    }
+  @Override
+  public Stream<TagAssociation> visitGraph(VisitContext context) {
+    return ProtobufExtensionUtil.extractTagPropertiesFromOptions(
+            getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry())
+        .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName())));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java
index 7656bb5236825..b13bc0eed1152 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/dataset/TermAssociationVisitor.java
@@ -1,19 +1,18 @@
 package datahub.protobuf.visitors.dataset;
 
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+
 import com.linkedin.common.GlossaryTermAssociation;
-import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
+import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.stream.Stream;
 
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-
 public class TermAssociationVisitor implements ProtobufModelVisitor<GlossaryTermAssociation> {
 
-    @Override
-    public Stream<GlossaryTermAssociation> visitGraph(VisitContext context) {
-        return ProtobufExtensionUtil.extractTermAssociationsFromOptions(getMessageOptions(context.root().messageProto()),
-                context.getGraph().getRegistry());
-    }
+  @Override
+  public Stream<GlossaryTermAssociation> visitGraph(VisitContext context) {
+    return ProtobufExtensionUtil.extractTermAssociationsFromOptions(
+        getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry());
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java
index c67c7414e521b..240cf7b6d168b 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitor.java
@@ -1,5 +1,8 @@
 package datahub.protobuf.visitors.field;
 
+import static datahub.protobuf.ProtobufUtils.getFieldOptions;
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.GlossaryTermAssociation;
 import com.linkedin.common.GlossaryTermAssociationArray;
@@ -13,41 +16,45 @@
 import datahub.protobuf.model.ProtobufField;
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.Comparator;
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static datahub.protobuf.ProtobufUtils.getFieldOptions;
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-
 public class ProtobufExtensionFieldVisitor extends SchemaFieldVisitor {
 
-    @Override
-    public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) {
-        boolean isPrimaryKey = getFieldOptions(field.getFieldProto()).stream().map(Pair::getKey)
-                .anyMatch(fieldDesc -> fieldDesc.getName().matches("(?i).*primary_?key"));
+  @Override
+  public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) {
+    boolean isPrimaryKey =
+        getFieldOptions(field.getFieldProto()).stream()
+            .map(Pair::getKey)
+            .anyMatch(fieldDesc -> fieldDesc.getName().matches("(?i).*primary_?key"));
 
-        List<TagAssociation> tags = Stream.concat(
+    List<TagAssociation> tags =
+        Stream.concat(
                 ProtobufExtensionUtil.extractTagPropertiesFromOptions(
-                        getFieldOptions(field.getFieldProto()),
-                        context.getGraph().getRegistry()),
-                promotedTags(field, context))
-                .distinct().map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName())))
-                .sorted(Comparator.comparing(t -> t.getTag().getName()))
-                .collect(Collectors.toList());
+                    getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()),
+                promotedTags(field, context))
+            .distinct()
+            .map(tag -> new TagAssociation().setTag(new TagUrn(tag.getName())))
+            .sorted(Comparator.comparing(t -> t.getTag().getName()))
+            .collect(Collectors.toList());
 
-        List<GlossaryTermAssociation> terms = Stream.concat(
+    List<GlossaryTermAssociation> terms =
+        Stream.concat(
                 ProtobufExtensionUtil.extractTermAssociationsFromOptions(
-                        getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()),
+                    getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry()),
                 promotedTerms(field, context))
-                .distinct()
-                .sorted(Comparator.comparing(a -> a.getUrn().getNameEntity()))
-                .collect(Collectors.toList());
+            .distinct()
+            .sorted(Comparator.comparing(a -> a.getUrn().getNameEntity()))
+            .collect(Collectors.toList());
 
-        return context.streamAllPaths(field).map(path -> Pair.of(
-                new SchemaField()
+    return context
+        .streamAllPaths(field)
+        .map(
+            path ->
+                Pair.of(
+                    new SchemaField()
                         .setFieldPath(context.getFieldPath(path))
                         .setNullable(!isPrimaryKey)
                         .setIsPartOfKey(isPrimaryKey)
@@ -55,40 +62,48 @@ public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitCo
                         .setDescription(field.comment())
                         .setNativeDataType(field.nativeType())
                         .setType(field.schemaFieldDataType())
                         .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray(tags)))
-                        .setGlossaryTerms(new GlossaryTerms()
+                        .setGlossaryTerms(
+                            new GlossaryTerms()
                                 .setTerms(new GlossaryTermAssociationArray(terms))
                                 .setAuditStamp(context.getAuditStamp())),
-                context.calculateSortOrder(path, field)));
-    }
+                    context.calculateSortOrder(path, field)));
+  }
 
-    /**
-     * Promote tags from nested message to field.
-     * @return tags
-     */
-    private Stream<TagProperties> promotedTags(ProtobufField field, VisitContext context) {
-        if (field.isMessage()) {
-            return context.getGraph().outgoingEdgesOf(field).stream().flatMap(e ->
-                    ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(e.getEdgeTarget().messageProto()),
-                            context.getGraph().getRegistry())
-            ).distinct();
-        } else {
-            return Stream.of();
-        }
+  /**
+   * Promote tags from nested message to field.
+   *
+   * @return tags
+   */
+  private Stream<TagProperties> promotedTags(ProtobufField field, VisitContext context) {
+    if (field.isMessage()) {
+      return context.getGraph().outgoingEdgesOf(field).stream()
+          .flatMap(
+              e ->
+                  ProtobufExtensionUtil.extractTagPropertiesFromOptions(
+                      getMessageOptions(e.getEdgeTarget().messageProto()),
+                      context.getGraph().getRegistry()))
+          .distinct();
+    } else {
+      return Stream.of();
     }
+  }
 
-    /**
-     * Promote terms from nested message to field.
-     * @return terms
-     */
-    private Stream<GlossaryTermAssociation> promotedTerms(ProtobufField field, VisitContext context) {
-        if (field.isMessage()) {
-            return context.getGraph().outgoingEdgesOf(field).stream().flatMap(e ->
-                    ProtobufExtensionUtil.extractTermAssociationsFromOptions(getMessageOptions(e.getEdgeTarget().messageProto()),
-                            context.getGraph().getRegistry())
-            ).distinct();
-        } else {
-            return Stream.of();
-        }
+  /**
+   * Promote terms from nested message to field.
+   *
+   * @return terms
+   */
+  private Stream<GlossaryTermAssociation> promotedTerms(ProtobufField field, VisitContext context) {
+    if (field.isMessage()) {
+      return context.getGraph().outgoingEdgesOf(field).stream()
+          .flatMap(
+              e ->
+                  ProtobufExtensionUtil.extractTermAssociationsFromOptions(
+                      getMessageOptions(e.getEdgeTarget().messageProto()),
+                      context.getGraph().getRegistry()))
+          .distinct();
+    } else {
+      return Stream.of();
    }
-
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java
index 8f8da1970967d..46f9bc5f2f90c 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/field/SchemaFieldVisitor.java
@@ -5,21 +5,23 @@
 import datahub.protobuf.model.ProtobufField;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-
 import java.util.stream.Stream;
 
 public class SchemaFieldVisitor implements ProtobufModelVisitor<Pair<SchemaField, Double>> {
 
-    @Override
-    public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) {
-        return context.streamAllPaths(field).map(path ->
+  @Override
+  public Stream<Pair<SchemaField, Double>> visitField(ProtobufField field, VisitContext context) {
+    return context
+        .streamAllPaths(field)
+        .map(
+            path ->
                 Pair.of(
-                        new SchemaField()
-                                .setFieldPath(context.getFieldPath(path))
-                                .setNullable(true)
-                                .setDescription(field.comment())
-                                .setNativeDataType(field.nativeType())
-                                .setType(field.schemaFieldDataType()),
-                        context.calculateSortOrder(path, field)));
-    }
+                    new SchemaField()
+                        .setFieldPath(context.getFieldPath(path))
+                        .setNullable(true)
+                        .setDescription(field.comment())
+                        .setNativeDataType(field.nativeType())
+                        .setType(field.schemaFieldDataType()),
+                    context.calculateSortOrder(path, field)));
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java
index eb416653232a1..ad6a3344e5b1e 100644
--- a/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java
+++ b/metadata-integration/java/datahub-protobuf/src/main/java/datahub/protobuf/visitors/tags/TagVisitor.java
@@ -1,43 +1,46 @@
 package datahub.protobuf.visitors.tags;
 
+import static datahub.protobuf.ProtobufUtils.getFieldOptions;
+import static datahub.protobuf.ProtobufUtils.getMessageOptions;
+
 import com.linkedin.common.urn.TagUrn;
 import com.linkedin.data.template.RecordTemplate;
 import com.linkedin.events.metadata.ChangeType;
 import com.linkedin.tag.TagProperties;
+import datahub.event.MetadataChangeProposalWrapper;
 import datahub.protobuf.model.ProtobufField;
-import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.ProtobufExtensionUtil;
+import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-import datahub.event.MetadataChangeProposalWrapper;
-
-import static datahub.protobuf.ProtobufUtils.getFieldOptions;
-import static datahub.protobuf.ProtobufUtils.getMessageOptions;
-
 import java.util.stream.Stream;
 
-public class TagVisitor implements ProtobufModelVisitor<MetadataChangeProposalWrapper<TagProperties>> {
-    private static final String TAG_PROPERTIES_ASPECT = "tagProperties";
+public class TagVisitor
+    implements ProtobufModelVisitor<MetadataChangeProposalWrapper<TagProperties>> {
+  private static final String TAG_PROPERTIES_ASPECT = "tagProperties";
 
-    @Override
-    public Stream<MetadataChangeProposalWrapper<TagProperties>> visitGraph(VisitContext context) {
-        return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getMessageOptions(context.root().messageProto()),
-                context.getGraph().getRegistry())
-                .map(TagVisitor::wrapTagProperty);
-    }
+  @Override
+  public Stream<MetadataChangeProposalWrapper<TagProperties>> visitGraph(
+      VisitContext context) {
+    return ProtobufExtensionUtil.extractTagPropertiesFromOptions(
+            getMessageOptions(context.root().messageProto()), context.getGraph().getRegistry())
+        .map(TagVisitor::wrapTagProperty);
+  }
 
-    @Override
-    public Stream<MetadataChangeProposalWrapper<TagProperties>> visitField(ProtobufField field, VisitContext context) {
-        return ProtobufExtensionUtil.extractTagPropertiesFromOptions(getFieldOptions(field.getFieldProto()),
-                context.getGraph().getRegistry())
-                .map(TagVisitor::wrapTagProperty);
-    }
+  @Override
+  public Stream<MetadataChangeProposalWrapper<TagProperties>> visitField(
+      ProtobufField field, VisitContext context) {
+    return ProtobufExtensionUtil.extractTagPropertiesFromOptions(
+            getFieldOptions(field.getFieldProto()), context.getGraph().getRegistry())
+        .map(TagVisitor::wrapTagProperty);
+  }
 
-    private static MetadataChangeProposalWrapper<TagProperties> wrapTagProperty(TagProperties tagProperty) {
-        return new MetadataChangeProposalWrapper<>(
-                "tag",
-                new TagUrn(tagProperty.getName()).toString(),
-                ChangeType.UPSERT,
-                tagProperty,
-                TAG_PROPERTIES_ASPECT);
-    }
+  private static MetadataChangeProposalWrapper<TagProperties> wrapTagProperty(
+      TagProperties tagProperty) {
+    return new MetadataChangeProposalWrapper<>(
+        "tag",
+        new TagUrn(tagProperty.getName()).toString(),
+        ChangeType.UPSERT,
+        tagProperty,
+        TAG_PROPERTIES_ASPECT);
+  }
 }
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java
index bbb8e532f1033..e96bb63220b04 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufDatasetTest.java
@@ -1,5 +1,10 @@
 package datahub.protobuf;
 
+import static datahub.protobuf.TestFixtures.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+import com.linkedin.common.FabricType;
 import com.linkedin.common.GlobalTags;
 import com.linkedin.common.GlossaryTermAssociationArray;
 import com.linkedin.common.GlossaryTerms;
@@ -9,9 +14,8 @@
 import com.linkedin.common.Status;
 import com.linkedin.common.TagAssociationArray;
 import com.linkedin.common.url.Url;
-import com.linkedin.data.template.StringArray;
 import com.linkedin.common.urn.DataPlatformUrn;
-import com.linkedin.common.FabricType;
+import com.linkedin.data.template.StringArray;
 import com.linkedin.schema.ArrayType;
 import com.linkedin.schema.BooleanType;
 import com.linkedin.schema.BytesType;
@@ -26,430 +30,701 @@
 import datahub.protobuf.model.ProtobufField;
 import datahub.protobuf.visitors.ProtobufModelVisitor;
 import datahub.protobuf.visitors.VisitContext;
-import org.junit.jupiter.api.Test;
-
 import java.io.IOException;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
-import static datahub.protobuf.TestFixtures.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-
+import org.junit.jupiter.api.Test;
 
 public class ProtobufDatasetTest {
 
-    @Test
-    public
void noSchemaTest() throws IOException { - ProtobufDataset dataset = ProtobufDataset.builder() - .setDataPlatformUrn(new DataPlatformUrn("kafka")) - .setProtocIn(getTestProtoc("protobuf", "messageA")) - .setAuditStamp(TEST_AUDIT_STAMP) - .setFabricType(FabricType.DEV) - .build(); - - assertNotNull(dataset); - assertEquals(2, dataset.getAllMetadataChangeProposals().count()); - assertEquals(8, dataset.getDatasetMCPs().size()); - assertEquals(0, dataset.getVisitorMCPs().size()); - } - - @Test - public void platformSchemaTest() throws IOException { - assertEquals(getTestProtoSource("protobuf", "messageA"), - extractDocumentSchema(getTestProtobufDataset("protobuf", "messageA"))); - } - - @Test - public void messageA() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(9, testMetadata.getFields().size()); - - assertEquals("MessageA", extractAspect(test.getDatasetMCPs().get(0), "name")); - assertEquals("protobuf.MessageA", extractAspect(test.getDatasetMCPs().get(0), "qualifiedName")); - - assertEquals("platform.topic", extractCustomProperty(test.getDatasetMCPs().get(0), "kafka_topic")); - - assertEquals(new InstitutionalMemory().setElements(new InstitutionalMemoryMetadataArray( - new InstitutionalMemoryMetadata() - .setDescription("Github Team") - .setCreateStamp(TEST_AUDIT_STAMP) - .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), - new InstitutionalMemoryMetadata() - .setDescription("Slack Channel") - .setCreateStamp(TEST_AUDIT_STAMP) - .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 1") - .setUrl(new Url("https://some/link")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 2") - .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 3") - .setUrl(new Url("https://github.com/apache/kafka")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA.map_field Reference 1") - .setUrl(new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps")))).data(), - test.getDatasetMCPs().get(1).getAspect().data()); - - assertEquals(new Status().setRemoved(false).data(), test.getDatasetMCPs().get(test.getDatasetMCPs().size() - 1).getAspect().data()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))) - .setNativeDataType("bytes") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Leading single line comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - 
.setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].position") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("uint32") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Leading multiline comment\nSecond line of leading multiline comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=int].position")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].total") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("uint32") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Detached comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=int].total")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("uint64") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("Test repeated and trailing comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("string") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str")).findFirst().orElseThrow()); - - } - - @Test - public void messageB() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(24, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].id") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - 
.setNativeDataType("google.protobuf.Int64Value") - .setDescription("wrapped int64") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=long].id")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BooleanType()))) - .setNativeDataType("google.protobuf.BoolValue") - .setDescription("Indicator") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot")).findFirst().orElseThrow()); - - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=string].value") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("message value") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=string].value")).findFirst().orElseThrow()); - } - - @Test - public void messageC() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC"); - - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(4, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) - .setNativeDataType("oneof") - .setDescription("one of field comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("one of string comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - 
.equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("int32") - .setDescription("one of int comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")).findFirst().orElseThrow()); - } - - @Test - @SuppressWarnings("LineLength") - public void messageC2NestedOneOf() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC2"); - - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC1,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - assertEquals(6, testMetadata.getFields().size()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageC2") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType().setNestedType(new StringArray())))) - .setNativeDataType("protobuf.MessageC3") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - 
.equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) - .setNativeDataType("oneof") - .setDescription("one of field comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string") - .setDescription("one of string comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setNullable(true) - .setIsPartOfKey(false) - .setFieldPath("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("int32") - .setDescription("one of int comment") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int")).findFirst().orElseThrow()); - } - - @Test - public void customFieldVisitors() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); - - test.setFieldVisitor(new ProtobufModelVisitor>() { - @Override - public Stream> visitField(ProtobufField field, VisitContext context) { - if (field.fullName().equals("protobuf.MessageA.sequence_id")) { - return Stream.of(Pair.of( - new SchemaField() - .setDescription("my comment") - .setNativeDataType("my type") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))), - 0d)); - } else { - return Stream.of(); - } + @Test + public void noSchemaTest() throws IOException { + 
ProtobufDataset dataset = + ProtobufDataset.builder() + .setDataPlatformUrn(new DataPlatformUrn("kafka")) + .setProtocIn(getTestProtoc("protobuf", "messageA")) + .setAuditStamp(TEST_AUDIT_STAMP) + .setFabricType(FabricType.DEV) + .build(); + + assertNotNull(dataset); + assertEquals(2, dataset.getAllMetadataChangeProposals().count()); + assertEquals(8, dataset.getDatasetMCPs().size()); + assertEquals(0, dataset.getVisitorMCPs().size()); + } + + @Test + public void platformSchemaTest() throws IOException { + assertEquals( + getTestProtoSource("protobuf", "messageA"), + extractDocumentSchema(getTestProtobufDataset("protobuf", "messageA"))); + } + + @Test + public void messageA() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageA"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(9, testMetadata.getFields().size()); + + assertEquals("MessageA", extractAspect(test.getDatasetMCPs().get(0), "name")); + assertEquals("protobuf.MessageA", extractAspect(test.getDatasetMCPs().get(0), "qualifiedName")); + + assertEquals( + "platform.topic", extractCustomProperty(test.getDatasetMCPs().get(0), "kafka_topic")); + + assertEquals( + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + new InstitutionalMemoryMetadata() + .setDescription("Github Team") + .setCreateStamp(TEST_AUDIT_STAMP) + .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), + new InstitutionalMemoryMetadata() + .setDescription("Slack Channel") + .setCreateStamp(TEST_AUDIT_STAMP) + .setUrl( + new Url( + "https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 1") + .setUrl(new Url("https://some/link")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 2") + .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 3") + .setUrl(new Url("https://github.com/apache/kafka")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA.map_field Reference 1") + .setUrl( + new Url( + "https://developers.google.com/protocol-buffers/docs/proto3#maps")))) + .data(), + test.getDatasetMCPs().get(1).getAspect().data()); + + assertEquals( + new Status().setRemoved(false).data(), + test.getDatasetMCPs().get(test.getDatasetMCPs().size() - 1).getAspect().data()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id") + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))) + .setNativeDataType("bytes") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Leading single line comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=bytes].sequence_id")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + 
.setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].position") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("uint32") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Leading multiline comment\nSecond line of leading multiline comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=int].position")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageA].[type=int].total") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("uint32") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Detached comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageA].[type=int].total")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num") + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("uint64") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("Test repeated and trailing comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=long].repeated_num")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str") + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("string") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageA].[type=array].[type=string].repeated_str")) + .findFirst() + .orElseThrow()); + } + + @Test + public void messageB() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(24, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].id") + 
.setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("google.protobuf.Int64Value") + .setDescription("wrapped int64") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=long].id")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BooleanType()))) + .setNativeDataType("google.protobuf.BoolValue") + .setDescription("Indicator") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=boolean].hot")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=string].value") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("message value") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=string].value")) + .findFirst() + .orElseThrow()); + } + + @Test + public void messageC() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(4, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) + .setNativeDataType("oneof") + .setDescription("one of field comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("one of 
string comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("int32") + .setDescription("one of int comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int")) + .findFirst() + .orElseThrow()); + } + + @Test + @SuppressWarnings("LineLength") + public void messageC2NestedOneOf() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageC2"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageC1,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + assertEquals(6, testMetadata.getFields().size()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageC2") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray())))) + .setNativeDataType("protobuf.MessageC3") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal") + .setNullable(true) + 
.setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=string].normal")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))) + .setNativeDataType("oneof") + .setDescription("one of field comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string") + .setDescription("one of string comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=string].one_of_string")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setNullable(true) + .setIsPartOfKey(false) + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("int32") + .setDescription("one of int comment") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageC1].[type=protobuf_MessageC2].messageList.[type=array].[type=protobuf_MessageC3].list.[type=union].one_of_field.[type=int].one_of_int")) + .findFirst() + .orElseThrow()); + } + + @Test + public void customFieldVisitors() throws IOException { + ProtobufDataset test = 
getTestProtobufDataset("protobuf", "messageA"); + + test.setFieldVisitor( + new ProtobufModelVisitor>() { + @Override + public Stream> visitField( + ProtobufField field, VisitContext context) { + if (field.fullName().equals("protobuf.MessageA.sequence_id")) { + return Stream.of( + Pair.of( + new SchemaField() + .setDescription("my comment") + .setNativeDataType("my type") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType()))), + 0d)); + } else { + return Stream.of(); } + } }); - assertEquals(1, test.getSchemaMetadata().getFields().size()); - assertEquals(new SchemaField() - .setDescription("my comment") - .setNativeDataType("my type") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType()))), - test.getSchemaMetadata().getFields().get(0)); - } - - @Test - public void duplicateNested() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, testMetadata.getVersion()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageA") - .setDescription("nested message a") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested")).findFirst().orElseThrow()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setNativeDataType("protobuf.MessageA") - .setDescription("nested message a second time") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested")).findFirst().orElseThrow()); - - Set firstNested = testMetadata.getFields().stream().map(SchemaField::getFieldPath) - .filter(f -> f.contains(".nested")) - .collect(Collectors.toSet()); - Set secondNested = testMetadata.getFields().stream().map(SchemaField::getFieldPath) - .filter(f -> f.contains(".secondary_nested")) - .collect(Collectors.toSet()); - - assertEquals(firstNested.size(), secondNested.size()); - assertEquals(firstNested.stream().map(s -> s.replace(".nested", ".secondary_nested")).collect(Collectors.toSet()), secondNested); - } - - @Test - public void googleTimestamp() throws IOException { - ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); - - assertEquals("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", - test.getDatasetUrn().toString()); - - SchemaMetadata testMetadata = test.getSchemaMetadata(); - - assertEquals(1, 
testMetadata.getVersion()); - - assertEquals(new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].time") - .setNullable(true) - .setIsPartOfKey(false) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setNativeDataType("google.protobuf.Timestamp") - .setDescription("google timestamp") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(test.getAuditStamp())), - testMetadata.getFields().stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=protobuf_MessageB].[type=long].time")).findFirst().orElseThrow()); - } + assertEquals(1, test.getSchemaMetadata().getFields().size()); + assertEquals( + new SchemaField() + .setDescription("my comment") + .setNativeDataType("my type") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType()))), + test.getSchemaMetadata().getFields().get(0)); + } + + @Test + public void duplicateNested() throws IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageA") + .setDescription("nested message a") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].nested")) + .findFirst() + .orElseThrow()); + + assertEquals( + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setNativeDataType("protobuf.MessageA") + .setDescription("nested message a second time") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=protobuf_MessageB].[type=protobuf_MessageA].secondary_nested")) + .findFirst() + .orElseThrow()); + + Set<String> firstNested = + testMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .filter(f -> f.contains(".nested")) + .collect(Collectors.toSet()); + Set<String> secondNested = + testMetadata.getFields().stream() + .map(SchemaField::getFieldPath) + .filter(f -> f.contains(".secondary_nested")) + .collect(Collectors.toSet()); + + assertEquals(firstNested.size(), secondNested.size()); + assertEquals( + firstNested.stream() + .map(s -> s.replace(".nested", ".secondary_nested")) + .collect(Collectors.toSet()), + secondNested); + } + + @Test + public void googleTimestamp() throws
IOException { + ProtobufDataset test = getTestProtobufDataset("protobuf", "messageB"); + + assertEquals( + "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageB,TEST)", + test.getDatasetUrn().toString()); + + SchemaMetadata testMetadata = test.getSchemaMetadata(); + + assertEquals(1, testMetadata.getVersion()); + + assertEquals( + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageB].[type=long].time") + .setNullable(true) + .setIsPartOfKey(false) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setNativeDataType("google.protobuf.Timestamp") + .setDescription("google timestamp") + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(test.getAuditStamp())), + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals("[version=2.0].[type=protobuf_MessageB].[type=long].time")) + .findFirst() + .orElseThrow()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java index 3a00edca8284a..e2599cb4c3f68 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/ProtobufUtilsTest.java @@ -1,45 +1,47 @@ package datahub.protobuf; -import com.google.protobuf.DescriptorProtos; -import com.google.protobuf.ExtensionRegistry; -import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - -import java.io.IOException; - import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; import static datahub.protobuf.TestFixtures.getTestProtoc; import static org.junit.jupiter.api.Assertions.*; +import com.google.protobuf.DescriptorProtos; +import com.google.protobuf.ExtensionRegistry; +import datahub.protobuf.model.ProtobufGraph; +import java.io.IOException; +import org.junit.jupiter.api.Test; public class ProtobufUtilsTest { - @Test - public void registryTest() throws IOException, IllegalArgumentException { - byte[] protocBytes = getTestProtoc("extended_protobuf", "messageA").readAllBytes(); - DescriptorProtos.FileDescriptorSet fileSet = getTestProtobufFileSet("extended_protobuf", "messageA"); - ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet); - DescriptorProtos.FileDescriptorSet fileSetWithRegistry = DescriptorProtos.FileDescriptorSet.parseFrom(protocBytes, registry); - - assertNotEquals(fileSet, fileSetWithRegistry); - - /* - * - * Without the ExtensionRegistry we get field numbers instead of the names. 
- */ - ProtobufGraph graph = new ProtobufGraph(fileSet, null); - assertEquals("[meta.msg.classification_enum]: HighlyConfidential\n" - + "[meta.msg.team]: \"corpGroup:TeamB\"\n" - + "[meta.msg.team]: \"corpUser:datahub\"\n" - + "[meta.msg.technical_owner]: \"corpGroup:TechnicalOwner\"\n" - + "[meta.msg.domain]: \"Engineering\"\n" - + "[meta.msg.type]: ENTITY\n" - + "[meta.msg.bool_feature]: true\n" - + "[meta.msg.alert_channel]: \"#alerts\"\n" - + "[meta.msg.tag_list]: \"a, b, c\"\n" - + "[meta.msg.repeat_string]: \"a\"\n" - + "[meta.msg.repeat_string]: \"b\"\n" - + "[meta.msg.repeat_enum]: ENTITY\n" - + "[meta.msg.repeat_enum]: EVENT\n", graph.root().messageProto().getOptions().toString()); - } + @Test + public void registryTest() throws IOException, IllegalArgumentException { + byte[] protocBytes = getTestProtoc("extended_protobuf", "messageA").readAllBytes(); + DescriptorProtos.FileDescriptorSet fileSet = + getTestProtobufFileSet("extended_protobuf", "messageA"); + ExtensionRegistry registry = ProtobufUtils.buildRegistry(fileSet); + DescriptorProtos.FileDescriptorSet fileSetWithRegistry = + DescriptorProtos.FileDescriptorSet.parseFrom(protocBytes, registry); + + assertNotEquals(fileSet, fileSetWithRegistry); + + /* + * + * Without the ExtensionRegistry we get field numbers instead of the names. + */ + ProtobufGraph graph = new ProtobufGraph(fileSet, null); + assertEquals( + "[meta.msg.classification_enum]: HighlyConfidential\n" + + "[meta.msg.team]: \"corpGroup:TeamB\"\n" + + "[meta.msg.team]: \"corpUser:datahub\"\n" + + "[meta.msg.technical_owner]: \"corpGroup:TechnicalOwner\"\n" + + "[meta.msg.domain]: \"Engineering\"\n" + + "[meta.msg.type]: ENTITY\n" + + "[meta.msg.bool_feature]: true\n" + + "[meta.msg.alert_channel]: \"#alerts\"\n" + + "[meta.msg.tag_list]: \"a, b, c\"\n" + + "[meta.msg.repeat_string]: \"a\"\n" + + "[meta.msg.repeat_string]: \"b\"\n" + + "[meta.msg.repeat_enum]: ENTITY\n" + + "[meta.msg.repeat_enum]: EVENT\n", + graph.root().messageProto().getOptions().toString()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java index 6859e7fee9a60..7ee69149cf9dd 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/TestFixtures.java @@ -11,72 +11,85 @@ import datahub.event.MetadataChangeProposalWrapper; import datahub.protobuf.model.ProtobufGraph; import datahub.protobuf.visitors.VisitContext; - import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.Objects; public class TestFixtures { - private TestFixtures() { } + private TestFixtures() {} - public static final DataPlatformUrn TEST_DATA_PLATFORM = new DataPlatformUrn("kafka"); - public static final AuditStamp TEST_AUDIT_STAMP = new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(new CorpuserUrn("datahub")); + public static final DataPlatformUrn TEST_DATA_PLATFORM = new DataPlatformUrn("kafka"); + public static final AuditStamp TEST_AUDIT_STAMP = + new AuditStamp().setTime(System.currentTimeMillis()).setActor(new CorpuserUrn("datahub")); - public static InputStream getTestProtoc(String protoPackage, String filename) { - return Objects.requireNonNull(TestFixtures.class.getClassLoader() - .getResourceAsStream(String.format("%s/%s.protoc", protoPackage, filename))); - } + 
public static InputStream getTestProtoc(String protoPackage, String filename) { + return Objects.requireNonNull( + TestFixtures.class + .getClassLoader() + .getResourceAsStream(String.format("%s/%s.protoc", protoPackage, filename))); + } - public static String getTestProtoSource(String protoPackage, String filename) throws IOException { - return new String(Objects.requireNonNull(TestFixtures.class.getClassLoader() - .getResourceAsStream(String.format("%s/%s.proto", protoPackage, filename))).readAllBytes(), - StandardCharsets.UTF_8); - } + public static String getTestProtoSource(String protoPackage, String filename) throws IOException { + return new String( + Objects.requireNonNull( + TestFixtures.class + .getClassLoader() + .getResourceAsStream(String.format("%s/%s.proto", protoPackage, filename))) + .readAllBytes(), + StandardCharsets.UTF_8); + } - public static ProtobufDataset getTestProtobufDataset(String protoPackage, String filename) throws IOException { - return ProtobufDataset.builder() - .setDataPlatformUrn(TEST_DATA_PLATFORM) - .setSchema(getTestProtoSource(protoPackage, filename)) - .setProtocIn(getTestProtoc(protoPackage, filename)) - .setAuditStamp(TEST_AUDIT_STAMP) - .setFabricType(FabricType.TEST) - .setGithubOrganization("myOrg") - .setSlackTeamId("SLACK123") - .build(); - } + public static ProtobufDataset getTestProtobufDataset(String protoPackage, String filename) + throws IOException { + return ProtobufDataset.builder() + .setDataPlatformUrn(TEST_DATA_PLATFORM) + .setSchema(getTestProtoSource(protoPackage, filename)) + .setProtocIn(getTestProtoc(protoPackage, filename)) + .setAuditStamp(TEST_AUDIT_STAMP) + .setFabricType(FabricType.TEST) + .setGithubOrganization("myOrg") + .setSlackTeamId("SLACK123") + .build(); + } - public static DescriptorProtos.FileDescriptorSet getTestProtobufFileSet(String protoPackage, String filename) throws IOException { - return DescriptorProtos.FileDescriptorSet - .parseFrom(getTestProtoc(protoPackage, filename).readAllBytes()); - } + public static DescriptorProtos.FileDescriptorSet getTestProtobufFileSet( + String protoPackage, String filename) throws IOException { + return DescriptorProtos.FileDescriptorSet.parseFrom( + getTestProtoc(protoPackage, filename).readAllBytes()); + } - public static VisitContext.VisitContextBuilder getVisitContextBuilder(String message) { - return VisitContext.builder() - .datasetUrn(new DatasetUrn(TEST_DATA_PLATFORM, message, FabricType.TEST)) - .auditStamp(TEST_AUDIT_STAMP); - } + public static VisitContext.VisitContextBuilder getVisitContextBuilder(String message) { + return VisitContext.builder() + .datasetUrn(new DatasetUrn(TEST_DATA_PLATFORM, message, FabricType.TEST)) + .auditStamp(TEST_AUDIT_STAMP); + } - public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename) throws IOException { - return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename)); - } + public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename) + throws IOException { + return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename)); + } - public static ProtobufGraph getTestProtobufGraph(String protoPackage, String filename, String messageName) throws IOException { - return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename), messageName); - } + public static ProtobufGraph getTestProtobufGraph( + String protoPackage, String filename, String messageName) throws IOException { + return new ProtobufGraph(getTestProtobufFileSet(protoPackage, filename), 
messageName); + } - public static Object extractAspect(MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String aspect) { - return mcp.getAspect().data().get(aspect); - } + public static Object extractAspect( + MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String aspect) { + return mcp.getAspect().data().get(aspect); + } - public static Object extractCustomProperty(MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String key) { - return ((DataMap) extractAspect(mcp, "customProperties")).get(key); - } + public static Object extractCustomProperty( + MetadataChangeProposalWrapper<? extends RecordTemplate> mcp, String key) { + return ((DataMap) extractAspect(mcp, "customProperties")).get(key); + } - public static String extractDocumentSchema(ProtobufDataset protobufDataset) { - return String.valueOf(((DataMap) ((DataMap) protobufDataset.getSchemaMetadata().getPlatformSchema().data()) - .get("com.linkedin.schema.KafkaSchema")).get("documentSchema")); - } + public static String extractDocumentSchema(ProtobufDataset protobufDataset) { + return String.valueOf( + ((DataMap) + ((DataMap) protobufDataset.getSchemaMetadata().getPlatformSchema().data()) + .get("com.linkedin.schema.KafkaSchema")) + .get("documentSchema")); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java index 7c98077690d66..fed9f250b359f 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufEnumTest.java @@ -1,80 +1,87 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.EnumDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.EnumType; import com.linkedin.schema.SchemaFieldDataType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufEnumTest { - @Test - public void enumTest() { - EnumDescriptorProto expectedEnum = EnumDescriptorProto.newBuilder() - .setName("enum1") - .build(); - DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .addEnumType(expectedEnum) - .build(); - - ProtobufEnum test = ProtobufEnum.enumBuilder() - .enumProto(expectedEnum) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); + @Test + public void enumTest() { + EnumDescriptorProto expectedEnum = EnumDescriptorProto.newBuilder().setName("enum1").build(); + DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .addEnumType(expectedEnum) + .build(); - assertEquals("enum1", test.name()); - assertEquals("protobuf.enum1", test.fullName()); - assertEquals("[type=enum]", test.fieldPathType()); - assertEquals("enum", test.nativeType()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertEquals(new
SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), test.schemaFieldDataType()); - assertEquals("ProtobufEnum[protobuf.enum1]", test.toString()); - assertEquals("", test.comment()); - } + ProtobufEnum test = + ProtobufEnum.enumBuilder() + .enumProto(expectedEnum) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); - @Test - public void enumEqualityTest() { - EnumDescriptorProto enum1 = EnumDescriptorProto.newBuilder().setName("enum1").build(); - EnumDescriptorProto enum2 = EnumDescriptorProto.newBuilder().setName("enum2").build(); - EnumDescriptorProto enum1Dup = EnumDescriptorProto.newBuilder().setName("enum1").build(); + assertEquals("enum1", test.name()); + assertEquals("protobuf.enum1", test.fullName()); + assertEquals("[type=enum]", test.fieldPathType()); + assertEquals("enum", test.nativeType()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), + test.schemaFieldDataType()); + assertEquals("ProtobufEnum[protobuf.enum1]", test.toString()); + assertEquals("", test.comment()); + } - DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .addAllEnumType(List.of(enum1, enum2, enum1Dup)) - .build(); + @Test + public void enumEqualityTest() { + EnumDescriptorProto enum1 = EnumDescriptorProto.newBuilder().setName("enum1").build(); + EnumDescriptorProto enum2 = EnumDescriptorProto.newBuilder().setName("enum2").build(); + EnumDescriptorProto enum1Dup = EnumDescriptorProto.newBuilder().setName("enum1").build(); - ProtobufEnum test1 = ProtobufEnum.enumBuilder().enumProto(enum1) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); - ProtobufEnum test2 = ProtobufEnum.enumBuilder().enumProto(enum2) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); - ProtobufEnum test1Dup = ProtobufEnum.enumBuilder().enumProto(enum1Dup) - .messageProto(expectedMessage) - .fileProto(expectedFile) - .build(); + DescriptorProto expectedMessage = DescriptorProto.newBuilder().build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .addAllEnumType(List.of(enum1, enum2, enum1Dup)) + .build(); - assertEquals(test1, test1Dup); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); - } + ProtobufEnum test1 = + ProtobufEnum.enumBuilder() + .enumProto(enum1) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + ProtobufEnum test2 = + ProtobufEnum.enumBuilder() + .enumProto(enum2) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + ProtobufEnum test1Dup = + ProtobufEnum.enumBuilder() + .enumProto(enum1Dup) + .messageProto(expectedMessage) + .fileProto(expectedFile) + .build(); + assertEquals(test1, test1Dup); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java index 543b815f7f72b..6d4dc8bc4d585 100644 --- 
a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufFieldTest.java @@ -1,10 +1,12 @@ package datahub.protobuf.model; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.data.template.StringArray; - import com.linkedin.schema.ArrayType; import com.linkedin.schema.BooleanType; import com.linkedin.schema.BytesType; @@ -12,257 +14,313 @@ import com.linkedin.schema.FixedType; import com.linkedin.schema.NumberType; import com.linkedin.schema.RecordType; -import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.SchemaMetadata; import com.linkedin.schema.StringType; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.Arrays; import java.util.Set; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufFieldTest { - private static final DescriptorProto EXPECTED_MESSAGE_PROTO = DescriptorProto.newBuilder() - .setName("message1") + private static final DescriptorProto EXPECTED_MESSAGE_PROTO = + DescriptorProto.newBuilder().setName("message1").build(); + private static final FileDescriptorProto EXPECTED_FILE_PROTO = + FileDescriptorProto.newBuilder() + .addMessageType(EXPECTED_MESSAGE_PROTO) + .setPackage("protobuf") + .build(); + private static final ProtobufMessage EXPECTED_MESSAGE = + ProtobufMessage.builder() + .messageProto(EXPECTED_MESSAGE_PROTO) + .fileProto(EXPECTED_FILE_PROTO) + .build(); + + @Test + public void fieldTest() { + FieldDescriptorProto expectedField = + FieldDescriptorProto.newBuilder() + .setName("field1") + .setNumber(1) + .setType(FieldDescriptorProto.Type.TYPE_BYTES) .build(); - private static final FileDescriptorProto EXPECTED_FILE_PROTO = FileDescriptorProto.newBuilder() - .addMessageType(EXPECTED_MESSAGE_PROTO) + DescriptorProto expectedMessage1 = + DescriptorProto.newBuilder().setName("message1").addField(expectedField).build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage1) .setPackage("protobuf") .build(); - private static final ProtobufMessage EXPECTED_MESSAGE = ProtobufMessage.builder() - .messageProto(EXPECTED_MESSAGE_PROTO) - .fileProto(EXPECTED_FILE_PROTO) - .build(); - - - @Test - public void fieldTest() { - FieldDescriptorProto expectedField = FieldDescriptorProto.newBuilder() - .setName("field1") - .setNumber(1) - .setType(FieldDescriptorProto.Type.TYPE_BYTES) - .build(); - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .addField(expectedField) - .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage1) - .setPackage("protobuf") - .build(); - ProtobufMessage expectedMessage = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile) - .build(); + ProtobufMessage expectedMessage = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile).build(); - ProtobufField test = 
ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(expectedMessage) - .build(); + ProtobufField test = + ProtobufField.builder().fieldProto(expectedField).protobufMessage(expectedMessage).build(); - assertEquals("field1", test.name()); - assertEquals("protobuf.message1.field1", test.fullName()); - assertEquals("[type=bytes]", test.fieldPathType()); - assertEquals("protobuf.message1", test.parentMessageName()); - assertEquals(expectedMessage1, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertNull(test.oneOfProto()); - assertEquals("bytes", test.nativeType()); - assertFalse(test.isMessage()); - assertEquals(1, test.sortWeight()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), test.schemaFieldDataType()); - assertEquals("ProtobufField[protobuf.message1.field1]", test.toString()); - } + assertEquals("field1", test.name()); + assertEquals("protobuf.message1.field1", test.fullName()); + assertEquals("[type=bytes]", test.fieldPathType()); + assertEquals("protobuf.message1", test.parentMessageName()); + assertEquals(expectedMessage1, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertNull(test.oneOfProto()); + assertEquals("bytes", test.nativeType()); + assertFalse(test.isMessage()); + assertEquals(1, test.sortWeight()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), + test.schemaFieldDataType()); + assertEquals("ProtobufField[protobuf.message1.field1]", test.toString()); + } - @Test - public void fieldPathTypeTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void fieldPathTypeTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { + if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { assertEquals("[type=protobuf_message1]", test.fieldPathType()); - } else if (type.name().endsWith("64")) { + } else if (type.name().endsWith("64")) { assertEquals("[type=long]", test.fieldPathType()); - } else if (type.name().endsWith("32")) { + } else if (type.name().endsWith("32")) { assertEquals("[type=int]", test.fieldPathType()); - } else if (type.name().endsWith("BOOL")) { + } else if (type.name().endsWith("BOOL")) { assertEquals("[type=boolean]", test.fieldPathType()); - } else { - assertEquals(String.format("[type=%s]", type.name().split("_")[1].toLowerCase()), test.fieldPathType()); - } - }); - } + } else { + assertEquals( + String.format("[type=%s]", type.name().split("_")[1].toLowerCase()), + test.fieldPathType()); + } + }); + } - @Test - public 
void fieldPathTypeArrayTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; + @Test + public void fieldPathTypeArrayTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { + if (type.equals(FieldDescriptorProto.Type.TYPE_MESSAGE)) { assertEquals("[type=array].[type=protobuf_message1]", test.fieldPathType()); - } else if (type.name().endsWith("64")) { + } else if (type.name().endsWith("64")) { assertEquals("[type=array].[type=long]", test.fieldPathType()); - } else if (type.name().endsWith("32")) { + } else if (type.name().endsWith("32")) { assertEquals("[type=array].[type=int]", test.fieldPathType()); - } else if (type.name().endsWith("BOOL")) { + } else if (type.name().endsWith("BOOL")) { assertEquals("[type=array].[type=boolean]", test.fieldPathType()); - } else { - assertEquals(String.format("[type=array].[type=%s]", type.name().split("_")[1].toLowerCase()), test.fieldPathType()); - } - }); - } + } else { + assertEquals( + String.format( + "[type=array].[type=%s]", type.name().split("_")[1].toLowerCase()), + test.fieldPathType()); + } + }); + } - @Test - public void schemaFieldTypeTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void schemaFieldTypeTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .build(); - } + } - ProtobufField test = ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), test.schemaFieldDataType()); - } else if (type.name().contains("FIXED")) { - assertEquals(new 
SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new FixedType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("64") || type.name().endsWith("32") || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("BOOL")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BooleanType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("STRING")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("ENUM")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new EnumType())), test.schemaFieldDataType()); - } else if (type.name().endsWith("BYTES")) { - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new BytesType())), test.schemaFieldDataType()); - } else { + if (Set.of("TYPE_MESSAGE", "TYPE_GROUP").contains(type.name())) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType())), + test.schemaFieldDataType()); + } else if (type.name().contains("FIXED")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new FixedType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("64") + || type.name().endsWith("32") + || Set.of("TYPE_DOUBLE", "TYPE_FLOAT").contains(type.name())) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("BOOL")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BooleanType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("STRING")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("ENUM")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new EnumType())), + test.schemaFieldDataType()); + } else if (type.name().endsWith("BYTES")) { + assertEquals( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new BytesType())), + test.schemaFieldDataType()); + } else { fail(String.format("Add test case for %s", type)); - } - }); - } + } + }); + } - @Test - public void schemaFieldTypeArrayTest() { - Arrays.stream(FieldDescriptorProto.Type.values()).forEach(type -> { - final FieldDescriptorProto expectedField; - if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { - expectedField = FieldDescriptorProto.newBuilder() + @Test + public void schemaFieldTypeArrayTest() { + Arrays.stream(FieldDescriptorProto.Type.values()) + .forEach( + type -> { + final FieldDescriptorProto expectedField; + if (type == FieldDescriptorProto.Type.TYPE_MESSAGE) { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setTypeName(EXPECTED_MESSAGE.fullName()) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } else { - expectedField = FieldDescriptorProto.newBuilder() + } else { + expectedField = + FieldDescriptorProto.newBuilder() .setName("field1") .setNumber(1) .setType(type) .setLabel(FieldDescriptorProto.Label.LABEL_REPEATED) .build(); - } + } - ProtobufField test = 
ProtobufField.builder() - .fieldProto(expectedField) - .protobufMessage(EXPECTED_MESSAGE) - .build(); + ProtobufField test = + ProtobufField.builder() + .fieldProto(expectedField) + .protobufMessage(EXPECTED_MESSAGE) + .build(); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new ArrayType() - .setNestedType(new StringArray()))), test.schemaFieldDataType()); - }); - } + assertEquals( + new SchemaFieldDataType() + .setType( + SchemaFieldDataType.Type.create( + new ArrayType().setNestedType(new StringArray()))), + test.schemaFieldDataType()); + }); + } - @Test - public void nestedTypeFieldTest() throws IOException { - ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageC"); - SchemaMetadata testMetadata = test.getSchemaMetadata(); + @Test + public void nestedTypeFieldTest() throws IOException { + ProtobufDataset test = getTestProtobufDataset("extended_protobuf", "messageC"); + SchemaMetadata testMetadata = test.getSchemaMetadata(); - SchemaField nicknameField = testMetadata.getFields() - .stream() - .filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].nickname")) - .findFirst() - .orElseThrow(); + SchemaField nicknameField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].nickname")) + .findFirst() + .orElseThrow(); - assertEquals("nickname info", nicknameField.getDescription()); + assertEquals("nickname info", nicknameField.getDescription()); - SchemaField profileUrlField = testMetadata.getFields() - .stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].profile_url")) - .findFirst() - .orElseThrow(); + SchemaField profileUrlField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg].[type=extended_protobuf_UserMsg_UserInfo].user_info.[type=string].profile_url")) + .findFirst() + .orElseThrow(); - assertEquals("profile url info", profileUrlField.getDescription()); + assertEquals("profile url info", profileUrlField.getDescription()); - SchemaField addressField = testMetadata.getFields() - .stream().filter(f -> f.getFieldPath() - .equals("[version=2.0].[type=extended_protobuf_UserMsg]." + SchemaField addressField = + testMetadata.getFields().stream() + .filter( + f -> + f.getFieldPath() + .equals( + "[version=2.0].[type=extended_protobuf_UserMsg]." 
+ "[type=extended_protobuf_UserMsg_AddressMsg].address.[type=google_protobuf_StringValue].zipcode")) - .findFirst() - .orElseThrow(); + .findFirst() + .orElseThrow(); - assertEquals("Zip code, alphanumeric", addressField.getDescription()); - } + assertEquals("Zip code, alphanumeric", addressField.getDescription()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java index 80ffafff3f451..488222b87766d 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufGraphTest.java @@ -1,84 +1,99 @@ package datahub.protobuf.model; -import com.google.protobuf.DescriptorProtos.FileDescriptorSet; -import org.junit.jupiter.api.Test; +import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static org.junit.jupiter.api.Assertions.*; +import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufFileSet; -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static org.junit.jupiter.api.Assertions.*; +import org.junit.jupiter.api.Test; public class ProtobufGraphTest { - @Test - public void autodetectRootMessageTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - assertEquals("MessageB", test.autodetectRootMessage( - fileset.getFileList().stream().filter(f -> f.getName().equals("protobuf/messageB.proto")).findFirst().get()).get().messageProto().getName()); - - assertEquals("MessageA", test.autodetectRootMessage( - fileset.getFileList().stream().filter(f -> f.getName().equals("protobuf/messageA.proto")).findFirst().get()).get().messageProto().getName()); - } - - @Test - public void autodetectRootMessageFailureTest() throws IOException { - FileDescriptorSet empty = getTestProtobufFileSet("protobuf", "messageEmpty"); - assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(empty)); - } - - @Test - public void findMessageTest() throws IOException { - FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB"); - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB"); - - assertEquals("MessageA", - test.findMessage("protobuf.MessageA").messageProto().getName()); - assertEquals("MessageB", - test.findMessage("protobuf.MessageB").messageProto().getName()); - - assertThrows(IllegalArgumentException.class, () -> test.findMessage("not found")); - assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(fileset, "not found")); - assertEquals(test, new ProtobufGraph(fileset, "protobuf.MessageB")); - } - - @Test - public void commentTest() throws IOException { - ProtobufGraph test = getTestProtobufGraph("protobuf", "messageC"); - assertEquals("Test for one of", test.getComment()); - } - - @Test - public void equalityHashCodeTest() throws IOException { - ProtobufGraph testA = getTestProtobufGraph("protobuf", "messageA"); - ProtobufGraph testB = getTestProtobufGraph("protobuf", "messageB"); - FileDescriptorSet filesetB = 
getTestProtobufFileSet("protobuf", "messageB");
-
- assertEquals(testB, new ProtobufGraph(filesetB));
- assertNotEquals(testA, new ProtobufGraph(filesetB));
- assertEquals(testA, testA);
- assertNotEquals(testA, testB);
-
- HashSet<ProtobufGraph> graphs = new HashSet<>();
- graphs.add(testA);
- graphs.add(testB);
- graphs.add(new ProtobufGraph(filesetB));
- assertEquals(2, graphs.size());
- }
-
- @Test
- public void duplicateNestedTest() throws IOException {
- FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB");
- ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB");
-
- List<ProtobufElement> nestedMessages = test.vertexSet().stream().filter(f -> f.name().endsWith("nested"))
- .collect(Collectors.toList());
-
- assertEquals(2, nestedMessages.size(), "Expected 2 nested fields");
- }
+ @Test
+ public void autodetectRootMessageTest() throws IOException {
+ FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB");
+ ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB");
+
+ assertEquals(
+ "MessageB",
+ test.autodetectRootMessage(
+ fileset.getFileList().stream()
+ .filter(f -> f.getName().equals("protobuf/messageB.proto"))
+ .findFirst()
+ .get())
+ .get()
+ .messageProto()
+ .getName());
+
+ assertEquals(
+ "MessageA",
+ test.autodetectRootMessage(
+ fileset.getFileList().stream()
+ .filter(f -> f.getName().equals("protobuf/messageA.proto"))
+ .findFirst()
+ .get())
+ .get()
+ .messageProto()
+ .getName());
+ }
+
+ @Test
+ public void autodetectRootMessageFailureTest() throws IOException {
+ FileDescriptorSet empty = getTestProtobufFileSet("protobuf", "messageEmpty");
+ assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(empty));
+ }
+
+ @Test
+ public void findMessageTest() throws IOException {
+ FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB");
+ ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB");
+
+ assertEquals("MessageA", test.findMessage("protobuf.MessageA").messageProto().getName());
+ assertEquals("MessageB", test.findMessage("protobuf.MessageB").messageProto().getName());
+
+ assertThrows(IllegalArgumentException.class, () -> test.findMessage("not found"));
+ assertThrows(IllegalArgumentException.class, () -> new ProtobufGraph(fileset, "not found"));
+ assertEquals(test, new ProtobufGraph(fileset, "protobuf.MessageB"));
+ }
+
+ @Test
+ public void commentTest() throws IOException {
+ ProtobufGraph test = getTestProtobufGraph("protobuf", "messageC");
+ assertEquals("Test for one of", test.getComment());
+ }
+
+ @Test
+ public void equalityHashCodeTest() throws IOException {
+ ProtobufGraph testA = getTestProtobufGraph("protobuf", "messageA");
+ ProtobufGraph testB = getTestProtobufGraph("protobuf", "messageB");
+ FileDescriptorSet filesetB = getTestProtobufFileSet("protobuf", "messageB");
+
+ assertEquals(testB, new ProtobufGraph(filesetB));
+ assertNotEquals(testA, new ProtobufGraph(filesetB));
+ assertEquals(testA, testA);
+ assertNotEquals(testA, testB);
+
+ HashSet<ProtobufGraph> graphs = new HashSet<>();
+ graphs.add(testA);
+ graphs.add(testB);
+ graphs.add(new ProtobufGraph(filesetB));
+ assertEquals(2, graphs.size());
+ }
+
+ @Test
+ public void duplicateNestedTest() throws IOException {
+ FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB");
+ ProtobufGraph test = getTestProtobufGraph("protobuf", "messageB");
+
+ List<ProtobufElement> nestedMessages =
+ test.vertexSet().stream()
+ .filter(f -> f.name().endsWith("nested"))
+ .collect(Collectors.toList());
+
+ assertEquals(2,
nestedMessages.size(), "Expected 2 nested fields"); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java index e961b6ffd2d61..1d6b3907d76d9 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufMessageTest.java @@ -1,180 +1,168 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import com.linkedin.schema.MapType; import com.linkedin.schema.RecordType; import com.linkedin.schema.SchemaFieldDataType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufMessageTest { - @Test - public void messageTest() { - DescriptorProto expectedMessage = DescriptorProto.newBuilder() - .setName("message1") - .build(); - DescriptorProto expectedParentMessage1 = DescriptorProto.newBuilder() - .setName("messageParent1") - .addNestedType(expectedMessage) - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .build(); - - ProtobufMessage testParent = ProtobufMessage.builder() - .messageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage test = ProtobufMessage.builder() - .messageProto(expectedMessage) - .parentMessageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - - assertEquals("messageParent1", testParent.name()); - assertEquals("protobuf.messageParent1", testParent.fullName()); - assertEquals("protobuf.messageParent1", testParent.nativeType()); - assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); - assertEquals(expectedFile, testParent.fileProto()); - assertEquals(expectedParentMessage1, testParent.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), testParent.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); - - assertEquals("message1", test.name()); - assertEquals("protobuf.messageParent1.message1", test.fullName()); - assertEquals("protobuf.messageParent1.message1", test.nativeType()); - assertEquals("[type=protobuf_messageParent1_message1]", test.fieldPathType()); - assertEquals(expectedFile, test.fileProto()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), test.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1.message1]", test.toString()); - } - - @Test - public void mapTest() { - DescriptorProto expectedMap = DescriptorProto.newBuilder() - .setName("MapFieldEntry") - .build(); - DescriptorProto expectedParentMessage1 = DescriptorProto.newBuilder() - .setName("messageParent1") - .addNestedType(expectedMap) - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMap) - .setPackage("protobuf") - .build(); - - ProtobufMessage 
testParent = ProtobufMessage.builder() - .messageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage testMap = ProtobufMessage.builder() - .messageProto(expectedMap) - .parentMessageProto(expectedParentMessage1) - .fileProto(expectedFile) - .build(); - - assertEquals("messageParent1", testParent.name()); - assertEquals("protobuf.messageParent1", testParent.fullName()); - assertEquals("protobuf.messageParent1", testParent.nativeType()); - assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); - assertEquals(expectedFile, testParent.fileProto()); - assertEquals(expectedParentMessage1, testParent.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), testParent.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); - - assertEquals("MapFieldEntry", testMap.name()); - assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.fullName()); - assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.nativeType()); - assertEquals("[type=protobuf_messageParent1_MapFieldEntry]", testMap.fieldPathType()); - assertEquals(expectedFile, testMap.fileProto()); - assertEquals(expectedMap, testMap.messageProto()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())), testMap.schemaFieldDataType()); - assertEquals("ProtobufMessage[protobuf.messageParent1.MapFieldEntry]", testMap.toString()); - } - - @Test - public void messageEqualityTest() { - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .build(); - DescriptorProto expectedMessage2 = DescriptorProto.newBuilder() - .setName("message2") - .build(); - DescriptorProto expectedMessage1Dup = DescriptorProto.newBuilder() - .setName("message1") - .build(); - - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addAllMessageType(List.of(expectedMessage1, expectedMessage2, expectedMessage1Dup)) - .setPackage("protobuf") - .build(); - - - ProtobufMessage test1 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile) - .build(); - ProtobufMessage test2 = ProtobufMessage.builder() - .messageProto(expectedMessage2) - .fileProto(expectedFile) - .build(); - ProtobufMessage test1Dup = ProtobufMessage.builder() - .messageProto(expectedMessage1Dup) - .fileProto(expectedFile) - .build(); - - assertEquals(test1, test1Dup); - assertNotEquals(test1, test2); - assertEquals(Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); - } - - @Test - public void majorVersionTest() { - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .build(); - - FileDescriptorProto expectedFile1 = FileDescriptorProto.newBuilder() - .setName("zendesk/v1/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test1 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile1) - .build(); - assertEquals(1, test1.majorVersion()); - - FileDescriptorProto expectedFile2 = FileDescriptorProto.newBuilder() - .setName("zendesk/v2/platform/test.proto") - .setPackage("protobuf") - .build(); - ProtobufMessage test2 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile2) - .build(); - assertEquals(2, test2.majorVersion()); - - FileDescriptorProto expectedFile3 = FileDescriptorProto.newBuilder() - .setName("zendesk/platform/test.proto") - 
.setPackage("protobuf") - .build(); - ProtobufMessage test3 = ProtobufMessage.builder() - .messageProto(expectedMessage1) - .fileProto(expectedFile3) - .build(); - assertEquals(1, test3.majorVersion()); - } + @Test + public void messageTest() { + DescriptorProto expectedMessage = DescriptorProto.newBuilder().setName("message1").build(); + DescriptorProto expectedParentMessage1 = + DescriptorProto.newBuilder() + .setName("messageParent1") + .addNestedType(expectedMessage) + .build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .build(); + + ProtobufMessage testParent = + ProtobufMessage.builder() + .messageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + ProtobufMessage test = + ProtobufMessage.builder() + .messageProto(expectedMessage) + .parentMessageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + + assertEquals("messageParent1", testParent.name()); + assertEquals("protobuf.messageParent1", testParent.fullName()); + assertEquals("protobuf.messageParent1", testParent.nativeType()); + assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); + assertEquals(expectedFile, testParent.fileProto()); + assertEquals(expectedParentMessage1, testParent.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + testParent.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); + + assertEquals("message1", test.name()); + assertEquals("protobuf.messageParent1.message1", test.fullName()); + assertEquals("protobuf.messageParent1.message1", test.nativeType()); + assertEquals("[type=protobuf_messageParent1_message1]", test.fieldPathType()); + assertEquals(expectedFile, test.fileProto()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + test.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1.message1]", test.toString()); + } + + @Test + public void mapTest() { + DescriptorProto expectedMap = DescriptorProto.newBuilder().setName("MapFieldEntry").build(); + DescriptorProto expectedParentMessage1 = + DescriptorProto.newBuilder().setName("messageParent1").addNestedType(expectedMap).build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder().addMessageType(expectedMap).setPackage("protobuf").build(); + + ProtobufMessage testParent = + ProtobufMessage.builder() + .messageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + ProtobufMessage testMap = + ProtobufMessage.builder() + .messageProto(expectedMap) + .parentMessageProto(expectedParentMessage1) + .fileProto(expectedFile) + .build(); + + assertEquals("messageParent1", testParent.name()); + assertEquals("protobuf.messageParent1", testParent.fullName()); + assertEquals("protobuf.messageParent1", testParent.nativeType()); + assertEquals("[type=protobuf_messageParent1]", testParent.fieldPathType()); + assertEquals(expectedFile, testParent.fileProto()); + assertEquals(expectedParentMessage1, testParent.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType())), + testParent.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1]", testParent.toString()); + + assertEquals("MapFieldEntry", testMap.name()); + 
assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.fullName()); + assertEquals("protobuf.messageParent1.MapFieldEntry", testMap.nativeType()); + assertEquals("[type=protobuf_messageParent1_MapFieldEntry]", testMap.fieldPathType()); + assertEquals(expectedFile, testMap.fileProto()); + assertEquals(expectedMap, testMap.messageProto()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new MapType())), + testMap.schemaFieldDataType()); + assertEquals("ProtobufMessage[protobuf.messageParent1.MapFieldEntry]", testMap.toString()); + } + + @Test + public void messageEqualityTest() { + DescriptorProto expectedMessage1 = DescriptorProto.newBuilder().setName("message1").build(); + DescriptorProto expectedMessage2 = DescriptorProto.newBuilder().setName("message2").build(); + DescriptorProto expectedMessage1Dup = DescriptorProto.newBuilder().setName("message1").build(); + + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addAllMessageType(List.of(expectedMessage1, expectedMessage2, expectedMessage1Dup)) + .setPackage("protobuf") + .build(); + + ProtobufMessage test1 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile).build(); + ProtobufMessage test2 = + ProtobufMessage.builder().messageProto(expectedMessage2).fileProto(expectedFile).build(); + ProtobufMessage test1Dup = + ProtobufMessage.builder().messageProto(expectedMessage1Dup).fileProto(expectedFile).build(); + + assertEquals(test1, test1Dup); + assertNotEquals(test1, test2); + assertEquals( + Set.of(test1, test2), Stream.of(test1, test2, test1Dup).collect(Collectors.toSet())); + } + + @Test + public void majorVersionTest() { + DescriptorProto expectedMessage1 = DescriptorProto.newBuilder().setName("message1").build(); + + FileDescriptorProto expectedFile1 = + FileDescriptorProto.newBuilder() + .setName("zendesk/v1/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test1 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile1).build(); + assertEquals(1, test1.majorVersion()); + + FileDescriptorProto expectedFile2 = + FileDescriptorProto.newBuilder() + .setName("zendesk/v2/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test2 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile2).build(); + assertEquals(2, test2.majorVersion()); + + FileDescriptorProto expectedFile3 = + FileDescriptorProto.newBuilder() + .setName("zendesk/platform/test.proto") + .setPackage("protobuf") + .build(); + ProtobufMessage test3 = + ProtobufMessage.builder().messageProto(expectedMessage1).fileProto(expectedFile3).build(); + assertEquals(1, test3.majorVersion()); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java index 438e0a79206bd..c8bd8a322aad5 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/model/ProtobufOneOfFieldTest.java @@ -1,121 +1,146 @@ package datahub.protobuf.model; +import static org.junit.jupiter.api.Assertions.*; + import com.google.protobuf.DescriptorProtos.DescriptorProto; import com.google.protobuf.DescriptorProtos.FieldDescriptorProto; import com.google.protobuf.DescriptorProtos.FileDescriptorProto; import 
com.google.protobuf.DescriptorProtos.OneofDescriptorProto; import com.linkedin.schema.SchemaFieldDataType; import com.linkedin.schema.UnionType; -import org.junit.jupiter.api.Test; - import java.util.List; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class ProtobufOneOfFieldTest { - @Test - public void oneOfTest() { - OneofDescriptorProto expectedOneOf = OneofDescriptorProto.newBuilder() - .setName("oneof1") - .build(); - FieldDescriptorProto expectedField = FieldDescriptorProto.newBuilder() - .setName("field1") - .setOneofIndex(0) - .build(); - DescriptorProto expectedMessage = DescriptorProto.newBuilder() - .setName("message1") - .addOneofDecl(expectedOneOf) - .addField(expectedField) - .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addMessageType(expectedMessage) - .setPackage("protobuf") - .build(); + @Test + public void oneOfTest() { + OneofDescriptorProto expectedOneOf = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + FieldDescriptorProto expectedField = + FieldDescriptorProto.newBuilder().setName("field1").setOneofIndex(0).build(); + DescriptorProto expectedMessage = + DescriptorProto.newBuilder() + .setName("message1") + .addOneofDecl(expectedOneOf) + .addField(expectedField) + .build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addMessageType(expectedMessage) + .setPackage("protobuf") + .build(); - ProtobufOneOfField test = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage).build()) - .build(); + ProtobufOneOfField test = + ProtobufOneOfField.oneOfBuilder() + .fieldProto(expectedField) + .protobufMessage( + ProtobufMessage.builder() + .fileProto(expectedFile) + .messageProto(expectedMessage) + .build()) + .build(); - assertEquals("oneof1", test.name()); - assertEquals("protobuf.message1.oneof1", test.fullName()); - assertEquals("[type=union]", test.fieldPathType()); - assertEquals("oneof", test.nativeType()); - assertEquals(expectedOneOf, test.oneOfProto()); - assertEquals(expectedMessage, test.messageProto()); - assertEquals(expectedFile, test.fileProto()); - assertFalse(test.isMessage()); - assertEquals(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())), test.schemaFieldDataType()); - assertEquals("ProtobufOneOf[protobuf.message1.oneof1]", test.toString()); - } + assertEquals("oneof1", test.name()); + assertEquals("protobuf.message1.oneof1", test.fullName()); + assertEquals("[type=union]", test.fieldPathType()); + assertEquals("oneof", test.nativeType()); + assertEquals(expectedOneOf, test.oneOfProto()); + assertEquals(expectedMessage, test.messageProto()); + assertEquals(expectedFile, test.fileProto()); + assertFalse(test.isMessage()); + assertEquals( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType())), + test.schemaFieldDataType()); + assertEquals("ProtobufOneOf[protobuf.message1.oneof1]", test.toString()); + } - @Test - public void oneOfEqualityTest() { - OneofDescriptorProto oneof1Message1 = OneofDescriptorProto.newBuilder().setName("oneof1").build(); - OneofDescriptorProto oneof2Message1 = OneofDescriptorProto.newBuilder().setName("oneof2").build(); - OneofDescriptorProto oneof1Message2 = OneofDescriptorProto.newBuilder().setName("oneof1").build(); - OneofDescriptorProto 
oneof1Message1Dup = OneofDescriptorProto.newBuilder().setName("oneof1").build(); + @Test + public void oneOfEqualityTest() { + OneofDescriptorProto oneof1Message1 = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + OneofDescriptorProto oneof2Message1 = + OneofDescriptorProto.newBuilder().setName("oneof2").build(); + OneofDescriptorProto oneof1Message2 = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); + OneofDescriptorProto oneof1Message1Dup = + OneofDescriptorProto.newBuilder().setName("oneof1").build(); - FieldDescriptorProto expectedField1 = FieldDescriptorProto.newBuilder() - .setName("field1") - .setOneofIndex(0) - .build(); - FieldDescriptorProto expectedField2 = FieldDescriptorProto.newBuilder() - .setName("field2") - .setOneofIndex(1) - .build(); - FieldDescriptorProto expectedField1Dup = FieldDescriptorProto.newBuilder() - .setName("field3") - .setOneofIndex(3) - .build(); - DescriptorProto expectedMessage1 = DescriptorProto.newBuilder() - .setName("message1") - .addAllOneofDecl(List.of(oneof1Message1, oneof2Message1, oneof1Message1Dup)) - .addField(expectedField1) - .addField(expectedField2) - .addField(expectedField1Dup) - .build(); + FieldDescriptorProto expectedField1 = + FieldDescriptorProto.newBuilder().setName("field1").setOneofIndex(0).build(); + FieldDescriptorProto expectedField2 = + FieldDescriptorProto.newBuilder().setName("field2").setOneofIndex(1).build(); + FieldDescriptorProto expectedField1Dup = + FieldDescriptorProto.newBuilder().setName("field3").setOneofIndex(3).build(); + DescriptorProto expectedMessage1 = + DescriptorProto.newBuilder() + .setName("message1") + .addAllOneofDecl(List.of(oneof1Message1, oneof2Message1, oneof1Message1Dup)) + .addField(expectedField1) + .addField(expectedField2) + .addField(expectedField1Dup) + .build(); - FieldDescriptorProto expectedField3 = FieldDescriptorProto.newBuilder() - .setName("field3") - .setOneofIndex(0) - .build(); - DescriptorProto expectedMessage2 = DescriptorProto.newBuilder() - .setName("message2") - .addAllOneofDecl(List.of(oneof1Message2)) - .addField(expectedField3) - .build(); + FieldDescriptorProto expectedField3 = + FieldDescriptorProto.newBuilder().setName("field3").setOneofIndex(0).build(); + DescriptorProto expectedMessage2 = + DescriptorProto.newBuilder() + .setName("message2") + .addAllOneofDecl(List.of(oneof1Message2)) + .addField(expectedField3) + .build(); - FileDescriptorProto expectedFile = FileDescriptorProto.newBuilder() - .addAllMessageType(List.of(expectedMessage1, expectedMessage2)) - .setPackage("protobuf") - .build(); + FileDescriptorProto expectedFile = + FileDescriptorProto.newBuilder() + .addAllMessageType(List.of(expectedMessage1, expectedMessage2)) + .setPackage("protobuf") + .build(); - ProtobufOneOfField test1 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField1) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test1Dup = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField1) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test2 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField2) - .protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage1).build()) - .build(); - ProtobufOneOfField test3 = ProtobufOneOfField.oneOfBuilder() - .fieldProto(expectedField3) - 
.protobufMessage(ProtobufMessage.builder().fileProto(expectedFile).messageProto(expectedMessage2).build())
- .build();
+ ProtobufOneOfField test1 =
+ ProtobufOneOfField.oneOfBuilder()
+ .fieldProto(expectedField1)
+ .protobufMessage(
+ ProtobufMessage.builder()
+ .fileProto(expectedFile)
+ .messageProto(expectedMessage1)
+ .build())
+ .build();
+ ProtobufOneOfField test1Dup =
+ ProtobufOneOfField.oneOfBuilder()
+ .fieldProto(expectedField1)
+ .protobufMessage(
+ ProtobufMessage.builder()
+ .fileProto(expectedFile)
+ .messageProto(expectedMessage1)
+ .build())
+ .build();
+ ProtobufOneOfField test2 =
+ ProtobufOneOfField.oneOfBuilder()
+ .fieldProto(expectedField2)
+ .protobufMessage(
+ ProtobufMessage.builder()
+ .fileProto(expectedFile)
+ .messageProto(expectedMessage1)
+ .build())
+ .build();
+ ProtobufOneOfField test3 =
+ ProtobufOneOfField.oneOfBuilder()
+ .fieldProto(expectedField3)
+ .protobufMessage(
+ ProtobufMessage.builder()
+ .fileProto(expectedFile)
+ .messageProto(expectedMessage2)
+ .build())
+ .build();

- assertEquals(test1, test1Dup);
- assertNotEquals(test1, test3);
- assertNotEquals(test1, test2);
- assertEquals(Set.of(test1, test2, test3), Stream.of(test1, test2, test3, test1Dup).collect(Collectors.toSet()));
- }
+ assertEquals(test1, test1Dup);
+ assertNotEquals(test1, test3);
+ assertNotEquals(test1, test2);
+ assertEquals(
+ Set.of(test1, test2, test3),
+ Stream.of(test1, test2, test3, test1Dup).collect(Collectors.toSet()));
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java
index ceebefb3a207e..2fc5f3834a749 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/VisitContextTest.java
@@ -1,38 +1,43 @@
package datahub.protobuf.visitors;
+import static datahub.protobuf.TestFixtures.getTestProtobufFileSet;
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import datahub.protobuf.model.FieldTypeEdge;
import datahub.protobuf.model.ProtobufElement;
import datahub.protobuf.model.ProtobufGraph;
-import org.jgrapht.GraphPath;
-import org.junit.jupiter.api.Test;
-
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufFileSet;
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import org.jgrapht.GraphPath;
+import org.junit.jupiter.api.Test;

public class VisitContextTest {
- @Test
- public void duplicateNestedTest() throws IOException {
- FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB");
- ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageB");
- VisitContext test = VisitContext.builder().graph(graph).build();
-
- List<ProtobufElement> nestedMessages = graph.vertexSet().stream().filter(f -> f.name().endsWith("nested"))
- .collect(Collectors.toList());
-
- List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsA = graph.getAllPaths(graph.root(), nestedMessages.get(0));
- List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsB = graph.getAllPaths(graph.root(), nestedMessages.get(1));
- assertNotEquals(nestedPathsA, nestedPathsB);
-
- Set<String> fieldPathsA = nestedPathsA.stream().map(test::getFieldPath).collect(Collectors.toSet());
- Set<String> fieldPathsB = nestedPathsB.stream().map(test::getFieldPath).collect(Collectors.toSet());
- assertNotEquals(fieldPathsA, fieldPathsB);
- }
+ @Test
+ public void duplicateNestedTest() throws IOException {
+ FileDescriptorSet fileset = getTestProtobufFileSet("protobuf", "messageB");
+ ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageB");
+ VisitContext test = VisitContext.builder().graph(graph).build();
+
+ List<ProtobufElement> nestedMessages =
+ graph.vertexSet().stream()
+ .filter(f -> f.name().endsWith("nested"))
+ .collect(Collectors.toList());
+
+ List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsA =
+ graph.getAllPaths(graph.root(), nestedMessages.get(0));
+ List<GraphPath<ProtobufElement, FieldTypeEdge>> nestedPathsB =
+ graph.getAllPaths(graph.root(), nestedMessages.get(1));
+ assertNotEquals(nestedPathsA, nestedPathsB);
+
+ Set<String> fieldPathsA =
+ nestedPathsA.stream().map(test::getFieldPath).collect(Collectors.toSet());
+ Set<String> fieldPathsB =
+ nestedPathsB.stream().map(test::getFieldPath).collect(Collectors.toSet());
+ assertNotEquals(fieldPathsA, fieldPathsB);
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java
index fb51f42a6c759..de9a0f5ec4abe 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DatasetVisitorTest.java
@@ -1,56 +1,59 @@
package datahub.protobuf.visitors.dataset;
+import static datahub.protobuf.TestFixtures.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import com.linkedin.common.urn.DatasetUrn;
import com.linkedin.data.template.RecordTemplate;
-import org.junit.jupiter.api.Test;
-
+import datahub.event.MetadataChangeProposalWrapper;
+import datahub.protobuf.ProtobufDataset;
+import datahub.protobuf.visitors.ProtobufModelVisitor;
+import datahub.protobuf.visitors.VisitContext;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-
-import datahub.protobuf.ProtobufDataset;
-import datahub.protobuf.visitors.ProtobufModelVisitor;
-import datahub.protobuf.visitors.VisitContext;
-import datahub.event.MetadataChangeProposalWrapper;
-
-import static datahub.protobuf.TestFixtures.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;

public class DatasetVisitorTest {
- @Test
- public void protocBase64Test() throws URISyntaxException, IOException {
- String expected = "23454345452345233455";
- DatasetVisitor test = DatasetVisitor.builder().protocBase64(expected).build();
-
- List<MetadataChangeProposalWrapper<? extends RecordTemplate>> changes =
- test.visitGraph(
- VisitContext.builder()
- .auditStamp(TEST_AUDIT_STAMP)
- .datasetUrn(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"))
- .graph(getTestProtobufGraph("protobuf", "messageA")).build()
- ).collect(Collectors.toList());
-
- assertEquals(expected, extractCustomProperty(changes.get(0), "protoc"));
- }
-
- @Test
- public void customDescriptionVisitors() throws IOException {
- ProtobufDataset testDataset = getTestProtobufDataset("protobuf", "messageA");
-
- DatasetVisitor test = DatasetVisitor.builder()
- .descriptionVisitor(new ProtobufModelVisitor<String>() {
- @Override
- public Stream<String> visitGraph(VisitContext context) {
- return Stream.of("Test Description");
- }
+ @Test
+ public void protocBase64Test() throws URISyntaxException, IOException {
+ String expected = "23454345452345233455";
+ DatasetVisitor test = DatasetVisitor.builder().protocBase64(expected).build();
+
+ List<MetadataChangeProposalWrapper<? extends RecordTemplate>> changes =
+ test.visitGraph(
+ VisitContext.builder()
+ .auditStamp(TEST_AUDIT_STAMP)
+ .datasetUrn(
+ DatasetUrn.createFromString(
+ "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"))
+ .graph(getTestProtobufGraph("protobuf", "messageA"))
+ .build())
+ .collect(Collectors.toList());
+
+ assertEquals(expected, extractCustomProperty(changes.get(0), "protoc"));
+ }
+
+ @Test
+ public void customDescriptionVisitors() throws IOException {
+ ProtobufDataset testDataset = getTestProtobufDataset("protobuf", "messageA");
+
+ DatasetVisitor test =
+ DatasetVisitor.builder()
+ .descriptionVisitor(
+ new ProtobufModelVisitor<String>() {
+ @Override
+ public Stream<String> visitGraph(VisitContext context) {
+ return Stream.of("Test Description");
+ }
})
- .build();
- testDataset.setDatasetVisitor(test);
+ .build();
+ testDataset.setDatasetVisitor(test);

- assertEquals("Test Description", extractAspect(testDataset.getDatasetMCPs().get(0), "description"));
- }
+ assertEquals(
+ "Test Description", extractAspect(testDataset.getDatasetMCPs().get(0), "description"));
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java
index 4edc65b29d663..679048fb48a53 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DescriptionVisitorTest.java
@@ -1,26 +1,27 @@
package datahub.protobuf.visitors.dataset;
-import datahub.protobuf.model.ProtobufGraph;
-import org.junit.jupiter.api.Test;
+import static datahub.protobuf.TestFixtures.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;

+import datahub.protobuf.model.ProtobufGraph;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;

public class DescriptionVisitorTest {
- @Test
- public void visitorTest() throws IOException {
- ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageC2", "protobuf.MessageC2");
+ @Test
+ public void visitorTest() throws IOException {
+ ProtobufGraph graph = getTestProtobufGraph("protobuf", "messageC2", "protobuf.MessageC2");

- DescriptionVisitor test = new DescriptionVisitor();
+ DescriptionVisitor test = new DescriptionVisitor();

- assertEquals(Set.of("This contains nested type\n\nDescription for MessageC2"),
- graph.accept(getVisitContextBuilder("protobuf.MessageC2"), List.of(test)).collect(Collectors.toSet()));
- }
+ assertEquals(
+ Set.of("This contains nested type\n\nDescription for MessageC2"),
+ graph
+ .accept(getVisitContextBuilder("protobuf.MessageC2"), List.of(test))
+ .collect(Collectors.toSet()));
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java
b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java index b3fa2c8fd081b..c24fc30766f0e 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/DomainVisitorTest.java @@ -1,28 +1,29 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class DomainVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); - DomainVisitor test = new DomainVisitor(); + DomainVisitor test = new DomainVisitor(); - assertEquals(Set.of(Urn.createFromTuple("domain", "engineering")), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)).collect(Collectors.toSet())); - } + assertEquals( + Set.of(Urn.createFromTuple("domain", "engineering")), + graph + .accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java index 09fc0a3765436..a57916441bfcb 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/InstitutionalMemoryVisitorTest.java @@ -1,68 +1,70 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class InstitutionalMemoryVisitorTest { - @Test - public void messageATest() throws IOException { - InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); - assertEquals(Set.of(new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("Slack Channel") - .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("Github Team") 
- .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 1") - .setUrl(new Url("https://some/link")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 2") - .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA Reference 3") - .setUrl(new Url("https://github.com/apache/kafka")), - new InstitutionalMemoryMetadata() - .setCreateStamp(TEST_AUDIT_STAMP) - .setDescription("MessageA.map_field Reference 1") - .setUrl(new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps")) - ), - - getTestProtobufGraph("protobuf", "messageA") - .accept(getVisitContextBuilder("protobuf.MessageA"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void messageATest() throws IOException { + InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); + assertEquals( + Set.of( + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("Slack Channel") + .setUrl(new Url("https://slack.com/app_redirect?channel=test-slack&team=SLACK123")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("Github Team") + .setUrl(new Url("https://github.com/orgs/myOrg/teams/teama")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 1") + .setUrl(new Url("https://some/link")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 2") + .setUrl(new Url("https://www.google.com/search?q=protobuf+messages")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA Reference 3") + .setUrl(new Url("https://github.com/apache/kafka")), + new InstitutionalMemoryMetadata() + .setCreateStamp(TEST_AUDIT_STAMP) + .setDescription("MessageA.map_field Reference 1") + .setUrl( + new Url("https://developers.google.com/protocol-buffers/docs/proto3#maps"))), + getTestProtobufGraph("protobuf", "messageA") + .accept(getVisitContextBuilder("protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } - @Test - public void messageBTest() throws IOException { - InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); - assertEquals(Set.of(), - getTestProtobufGraph("protobuf", "messageB") - .accept(getVisitContextBuilder("protobuf.MessageB"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void messageBTest() throws IOException { + InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); + assertEquals( + Set.of(), + getTestProtobufGraph("protobuf", "messageB") + .accept(getVisitContextBuilder("protobuf.MessageB"), List.of(test)) + .collect(Collectors.toSet())); + } - @Test - public void messageCTest() throws IOException { - InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); - assertEquals(Set.of(), getTestProtobufGraph("protobuf", "messageC") - .accept(getVisitContextBuilder("protobuf.MessageC"), - List.of(test)).collect(Collectors.toSet())); - } + @Test + public void messageCTest() throws IOException { + InstitutionalMemoryVisitor test = new InstitutionalMemoryVisitor("SLACK123", "myOrg"); + assertEquals( + Set.of(), + getTestProtobufGraph("protobuf", 
"messageC") + .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java index 971500b5f43a2..5f8572cf6ddd8 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/KafkaTopicPropertyVisitorTest.java @@ -1,36 +1,39 @@ package datahub.protobuf.visitors.dataset; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.data.template.StringMap; import com.linkedin.dataset.DatasetProperties; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class KafkaTopicPropertyVisitorTest { - @Test - public void visitorTest() throws IOException { - KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); - assertEquals(List.of(new DatasetProperties() - .setCustomProperties(new StringMap(Map.of("kafka_topic", "platform.topic")))), - getTestProtobufGraph("protobuf", "messageA") - .accept(getVisitContextBuilder("MessageB"), - List.of(test)).collect(Collectors.toList())); - } + @Test + public void visitorTest() throws IOException { + KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); + assertEquals( + List.of( + new DatasetProperties() + .setCustomProperties(new StringMap(Map.of("kafka_topic", "platform.topic")))), + getTestProtobufGraph("protobuf", "messageA") + .accept(getVisitContextBuilder("MessageB"), List.of(test)) + .collect(Collectors.toList())); + } - @Test - public void visitorEmptyTest() throws IOException { - KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); - assertEquals(Set.of(), getTestProtobufGraph("protobuf", "messageB") - .accept(getVisitContextBuilder("MessageB"), List.of(test)).collect(Collectors.toSet())); - } + @Test + public void visitorEmptyTest() throws IOException { + KafkaTopicPropertyVisitor test = new KafkaTopicPropertyVisitor(); + assertEquals( + Set.of(), + getTestProtobufGraph("protobuf", "messageB") + .accept(getVisitContextBuilder("MessageB"), List.of(test)) + .collect(Collectors.toSet())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java index b087c683f9ffe..1b0aff28eb517 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/OwnershipVisitorTest.java @@ -1,58 +1,62 @@ package datahub.protobuf.visitors.dataset; +import static 
datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import datahub.protobuf.model.ProtobufGraph; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class OwnershipVisitorTest { - @Test - public void visitorTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); - - OwnershipVisitor test = new OwnershipVisitor(); - - assertEquals(Set.of(new Owner() - .setType(OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpGroup", "teamb")), - new Owner() - .setType(OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpuser", "datahub")), - new Owner() - .setType(OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpGroup", "technicalowner")) - ), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)).collect(Collectors.toSet())); - } - - @Test - public void visitorSingleOwnerTest() throws IOException { - ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageB"); - - OwnershipVisitor test = new OwnershipVisitor(); - - assertEquals(Set.of(new Owner() - .setType(OwnershipType.DATA_STEWARD) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - .setOwner(Urn.createFromTuple("corpuser", "datahub")) - ), - graph.accept(getVisitContextBuilder("extended_protobuf.MessageB"), List.of(test)).collect(Collectors.toSet())); - } + @Test + public void visitorTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageA"); + + OwnershipVisitor test = new OwnershipVisitor(); + + assertEquals( + Set.of( + new Owner() + .setType(OwnershipType.TECHNICAL_OWNER) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpGroup", "teamb")), + new Owner() + .setType(OwnershipType.TECHNICAL_OWNER) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpuser", "datahub")), + new Owner() + .setType(OwnershipType.TECHNICAL_OWNER) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + .setOwner(Urn.createFromTuple("corpGroup", "technicalowner"))), + graph + .accept(getVisitContextBuilder("extended_protobuf.MessageA"), List.of(test)) + .collect(Collectors.toSet())); + } + + @Test + public void visitorSingleOwnerTest() throws IOException { + ProtobufGraph graph = getTestProtobufGraph("extended_protobuf", "messageB"); + + OwnershipVisitor test = new OwnershipVisitor(); + + assertEquals( + Set.of( + new Owner() + .setType(OwnershipType.DATA_STEWARD) + .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) + 
.setOwner(Urn.createFromTuple("corpuser", "datahub"))),
+ graph
+ .accept(getVisitContextBuilder("extended_protobuf.MessageB"), List.of(test))
+ .collect(Collectors.toSet()));
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java
index dc3647cdf34c8..13912100f28a5 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/PropertyVisitorTest.java
@@ -1,58 +1,68 @@
package datahub.protobuf.visitors.dataset;
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static java.util.Map.entry;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import com.linkedin.data.template.StringMap;
import com.linkedin.dataset.DatasetProperties;
-import org.junit.jupiter.api.Test;
-
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
+import org.junit.jupiter.api.Test;

-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static java.util.Map.entry;
-import static org.junit.jupiter.api.Assertions.assertEquals;
+public class PropertyVisitorTest {
+ @Test
+ public void extendedMessageTest() throws IOException {
+ PropertyVisitor test = new PropertyVisitor();

-public class PropertyVisitorTest {
+ List<DatasetProperties> actual =
+ getTestProtobufGraph("extended_protobuf", "messageA")
+ .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+ .collect(Collectors.toList());
+
+ assertEquals(
+ List.of(
+ new DatasetProperties()
+ .setCustomProperties(
+ new StringMap(
+ Map.ofEntries(
+ entry("classification_enum", "HighlyConfidential"),
+ entry("bool_feature", "true"),
+ entry("alert_channel", "#alerts"),
+ entry("repeat_enum", "[\"ENTITY\",\"EVENT\"]"),
+ entry("team", "[\"corpGroup:TeamB\",\"corpUser:datahub\"]"),
+ entry("technical_owner", "[\"corpGroup:TechnicalOwner\"]"),
+ entry("tag_list", "a, b, c"),
+ entry("domain", "Engineering"),
+ entry("repeat_string", "[\"a\",\"b\"]"),
+ entry("type", "ENTITY"))))),
+ actual);
+ }
+
+ @Test
+ public void extendedFieldTest() throws IOException {
+ PropertyVisitor test = new PropertyVisitor();
+ List<DatasetProperties> actual =
+ getTestProtobufGraph("extended_protobuf", "messageB")
+ .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+ .collect(Collectors.toList());

- @Test
- public void extendedMessageTest() throws IOException {
- PropertyVisitor test = new PropertyVisitor();
-
- List<DatasetProperties> actual = getTestProtobufGraph("extended_protobuf", "messageA")
- .accept(getVisitContextBuilder("extended_protobuf.Person"),
- List.of(test)).collect(Collectors.toList());
-
- assertEquals(List.of(
- new DatasetProperties().setCustomProperties(new StringMap(Map.ofEntries(
- entry("classification_enum", "HighlyConfidential"),
- entry("bool_feature", "true"),
- entry("alert_channel", "#alerts"),
- entry("repeat_enum", "[\"ENTITY\",\"EVENT\"]"),
- entry("team", "[\"corpGroup:TeamB\",\"corpUser:datahub\"]"),
- entry("technical_owner", "[\"corpGroup:TechnicalOwner\"]"),
- entry("tag_list", "a, b, c"),
- entry("domain", "Engineering"),
- entry("repeat_string", "[\"a\",\"b\"]"),
- entry("type", "ENTITY"))))),
- actual);
- }
-
- @Test
- public void extendedFieldTest() throws IOException {
- PropertyVisitor test = new PropertyVisitor();
- List<DatasetProperties> actual = getTestProtobufGraph("extended_protobuf", "messageB")
- .accept(getVisitContextBuilder("extended_protobuf.Person"),
- List.of(test)).collect(Collectors.toList());
-
- assertEquals(List.of(new DatasetProperties()
- .setCustomProperties(new StringMap(Map.ofEntries(
- entry("data_steward", "corpUser:datahub"),
- entry("deprecated", "true"),
- entry("deprecation_note", "[\"Deprecated for this other message.\",\"Drop in replacement.\"]"),
- entry("deprecation_time", "1649689387")
- )))), actual);
- }
+ assertEquals(
+ List.of(
+ new DatasetProperties()
+ .setCustomProperties(
+ new StringMap(
+ Map.ofEntries(
+ entry("data_steward", "corpUser:datahub"),
+ entry("deprecated", "true"),
+ entry(
+ "deprecation_note",
+ "[\"Deprecated for this other message.\",\"Drop in replacement.\"]"),
+ entry("deprecation_time", "1649689387"))))),
+ actual);
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java
index c140a798ef6e6..f734c00bb76e0 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/dataset/TermAssociationVisitorTest.java
@@ -1,42 +1,42 @@
package datahub.protobuf.visitors.dataset;
+import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
+import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import com.linkedin.common.GlossaryTermAssociation;
import com.linkedin.common.urn.GlossaryTermUrn;
-import org.junit.jupiter.api.Test;
-
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
-
-import static datahub.protobuf.TestFixtures.getTestProtobufGraph;
-import static datahub.protobuf.TestFixtures.getVisitContextBuilder;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;

public class TermAssociationVisitorTest {
- @Test
- public void extendedMessageTest() throws IOException {
- TermAssociationVisitor test = new TermAssociationVisitor();
- assertEquals(Set.of(
- new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("a")),
- new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("b")),
- new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.ENTITY")),
- new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.EVENT")),
- new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential"))
- ),
- getTestProtobufGraph("extended_protobuf", "messageA")
- .accept(getVisitContextBuilder("extended_protobuf.Person"),
- List.of(test)).collect(Collectors.toSet()));
- }
+ @Test
+ public void extendedMessageTest() throws IOException {
+ TermAssociationVisitor test = new TermAssociationVisitor();
+ assertEquals(
+ Set.of(
+ new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("a")),
+ new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("b")),
+ new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.ENTITY")),
+ new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("MetaEnumExample.EVENT")),
+ new GlossaryTermAssociation()
+ .setUrn(new GlossaryTermUrn("Classification.HighlyConfidential"))),
+ getTestProtobufGraph("extended_protobuf", "messageA")
+ .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+ .collect(Collectors.toSet()));
+ }

- @Test
- public void extendedFieldTest() throws IOException {
- TermAssociationVisitor test = new TermAssociationVisitor();
- assertEquals(
- Set.of(),
- getTestProtobufGraph("extended_protobuf", "messageB").
- accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)).collect(Collectors.toSet()));
- }
+ @Test
+ public void extendedFieldTest() throws IOException {
+ TermAssociationVisitor test = new TermAssociationVisitor();
+ assertEquals(
+ Set.of(),
+ getTestProtobufGraph("extended_protobuf", "messageB")
+ .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+ .collect(Collectors.toSet()));
+ }
}
diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java
index 57a8cf1d63cd2..eec397011a4ce 100644
--- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java
+++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/ProtobufExtensionFieldVisitorTest.java
@@ -1,5 +1,8 @@
package datahub.protobuf.visitors.field;
+import static datahub.protobuf.TestFixtures.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import com.linkedin.common.GlobalTags;
import com.linkedin.common.GlossaryTermAssociation;
import com.linkedin.common.GlossaryTermAssociationArray;
@@ -15,207 +18,303 @@
import com.linkedin.schema.StringType;
import com.linkedin.util.Pair;
import datahub.protobuf.ProtobufDataset;
-import org.junit.jupiter.api.Test;
-
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-
-import static datahub.protobuf.TestFixtures.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
+import org.junit.jupiter.api.Test;

public class ProtobufExtensionFieldVisitorTest {
- @Test
- public void extendedMessageTest() throws IOException, URISyntaxException {
- ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor();
- List<SchemaField> actual = getTestProtobufGraph("extended_protobuf", "messageA")
- .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
- .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH))
- .map(Pair::getFirst)
- .collect(Collectors.toList());
+ @Test
+ public void extendedMessageTest() throws IOException, URISyntaxException {
+ ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor();
+ List<SchemaField> actual =
+ getTestProtobufGraph("extended_protobuf", "messageA")
+ .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test))
+ .sorted(
+ ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(
+ ProtobufDataset.COMPARE_BY_FIELD_PATH))
+ .map(Pair::getFirst)
+ .collect(Collectors.toList());

- List<SchemaField> expected = Stream.of(
+ List<SchemaField> expected =
+ Stream.of(
Pair.of(
- new SchemaField()
- .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name")
- .setNullable(true)
-
.setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 2), + new SchemaField() + .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].email") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 3), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].email") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 3), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("extended_protobuf.Department") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - new TagAssociation().setTag(new TagUrn("MetaEnumExample.ENTITY")) - ))) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new 
GlossaryTermUrn("Classification.Sensitive")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("extended_protobuf.Department") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + new TagAssociation() + .setTag(new TagUrn("MetaEnumExample.ENTITY"))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn("Classification.Sensitive")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4) - ).map(Pair::getFirst).collect(Collectors.toList()); - + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - assertEquals(expected, actual); - } + assertEquals(expected, actual); + } - @Test - public void extendedFieldTest() throws IOException { - ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); - List actual = getTestProtobufGraph("extended_protobuf", "messageB") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - 
.sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList()); + @Test + public void extendedFieldTest() throws IOException { + ProtobufExtensionFieldVisitor test = new ProtobufExtensionFieldVisitor(); + List actual = + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - List expected = Stream.of( + List expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("person name") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("Classification.HighlyConfidential")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("person name") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn( + "Classification.HighlyConfidential")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") - .setNullable(false) - .setIsPartOfKey(true) - .setDescription("unique identifier for a given person") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 2), + new SchemaField() + .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=int].id") + .setNullable(false) + .setIsPartOfKey(true) + .setDescription("unique identifier for a given person") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].email") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("official email address") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - new GlossaryTermAssociation().setUrn(new 
GlossaryTermUrn("Classification.HighlyConfidential")) - )).setAuditStamp(TEST_AUDIT_STAMP)), - 3), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].email") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("official email address") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + new GlossaryTermAssociation() + .setUrn( + new GlossaryTermUrn( + "Classification.HighlyConfidential")))) + .setAuditStamp(TEST_AUDIT_STAMP)), + 3), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("department name of the person") - .setNativeDataType("extended_protobuf.Department") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new RecordType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("department name of the person") + .setNativeDataType("extended_protobuf.Department") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new RecordType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") - .setNullable(false) - .setIsPartOfKey(true) - .setDescription("") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=int].id") + .setNullable(false) + .setIsPartOfKey(true) + .setDescription("") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 4), + new 
SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=extended_protobuf_Department].dept.[type=string].name") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray())) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 4), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].test_coverage") - .setNullable(true) - .setIsPartOfKey(false) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - new TagAssociation().setTag(new TagUrn("MetaEnumExample.EVENT")), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=extended_protobuf_Person].[type=string].test_coverage") + .setNullable(true) + .setIsPartOfKey(false) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + new TagAssociation() + .setTag(new TagUrn("MetaEnumExample.EVENT")), new TagAssociation().setTag(new TagUrn("d")), new TagAssociation().setTag(new TagUrn("deprecated")), new TagAssociation().setTag(new TagUrn("e")), new TagAssociation().setTag(new TagUrn("f")), - new TagAssociation().setTag(new TagUrn("product_type.my type")), - new TagAssociation().setTag(new TagUrn("product_type_bool")) - ))) - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray()).setAuditStamp(TEST_AUDIT_STAMP)), - 5) - ).map(Pair::getFirst).collect(Collectors.toList()); + new TagAssociation() + .setTag(new TagUrn("product_type.my type")), + new TagAssociation() + .setTag(new TagUrn("product_type_bool"))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms(new GlossaryTermAssociationArray()) + .setAuditStamp(TEST_AUDIT_STAMP)), + 5)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - assertEquals(expected, actual); - } + assertEquals(expected, actual); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java index 1da29b5320637..af31a80d3b53a 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/field/SchemaFieldVisitorTest.java @@ -1,5 +1,9 @@ package datahub.protobuf.visitors.field; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.schema.NumberType; import com.linkedin.schema.SchemaField; import com.linkedin.schema.SchemaFieldDataType; @@ -7,62 +11,73 @@ import com.linkedin.schema.UnionType; import com.linkedin.util.Pair; import datahub.protobuf.ProtobufDataset; -import org.junit.jupiter.api.Test; - import java.io.IOException; import java.util.List; import java.util.stream.Collectors; 
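The hunks above and below reflow deeply nested record-builder chains without changing what they build. As a reading aid, here is a minimal, hedged sketch of the builder pattern these tests assert against; it assumes DataHub's metadata-models classes are on the classpath, and the class name SchemaFieldSketch is illustrative:

import com.linkedin.schema.SchemaField;
import com.linkedin.schema.SchemaFieldDataType;
import com.linkedin.schema.StringType;

public class SchemaFieldSketch {
  public static void main(String[] args) {
    // Same record-builder chain the tests compare against; only the
    // formatter's line wrapping differs between the two sides of the diff.
    SchemaField field =
        new SchemaField()
            .setFieldPath("[version=2.0].[type=extended_protobuf_Person].[type=string].name")
            .setNullable(true)
            .setNativeDataType("string")
            .setType(
                new SchemaFieldDataType()
                    .setType(SchemaFieldDataType.Type.create(new StringType())));
    System.out.println(field.getFieldPath());
  }
}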
import java.util.stream.Stream; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class SchemaFieldVisitorTest { - @Test - public void visitorTest() throws IOException { - List expected = Stream.of( + @Test + public void visitorTest() throws IOException { + List expected = + Stream.of( Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") - .setNullable(true) - .setDescription("one of field comment") - .setNativeDataType("oneof") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new UnionType()))), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field") + .setNullable(true) + .setDescription("one of field comment") + .setNativeDataType("oneof") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new UnionType()))), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") - .setNullable(true) - .setDescription("one of string comment") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))), - 1), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=string].one_of_string") + .setNullable(true) + .setDescription("one of string comment") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))), + 1), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") - .setNullable(true) - .setDescription("one of int comment") - .setNativeDataType("int32") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new NumberType()))), - 2), + new SchemaField() + .setFieldPath( + "[version=2.0].[type=protobuf_MessageC].[type=union].one_of_field.[type=int].one_of_int") + .setNullable(true) + .setDescription("one of int comment") + .setNativeDataType("int32") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new NumberType()))), + 2), Pair.of( - new SchemaField() - .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=string].normal") - .setNullable(true) - .setDescription("") - .setNativeDataType("string") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))), - 4) - ).map(Pair::getFirst).collect(Collectors.toList()); + new SchemaField() + .setFieldPath("[version=2.0].[type=protobuf_MessageC].[type=string].normal") + .setNullable(true) + .setDescription("") + .setNativeDataType("string") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))), + 4)) + .map(Pair::getFirst) + .collect(Collectors.toList()); - SchemaFieldVisitor test = new SchemaFieldVisitor(); - assertEquals(expected, getTestProtobufGraph("protobuf", "messageC") - .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test)) - .sorted(ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing(ProtobufDataset.COMPARE_BY_FIELD_PATH)) - .map(Pair::getFirst) - .collect(Collectors.toList())); - } + SchemaFieldVisitor test = new SchemaFieldVisitor(); + 
assertEquals( + expected, + getTestProtobufGraph("protobuf", "messageC") + .accept(getVisitContextBuilder("protobuf.MessageC"), List.of(test)) + .sorted( + ProtobufDataset.COMPARE_BY_ROOT_MESSAGE_FIELD_WEIGHT.thenComparing( + ProtobufDataset.COMPARE_BY_FIELD_PATH)) + .map(Pair::getFirst) + .collect(Collectors.toList())); + } } diff --git a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java index 84ab1312a7d8a..258d816d9d1da 100644 --- a/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java +++ b/metadata-integration/java/datahub-protobuf/src/test/java/datahub/protobuf/visitors/tag/TagVisitorTest.java @@ -1,89 +1,69 @@ package datahub.protobuf.visitors.tag; +import static datahub.protobuf.TestFixtures.getTestProtobufGraph; +import static datahub.protobuf.TestFixtures.getVisitContextBuilder; +import static org.junit.jupiter.api.Assertions.assertEquals; + import com.linkedin.tag.TagProperties; -import datahub.protobuf.visitors.tags.TagVisitor; import datahub.event.MetadataChangeProposalWrapper; -import org.junit.jupiter.api.Test; - +import datahub.protobuf.visitors.tags.TagVisitor; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.stream.Collectors; - -import static datahub.protobuf.TestFixtures.getTestProtobufGraph; -import static datahub.protobuf.TestFixtures.getVisitContextBuilder; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.junit.jupiter.api.Test; public class TagVisitorTest { - @Test - public void extendedMessageTest() throws IOException { - TagVisitor test = new TagVisitor(); - assertEquals(Set.of( - new TagProperties() - .setName("bool_feature") - .setDescription("meta.msg.bool_feature is true."), - new TagProperties() - .setName("MetaEnumExample.ENTITY") - .setDescription("Enum MetaEnumExample.ENTITY of {UNKNOWN, ENTITY, EVENT}"), - new TagProperties() - .setName("MetaEnumExample.EVENT") - .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), - new TagProperties() - .setName("a") - .setDescription("meta.msg.tag_list"), - new TagProperties() - .setName("b") - .setDescription("meta.msg.tag_list"), - new TagProperties() - .setName("c") - .setDescription("meta.msg.tag_list"), - new TagProperties() - .setName("repeat_string.a") - .setDescription("meta.msg.repeat_string"), - new TagProperties() - .setName("repeat_string.b") - .setDescription("meta.msg.repeat_string"), - new TagProperties() - .setName("deprecated") - .setColorHex("#FF0000") - ), getTestProtobufGraph("extended_protobuf", "messageA") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) - .map(MetadataChangeProposalWrapper::getAspect) - .collect(Collectors.toSet())); - } + @Test + public void extendedMessageTest() throws IOException { + TagVisitor test = new TagVisitor(); + assertEquals( + Set.of( + new TagProperties() + .setName("bool_feature") + .setDescription("meta.msg.bool_feature is true."), + new TagProperties() + .setName("MetaEnumExample.ENTITY") + .setDescription("Enum MetaEnumExample.ENTITY of {UNKNOWN, ENTITY, EVENT}"), + new TagProperties() + .setName("MetaEnumExample.EVENT") + .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), + new TagProperties().setName("a").setDescription("meta.msg.tag_list"), + new 
TagProperties().setName("b").setDescription("meta.msg.tag_list"), + new TagProperties().setName("c").setDescription("meta.msg.tag_list"), + new TagProperties().setName("repeat_string.a").setDescription("meta.msg.repeat_string"), + new TagProperties().setName("repeat_string.b").setDescription("meta.msg.repeat_string"), + new TagProperties().setName("deprecated").setColorHex("#FF0000")), + getTestProtobufGraph("extended_protobuf", "messageA") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(test)) + .map(MetadataChangeProposalWrapper::getAspect) + .collect(Collectors.toSet())); + } - @Test - public void extendedFieldTest() throws IOException { - Set expectedTagProperties = Set.of( - new TagProperties() - .setName("product_type_bool") - .setDescription("meta.fld.product_type_bool is true."), - new TagProperties() - .setName("product_type.my type") - .setDescription("meta.fld.product_type"), - new TagProperties() - .setName("MetaEnumExample.EVENT") - .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), - new TagProperties() - .setName("d") - .setDescription("meta.fld.tag_list"), - new TagProperties() - .setName("e") - .setDescription("meta.fld.tag_list"), - new TagProperties() - .setName("f") - .setDescription("meta.fld.tag_list"), - new TagProperties() - .setName("deprecated") - .setColorHex("#FF0000") - ); + @Test + public void extendedFieldTest() throws IOException { + Set expectedTagProperties = + Set.of( + new TagProperties() + .setName("product_type_bool") + .setDescription("meta.fld.product_type_bool is true."), + new TagProperties() + .setName("product_type.my type") + .setDescription("meta.fld.product_type"), + new TagProperties() + .setName("MetaEnumExample.EVENT") + .setDescription("Enum MetaEnumExample.EVENT of {UNKNOWN, ENTITY, EVENT}"), + new TagProperties().setName("d").setDescription("meta.fld.tag_list"), + new TagProperties().setName("e").setDescription("meta.fld.tag_list"), + new TagProperties().setName("f").setDescription("meta.fld.tag_list"), + new TagProperties().setName("deprecated").setColorHex("#FF0000")); - assertEquals(expectedTagProperties, - getTestProtobufGraph("extended_protobuf", "messageB") - .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(new TagVisitor())) - .map(MetadataChangeProposalWrapper::getAspect) - .collect(Collectors.toSet())); - } -} \ No newline at end of file + assertEquals( + expectedTagProperties, + getTestProtobufGraph("extended_protobuf", "messageB") + .accept(getVisitContextBuilder("extended_protobuf.Person"), List.of(new TagVisitor())) + .map(MetadataChangeProposalWrapper::getAspect) + .collect(Collectors.toSet())); + } +} diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java index 4fd5c771caeba..4cff55afc92de 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DataJobLineageAdd.java @@ -3,46 +3,54 @@ import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; import datahub.client.patch.datajob.DataJobInputOutputPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; -import 
com.linkedin.mxe.MetadataChangeProposal; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DataJobLineageAdd { - private DataJobLineageAdd() { - - } + private DataJobLineageAdd() {} /** * Adds lineage to an existing DataJob without affecting any lineage + * * @param args * @throws IOException * @throws ExecutionException * @throws InterruptedException */ - public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { String token = ""; - try (RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - )) { - MetadataChangeProposal dataJobIOPatch = new DataJobInputOutputPatchBuilder().urn(UrnUtils - .getUrn("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_456)")) - .addInputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) - .addOutputDatasetEdge(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) - .addInputDatajobEdge(DataJobUrn.createFromString("urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_123)")) - .addInputDatasetField(UrnUtils.getUrn( - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) - .addOutputDatasetField(UrnUtils.getUrn( - "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) - .build(); + try (RestEmitter emitter = + RestEmitter.create(b -> b.server("http://localhost:8080").token(token))) { + MetadataChangeProposal dataJobIOPatch = + new DataJobInputOutputPatchBuilder() + .urn( + UrnUtils.getUrn( + "urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_456)")) + .addInputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)")) + .addOutputDatasetEdge( + DatasetUrn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleHiveDataset,PROD)")) + .addInputDatajobEdge( + DataJobUrn.createFromString( + "urn:li:dataJob:(urn:li:dataFlow:(airflow,dag_abc,PROD),task_123)")) + .addInputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_deleted,PROD),user_id)")) + .addOutputDatasetField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD),user_id)")) + .build(); Future response = emitter.emit(dataJobIOPatch); @@ -51,9 +59,5 @@ public static void main(String[] args) throws IOException, ExecutionException, I log.error("Failed to emit metadata to DataHub", e); throw new RuntimeException(e); } - } - } - - diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java index ac368972e8dc9..342fbddde8223 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetAdd.java @@ -15,70 +15,79 @@ import datahub.client.MetadataWriteResponse; import datahub.client.rest.RestEmitter; import datahub.event.MetadataChangeProposalWrapper; - import java.io.IOException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; public class 
DatasetAdd { - - private DatasetAdd() { - - } - public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { - DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"); - CorpuserUrn userUrn = new CorpuserUrn("ingestion"); - AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn); + private DatasetAdd() {} - SchemaMetadata schemaMetadata = new SchemaMetadata() - .setSchemaName("customer") - .setPlatform(new DataPlatformUrn("hive")) - .setVersion(0L) - .setHash("") - .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new OtherSchema().setRawSchema("__insert raw schema here__"))) - .setLastModified(lastModified); + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + DatasetUrn datasetUrn = UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"); + CorpuserUrn userUrn = new CorpuserUrn("ingestion"); + AuditStamp lastModified = new AuditStamp().setTime(1640692800000L).setActor(userUrn); - SchemaFieldArray fields = new SchemaFieldArray(); + SchemaMetadata schemaMetadata = + new SchemaMetadata() + .setSchemaName("customer") + .setPlatform(new DataPlatformUrn("hive")) + .setVersion(0L) + .setHash("") + .setPlatformSchema( + SchemaMetadata.PlatformSchema.create( + new OtherSchema().setRawSchema("__insert raw schema here__"))) + .setLastModified(lastModified); - SchemaField field1 = new SchemaField() - .setFieldPath("address.zipcode") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("VARCHAR(50)") - .setDescription("This is the zipcode of the address. Specified using extended form and limited to addresses in the United States") - .setLastModified(lastModified); - fields.add(field1); + SchemaFieldArray fields = new SchemaFieldArray(); - SchemaField field2 = new SchemaField().setFieldPath("address.street") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("VARCHAR(100)") - .setDescription("Street corresponding to the address") - .setLastModified(lastModified); - fields.add(field2); + SchemaField field1 = + new SchemaField() + .setFieldPath("address.zipcode") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("VARCHAR(50)") + .setDescription( + "This is the zipcode of the address. 
Specified using extended form and limited to addresses in the United States") + .setLastModified(lastModified); + fields.add(field1); - SchemaField field3 = new SchemaField().setFieldPath("last_sold_date") - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType()))) - .setNativeDataType("Date") - .setDescription("Date of the last sale date for this property") - .setLastModified(lastModified); - fields.add(field3); + SchemaField field2 = + new SchemaField() + .setFieldPath("address.street") + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("VARCHAR(100)") + .setDescription("Street corresponding to the address") + .setLastModified(lastModified); + fields.add(field2); - schemaMetadata.setFields(fields); + SchemaField field3 = + new SchemaField() + .setFieldPath("last_sold_date") + .setType( + new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new DateType()))) + .setNativeDataType("Date") + .setDescription("Date of the last sale date for this property") + .setLastModified(lastModified); + fields.add(field3); - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("dataset") - .entityUrn(datasetUrn) - .upsert() - .aspect(schemaMetadata) - .build(); + schemaMetadata.setFields(fields); - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - Future response = emitter.emit(mcpw, null); - System.out.println(response.get().getResponseContent()); - } + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("dataset") + .entityUrn(datasetUrn) + .upsert() + .aspect(schemaMetadata) + .build(); -} \ No newline at end of file + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + Future response = emitter.emit(mcpw, null); + System.out.println(response.get().getResponseContent()); + } +} diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java index 5d1698556cac5..b30cb5166df70 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAdd.java @@ -1,55 +1,49 @@ package io.datahubproject.examples; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.mxe.MetadataChangeProposal; import datahub.client.MetadataWriteResponse; import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder; import datahub.client.rest.RestEmitter; import java.io.IOException; -import com.linkedin.mxe.MetadataChangeProposal; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DatasetCustomPropertiesAdd { - private DatasetCustomPropertiesAdd() { - - } + private DatasetCustomPropertiesAdd() {} /** - * Adds properties to an existing custom properties aspect without affecting any existing properties + * Adds properties to an existing custom properties aspect without affecting any existing + * properties + * * @param args * @throws IOException * @throws ExecutionException * @throws InterruptedException */ - public static void main(String[] args) throws IOException, 
ExecutionException, InterruptedException { - MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder() - .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) - .addCustomProperty("cluster_name", "datahubproject.acryl.io") - .addCustomProperty("retention_time", "2 years") - .build(); - - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - try { - Future response = emitter.emit(datasetPropertiesProposal); - - System.out.println(response.get().getResponseContent()); - } catch (Exception e) { - log.error("Failed to emit metadata to DataHub", e); - throw e; - } finally { - emitter.close(); - } - + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + MetadataChangeProposal datasetPropertiesProposal = + new DatasetPropertiesPatchBuilder() + .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) + .addCustomProperty("cluster_name", "datahubproject.acryl.io") + .addCustomProperty("retention_time", "2 years") + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + try { + Future response = emitter.emit(datasetPropertiesProposal); + + System.out.println(response.get().getResponseContent()); + } catch (Exception e) { + log.error("Failed to emit metadata to DataHub", e); + throw e; + } finally { + emitter.close(); } - + } } - - diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java index 9a0ec2030be48..0a89e87060698 100644 --- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java +++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesAddRemove.java @@ -10,47 +10,40 @@ import java.util.concurrent.Future; import lombok.extern.slf4j.Slf4j; - @Slf4j class DatasetCustomPropertiesAddRemove { - private DatasetCustomPropertiesAddRemove() { - - } + private DatasetCustomPropertiesAddRemove() {} /** * Applies Add and Remove property operations on an existing custom properties aspect without * affecting any other properties + * * @param args * @throws IOException * @throws ExecutionException * @throws InterruptedException */ - public static void main(String[] args) throws IOException, ExecutionException, InterruptedException { - MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder() - .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) - .addCustomProperty("cluster_name", "datahubproject.acryl.io") - .removeCustomProperty("retention_time") - .build(); - - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - try { - Future response = emitter.emit(datasetPropertiesProposal); - - System.out.println(response.get().getResponseContent()); - } catch (Exception e) { - log.error("Failed to emit metadata to DataHub", e); - throw e; - } finally { - emitter.close(); - } - + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + MetadataChangeProposal datasetPropertiesProposal = + new DatasetPropertiesPatchBuilder() + .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD")) + 
.addCustomProperty("cluster_name", "datahubproject.acryl.io")
+            .removeCustomProperty("retention_time")
+            .build();
+
+    String token = "";
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
+    try {
+      Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
+
+      System.out.println(response.get().getResponseContent());
+    } catch (Exception e) {
+      log.error("Failed to emit metadata to DataHub", e);
+      throw e;
+    } finally {
+      emitter.close();
     }
-
   }
 }
-
-
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java
index 1d4c937e2f6a0..053c1f068e048 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/DatasetCustomPropertiesReplace.java
@@ -11,17 +11,15 @@
 import java.util.concurrent.Future;
 import lombok.extern.slf4j.Slf4j;
 
-
 @Slf4j
 class DatasetCustomPropertiesReplace {
-  private DatasetCustomPropertiesReplace() {
-
-  }
+  private DatasetCustomPropertiesReplace() {}
 
   /**
-   * Replaces the existing custom properties map with a new map.
-   * Fields like dataset name, description etc remain unchanged.
+   * Replaces the existing custom properties map with a new map. Fields like dataset name,
+   * description etc remain unchanged.
+   *
    * @param args
    * @throws IOException
    */
@@ -29,16 +27,14 @@ public static void main(String[] args) throws IOException {
     Map<String, String> customPropsMap = new HashMap<>();
     customPropsMap.put("cluster_name", "datahubproject.acryl.io");
     customPropsMap.put("retention_time", "2 years");
-    MetadataChangeProposal datasetPropertiesProposal = new DatasetPropertiesPatchBuilder()
-        .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
-        .setCustomProperties(customPropsMap)
-        .build();
+    MetadataChangeProposal datasetPropertiesProposal =
+        new DatasetPropertiesPatchBuilder()
+            .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
+            .setCustomProperties(customPropsMap)
+            .build();
 
     String token = "";
-    RestEmitter emitter = RestEmitter.create(
-        b -> b.server("http://localhost:8080")
-            .token(token)
-    );
+    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
     try {
       Future<MetadataWriteResponse> response = emitter.emit(datasetPropertiesProposal);
 
@@ -48,9 +44,5 @@ public static void main(String[] args) throws IOException {
     } finally {
       emitter.close();
     }
-  }
-
 }
-
-
diff --git a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java
index 077489a9e02d9..233434ccf7002 100644
--- a/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java
+++ b/metadata-integration/java/examples/src/main/java/io/datahubproject/examples/TagCreate.java
@@ -4,37 +4,32 @@
 import datahub.client.MetadataWriteResponse;
 import datahub.client.rest.RestEmitter;
 import datahub.event.MetadataChangeProposalWrapper;
-
 import java.io.IOException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 
 public class TagCreate {
-
-  private TagCreate() {
-
-  }
-
-  public static void main(String[] args) throws IOException, ExecutionException, InterruptedException {
-    TagProperties tagProperties = new TagProperties()
-        .setName("Deprecated")
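The custom-properties examples above all reduce to one emit pattern: build a MetadataChangeProposal with a patch builder, send it through a RestEmitter, then close the emitter. A condensed sketch of that pattern, using only calls that appear in these files (the class name EmitPatchSketch is illustrative, and the empty token mirrors the examples):

import com.linkedin.common.urn.UrnUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import datahub.client.MetadataWriteResponse;
import datahub.client.patch.dataset.DatasetPropertiesPatchBuilder;
import datahub.client.rest.RestEmitter;
import java.util.concurrent.Future;

public class EmitPatchSketch {
  public static void main(String[] args) throws Exception {
    // Build a patch-style proposal, exactly as the examples above do.
    MetadataChangeProposal proposal =
        new DatasetPropertiesPatchBuilder()
            .urn(UrnUtils.toDatasetUrn("hive", "fct_users_deleted", "PROD"))
            .addCustomProperty("cluster_name", "datahubproject.acryl.io")
            .build();

    String token = ""; // populate when the server requires authentication
    RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token));
    try {
      // emit() is asynchronous; block on the Future for the server response.
      Future<MetadataWriteResponse> response = emitter.emit(proposal);
      System.out.println(response.get().getResponseContent());
    } finally {
      emitter.close();
    }
  }
}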
- .setDescription("Having this tag means this column or table is deprecated."); - - MetadataChangeProposalWrapper mcpw = MetadataChangeProposalWrapper.builder() - .entityType("tag") - .entityUrn("urn:li:tag:deprecated") - .upsert() - .aspect(tagProperties) - .build(); - - String token = ""; - RestEmitter emitter = RestEmitter.create( - b -> b.server("http://localhost:8080") - .token(token) - ); - Future response = emitter.emit(mcpw, null); - System.out.println(response.get().getResponseContent()); - - } + private TagCreate() {} + + public static void main(String[] args) + throws IOException, ExecutionException, InterruptedException { + TagProperties tagProperties = + new TagProperties() + .setName("Deprecated") + .setDescription("Having this tag means this column or table is deprecated."); + + MetadataChangeProposalWrapper mcpw = + MetadataChangeProposalWrapper.builder() + .entityType("tag") + .entityUrn("urn:li:tag:deprecated") + .upsert() + .aspect(tagProperties) + .build(); + + String token = ""; + RestEmitter emitter = RestEmitter.create(b -> b.server("http://localhost:8080").token(token)); + Future response = emitter.emit(mcpw, null); + System.out.println(response.get().getResponseContent()); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java index 2b9d20009eeb7..3dc5cfc919c16 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut1.java @@ -7,25 +7,27 @@ public class HdfsIn2HdfsOut1 { - private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut1.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut1.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { + public static void main(String[] args) { - System.out.println("Inside main"); - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + System.out.println("Inside main"); + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); + Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - Dataset df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + Dataset df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); - // InsertIntoHadoopFsRelationCommand - df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); + // InsertIntoHadoopFsRelationCommand + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); - spark.stop(); - } + spark.stop(); + } } diff --git 
a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java index ed7dd95431a34..34a5e5dfaef97 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HdfsOut2.java @@ -1,6 +1,5 @@ package test.spark.lineage; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -8,30 +7,31 @@ public class HdfsIn2HdfsOut2 { - private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut2.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + private static final String TEST_NAME = "Java" + HdfsIn2HdfsOut2.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - Dataset df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - // InsertIntoHadoopFsRelationCommand - df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); + Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv"); + Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - Dataset dfO = spark - .sql("select v1.c1 as a1, v1.c2 as b1, v2.c1 as c1, v2.c2 as d1 from v1 join v2 on v1.id = v2.id"); + Dataset df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); - // InsertIntoHadoopFsRelationCommand - dfO.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); - spark.stop(); + // InsertIntoHadoopFsRelationCommand + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); - } + Dataset dfO = + spark.sql( + "select v1.c1 as a1, v1.c2 as b1, v2.c1 as c1, v2.c2 as d1 from v1 join v2 on v1.id = v2.id"); + // InsertIntoHadoopFsRelationCommand + dfO.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/" + TEST_NAME + "/out.csv"); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java index b2bafcfade35d..1fc6d0374d2ed 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateInsertTable.java @@ -7,29 +7,44 @@ public class 
HdfsIn2HiveCreateInsertTable { - private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateInsertTable.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME,"foo4")); - - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); - - Dataset df = df1.join(df2, "id").drop("id"); - - df.write().mode(SaveMode.Overwrite).saveAsTable(Utils.tbl(TEST_NAME,"foo4")); // CreateDataSourceTableAsSelectCommand - df.write().mode(SaveMode.Append).saveAsTable(Utils.tbl(TEST_NAME,"foo4")); // CreateDataSourceTableAsSelectCommand - df.write().insertInto(Utils.tbl(TEST_NAME,"foo4")); // InsertIntoHadoopFsRelationCommand - - spark.stop(); - } - + private static final String TEST_NAME = + "Java" + HdfsIn2HiveCreateInsertTable.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; + + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo4")); + + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); + + Dataset df = df1.join(df2, "id").drop("id"); + + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(Utils.tbl(TEST_NAME, "foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Append) + .saveAsTable(Utils.tbl(TEST_NAME, "foo4")); // CreateDataSourceTableAsSelectCommand + df.write().insertInto(Utils.tbl(TEST_NAME, "foo4")); // InsertIntoHadoopFsRelationCommand + + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java index ca15bfee111fe..6d9cc032f7e9d 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HdfsIn2HiveCreateTable.java @@ -7,27 +7,39 @@ public class HdfsIn2HiveCreateTable { - private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateTable.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; - - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - 
spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME,"foo3")); - - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); - - Dataset df = df1.join(df2, "id").drop("id"); - - df.write().mode(SaveMode.Overwrite).saveAsTable(Utils.tbl(TEST_NAME,"foo3")); // CreateDataSourceTableAsSelectCommand - - spark.stop(); - } - + private static final String TEST_NAME = "Java" + HdfsIn2HiveCreateTable.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; + + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo3")); + + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); + + Dataset df = df1.join(df2, "id").drop("id"); + + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(Utils.tbl(TEST_NAME, "foo3")); // CreateDataSourceTableAsSelectCommand + + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java index 6b8de329ba05a..7d71136e27f24 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut.java @@ -6,44 +6,66 @@ public class HiveInHiveOut { - private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); + public static void main(String[] args) { + SparkSession spark = + SparkSession.builder().appName(TEST_NAME).enableHiveSupport().getOrCreate(); - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + 
.withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "foo5") + " as " - + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "hivetab") + " as " + "(select * from " - + Utils.tbl(TEST_NAME, "foo5") + ")"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "hivetab") + + " as " + + "(select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - // InsertIntoHiveTable - spark.sql( - "insert into " + Utils.tbl(TEST_NAME, "hivetab") + " (select * from " + Utils.tbl(TEST_NAME, "foo5") + ")"); + // InsertIntoHiveTable + spark.sql( + "insert into " + + Utils.tbl(TEST_NAME, "hivetab") + + " (select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - Dataset df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); + Dataset df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); - // InsertIntoHiveTable - df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); - - spark.stop(); - } + // InsertIntoHiveTable + df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java index 2d31b72998637..598b347cd2064 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/HiveInHiveOut_test1.java @@ -6,44 +6,65 @@ public class HiveInHiveOut_test1 { - private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); - private static final String DATA_DIR = "../resources/data"; + private static final String TEST_NAME = "Java" + HiveInHiveOut.class.getSimpleName(); + private static final String DATA_DIR = "../resources/data"; - public static void main(String[] args) { - SparkSession spark = SparkSession.builder().enableHiveSupport().getOrCreate(); + public static void main(String[] args) { + SparkSession spark = SparkSession.builder().enableHiveSupport().getOrCreate(); - spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); - spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); - spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); + spark.sql("DROP DATABASE IF EXISTS " + TEST_NAME + " CASCADE"); + spark.sql("CREATE DATABASE IF NOT EXISTS " + TEST_NAME); + spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "hivetab")); 
+ spark.sql("DROP TABLE IF EXISTS " + Utils.tbl(TEST_NAME, "foo5")); - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); - df1.createOrReplaceTempView("v1"); - df2.createOrReplaceTempView("v2"); + df1.createOrReplaceTempView("v1"); + df2.createOrReplaceTempView("v2"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "foo5") + " as " - + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); - // CreateHiveTableAsSelectCommand - spark.sql("create table " + Utils.tbl(TEST_NAME, "hivetab") + " as " + "(select * from " - + Utils.tbl(TEST_NAME, "foo5") + ")"); + // CreateHiveTableAsSelectCommand + spark.sql( + "create table " + + Utils.tbl(TEST_NAME, "hivetab") + + " as " + + "(select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - // InsertIntoHiveTable - spark.sql( - "insert into " + Utils.tbl(TEST_NAME, "hivetab") + " (select * from " + Utils.tbl(TEST_NAME, "foo5") + ")"); + // InsertIntoHiveTable + spark.sql( + "insert into " + + Utils.tbl(TEST_NAME, "hivetab") + + " (select * from " + + Utils.tbl(TEST_NAME, "foo5") + + ")"); - Dataset df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); + Dataset df = spark.sql("select * from " + Utils.tbl(TEST_NAME, "foo5")); - // InsertIntoHiveTable - df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); - - spark.stop(); - } + // InsertIntoHiveTable + df.write().insertInto(Utils.tbl(TEST_NAME, "hivetab")); + spark.stop(); + } } diff --git a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java index 22007a8d41e90..278d7068f20bc 100644 --- a/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java +++ b/metadata-integration/java/spark-lineage/spark-smoke-test/test-spark-lineage/src/main/java/test/spark/lineage/Utils.java @@ -1,7 +1,7 @@ package test.spark.lineage; public class Utils { - public static String tbl(String testDb ,String tbl) { + public static String tbl(String testDb, String tbl) { return testDb + "." 
+ tbl; } } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java index 90410332c3d7a..1dda979bfcefd 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatahubSparkListener.java @@ -1,6 +1,17 @@ package datahub.spark; +import com.google.common.base.Splitter; +import com.typesafe.config.Config; import datahub.spark.consumer.impl.CoalesceJobsEmitter; +import datahub.spark.consumer.impl.McpEmitter; +import datahub.spark.model.AppEndEvent; +import datahub.spark.model.AppStartEvent; +import datahub.spark.model.DatasetLineage; +import datahub.spark.model.LineageConsumer; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.SQLQueryExecEndEvent; +import datahub.spark.model.SQLQueryExecStartEvent; +import datahub.spark.model.dataset.SparkDataset; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; @@ -15,7 +26,7 @@ import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - +import lombok.extern.slf4j.Slf4j; import org.apache.spark.SparkConf; import org.apache.spark.SparkContext; import org.apache.spark.SparkEnv; @@ -30,27 +41,12 @@ import org.apache.spark.sql.execution.SQLExecution; import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionEnd; import org.apache.spark.sql.execution.ui.SparkListenerSQLExecutionStart; - -import com.google.common.base.Splitter; -import com.typesafe.config.Config; - -import datahub.spark.consumer.impl.McpEmitter; -import datahub.spark.model.AppEndEvent; -import datahub.spark.model.AppStartEvent; -import datahub.spark.model.DatasetLineage; -import datahub.spark.model.LineageConsumer; -import datahub.spark.model.LineageUtils; -import datahub.spark.model.SQLQueryExecEndEvent; -import datahub.spark.model.SQLQueryExecStartEvent; -import datahub.spark.model.dataset.SparkDataset; -import lombok.extern.slf4j.Slf4j; import org.apache.spark.util.JsonProtocol; import org.json4s.jackson.JsonMethods$; import scala.collection.JavaConversions; import scala.runtime.AbstractFunction1; import scala.runtime.AbstractPartialFunction; - @Slf4j public class DatahubSparkListener extends SparkListener { @@ -63,7 +59,8 @@ public class DatahubSparkListener extends SparkListener { public static final String COALESCE_KEY = "coalesce_jobs"; private final Map appDetails = new ConcurrentHashMap<>(); - private final Map> appSqlDetails = new ConcurrentHashMap<>(); + private final Map> appSqlDetails = + new ConcurrentHashMap<>(); private final Map appEmitters = new ConcurrentHashMap<>(); private final Map appConfig = new ConcurrentHashMap<>(); @@ -77,15 +74,22 @@ private class SqlStartTask { private final SparkContext ctx; private final LogicalPlan plan; - public SqlStartTask(SparkListenerSQLExecutionStart sqlStart, LogicalPlan plan, SparkContext ctx) { + public SqlStartTask( + SparkListenerSQLExecutionStart sqlStart, LogicalPlan plan, SparkContext ctx) { this.sqlStart = sqlStart; this.plan = plan; this.ctx = ctx; String jsonPlan = (plan != null) ? plan.toJSON() : null; String sqlStartJson = - (sqlStart != null) ? 
JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) : null; - log.debug("SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", sqlStartJson, jsonPlan, ctx); + (sqlStart != null) + ? JsonMethods$.MODULE$.compact(JsonProtocol.sparkEventToJson(sqlStart)) + : null; + log.debug( + "SqlStartTask with parameters: sqlStart: {}, plan: {}, ctx: {}", + sqlStartJson, + jsonPlan, + ctx); } public void run() { @@ -104,40 +108,55 @@ public void run() { return; } - appSqlDetails.get(ctx.applicationId()) - .put(sqlStart.executionId(), - new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(), - sqlStart.time(), sqlStart.executionId(), null)); - log.debug("PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n"); + appSqlDetails + .get(ctx.applicationId()) + .put( + sqlStart.executionId(), + new SQLQueryExecStartEvent( + ctx.conf().get("spark.master"), + getPipelineName(ctx), + ctx.applicationId(), + sqlStart.time(), + sqlStart.executionId(), + null)); + log.debug( + "PLAN for execution id: " + getPipelineName(ctx) + ":" + sqlStart.executionId() + "\n"); log.debug(plan.toString()); - Optional> outputDS = DatasetExtractor.asDataset(plan, ctx, true); + Optional> outputDS = + DatasetExtractor.asDataset(plan, ctx, true); if (!outputDS.isPresent() || outputDS.get().isEmpty()) { - log.debug("Skipping execution as no output dataset present for execution id: " + ctx.applicationId() + ":" - + sqlStart.executionId()); + log.debug( + "Skipping execution as no output dataset present for execution id: " + + ctx.applicationId() + + ":" + + sqlStart.executionId()); return; } // Here assumption is that there will be only single target for single sql query DatasetLineage lineage = - new DatasetLineage(sqlStart.description(), plan.toString(), outputDS.get().iterator().next()); + new DatasetLineage( + sqlStart.description(), plan.toString(), outputDS.get().iterator().next()); Collection> allInners = new ArrayList<>(); - plan.collect(new AbstractPartialFunction() { - - @Override - public Void apply(LogicalPlan plan) { - log.debug("CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); - Optional> inputDS = DatasetExtractor.asDataset(plan, ctx, false); - inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); - allInners.addAll(JavaConversions.asJavaCollection(plan.innerChildren())); - return null; - } + plan.collect( + new AbstractPartialFunction() { + + @Override + public Void apply(LogicalPlan plan) { + log.debug("CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); + Optional> inputDS = + DatasetExtractor.asDataset(plan, ctx, false); + inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); + allInners.addAll(JavaConversions.asJavaCollection(plan.innerChildren())); + return null; + } - @Override - public boolean isDefinedAt(LogicalPlan x) { - return true; - } - }); + @Override + public boolean isDefinedAt(LogicalPlan x) { + return true; + } + }); for (QueryPlan qp : allInners) { if (!(qp instanceof LogicalPlan)) { @@ -145,28 +164,42 @@ public boolean isDefinedAt(LogicalPlan x) { } LogicalPlan nestedPlan = (LogicalPlan) qp; - nestedPlan.collect(new AbstractPartialFunction() { - - @Override - public Void apply(LogicalPlan plan) { - log.debug("INNER CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); - Optional> inputDS = DatasetExtractor.asDataset(plan, ctx, false); - inputDS.ifPresent( - x -> log.debug("source added for " + ctx.appName() + "/" + sqlStart.executionId() + ": " + 
x)); - inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); - return null; - } - - @Override - public boolean isDefinedAt(LogicalPlan x) { - return true; - } - }); + nestedPlan.collect( + new AbstractPartialFunction() { + + @Override + public Void apply(LogicalPlan plan) { + log.debug("INNER CHILD " + plan.getClass() + "\n" + plan + "\n-------------\n"); + Optional> inputDS = + DatasetExtractor.asDataset(plan, ctx, false); + inputDS.ifPresent( + x -> + log.debug( + "source added for " + + ctx.appName() + + "/" + + sqlStart.executionId() + + ": " + + x)); + inputDS.ifPresent(x -> x.forEach(y -> lineage.addSource(y))); + return null; + } + + @Override + public boolean isDefinedAt(LogicalPlan x) { + return true; + } + }); } SQLQueryExecStartEvent evt = - new SQLQueryExecStartEvent(ctx.conf().get("spark.master"), getPipelineName(ctx), ctx.applicationId(), - sqlStart.time(), sqlStart.executionId(), lineage); + new SQLQueryExecStartEvent( + ctx.conf().get("spark.master"), + getPipelineName(ctx), + ctx.applicationId(), + sqlStart.time(), + sqlStart.executionId(), + lineage); appSqlDetails.get(ctx.applicationId()).put(sqlStart.executionId(), evt); @@ -185,14 +218,16 @@ public boolean isDefinedAt(LogicalPlan x) { public void onApplicationStart(SparkListenerApplicationStart applicationStart) { try { log.info("Application started: " + applicationStart); - LineageUtils.findSparkCtx().foreach(new AbstractFunction1() { - - @Override - public Void apply(SparkContext sc) { - checkOrCreateApplicationSetup(sc); - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1() { + + @Override + public Void apply(SparkContext sc) { + checkOrCreateApplicationSetup(sc); + return null; + } + }); super.onApplicationStart(applicationStart); } catch (Exception e) { // log error, but don't impact thread @@ -207,41 +242,52 @@ public Void apply(SparkContext sc) { @Override public void onApplicationEnd(SparkListenerApplicationEnd applicationEnd) { try { - LineageUtils.findSparkCtx().foreach(new AbstractFunction1() { - - @Override - public Void apply(SparkContext sc) { - log.info("Application ended : {} {}", sc.appName(), sc.applicationId()); - AppStartEvent start = appDetails.remove(sc.applicationId()); - appSqlDetails.remove(sc.applicationId()); - if (start == null) { - log.error("Application end event received, but start event missing for appId " + sc.applicationId()); - } else { - AppEndEvent evt = new AppEndEvent(LineageUtils.getMaster(sc), getPipelineName(sc), sc.applicationId(), - applicationEnd.time(), start); - - McpEmitter emitter = appEmitters.get(sc.applicationId()); - if (emitter != null) { - emitter.accept(evt); - try { - emitter.close(); - appEmitters.remove(sc.applicationId()); - } catch (Exception e) { - log.warn("Failed to close underlying emitter due to {}", e.getMessage()); - } - } - consumers().forEach(x -> { - x.accept(evt); - try { - x.close(); - } catch (IOException e) { - log.warn("Failed to close lineage consumer", e); - } - }); - } - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1() { + + @Override + public Void apply(SparkContext sc) { + log.info("Application ended : {} {}", sc.appName(), sc.applicationId()); + AppStartEvent start = appDetails.remove(sc.applicationId()); + appSqlDetails.remove(sc.applicationId()); + if (start == null) { + log.error( + "Application end event received, but start event missing for appId " + + sc.applicationId()); + } else { + AppEndEvent evt = + new AppEndEvent( + 
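/* AppEndEvent is handed the matching AppStartEvent so that a single DataFlowInfo aspect can carry both the startedAt and completedAt custom properties; see the AppEndEvent.java hunk later in this patch. */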
LineageUtils.getMaster(sc), + getPipelineName(sc), + sc.applicationId(), + applicationEnd.time(), + start); + + McpEmitter emitter = appEmitters.get(sc.applicationId()); + if (emitter != null) { + emitter.accept(evt); + try { + emitter.close(); + appEmitters.remove(sc.applicationId()); + } catch (Exception e) { + log.warn("Failed to close underlying emitter due to {}", e.getMessage()); + } + } + consumers() + .forEach( + x -> { + x.accept(evt); + try { + x.close(); + } catch (IOException e) { + log.warn("Failed to close lineage consumer", e); + } + }); + } + return null; + } + }); super.onApplicationEnd(applicationEnd); } catch (Exception e) { // log error, but don't impact thread @@ -276,27 +322,37 @@ public void onOtherEvent(SparkListenerEvent event) { } public void processExecutionEnd(SparkListenerSQLExecutionEnd sqlEnd) { - LineageUtils.findSparkCtx().foreach(new AbstractFunction1() { - - @Override - public Void apply(SparkContext sc) { - SQLQueryExecStartEvent start = appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId()); - if (start == null) { - log.error( - "Execution end event received, but start event missing for appId/sql exec Id " + sc.applicationId() + ":" - + sqlEnd.executionId()); - } else if (start.getDatasetLineage() != null) { - SQLQueryExecEndEvent evt = - new SQLQueryExecEndEvent(LineageUtils.getMaster(sc), sc.appName(), sc.applicationId(), sqlEnd.time(), - sqlEnd.executionId(), start); - McpEmitter emitter = appEmitters.get(sc.applicationId()); - if (emitter != null) { - emitter.accept(evt); - } - } - return null; - } - }); + LineageUtils.findSparkCtx() + .foreach( + new AbstractFunction1() { + + @Override + public Void apply(SparkContext sc) { + SQLQueryExecStartEvent start = + appSqlDetails.get(sc.applicationId()).remove(sqlEnd.executionId()); + if (start == null) { + log.error( + "Execution end event received, but start event missing for appId/sql exec Id " + + sc.applicationId() + + ":" + + sqlEnd.executionId()); + } else if (start.getDatasetLineage() != null) { + SQLQueryExecEndEvent evt = + new SQLQueryExecEndEvent( + LineageUtils.getMaster(sc), + sc.appName(), + sc.applicationId(), + sqlEnd.time(), + sqlEnd.executionId(), + start); + McpEmitter emitter = appEmitters.get(sc.applicationId()); + if (emitter != null) { + emitter.accept(evt); + } + } + return null; + } + }); } private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { @@ -306,15 +362,27 @@ private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { if (datahubConfig == null) { Config datahubConf = LineageUtils.parseSparkConfig(); appConfig.put(appId, datahubConf); - Config pipelineConfig = datahubConf.hasPath(PIPELINE_KEY) ? datahubConf.getConfig(PIPELINE_KEY) - : com.typesafe.config.ConfigFactory.empty(); + Config pipelineConfig = + datahubConf.hasPath(PIPELINE_KEY) + ? datahubConf.getConfig(PIPELINE_KEY) + : com.typesafe.config.ConfigFactory.empty(); AppStartEvent evt = - new AppStartEvent(LineageUtils.getMaster(ctx), getPipelineName(ctx), appId, ctx.startTime(), ctx.sparkUser(), + new AppStartEvent( + LineageUtils.getMaster(ctx), + getPipelineName(ctx), + appId, + ctx.startTime(), + ctx.sparkUser(), pipelineConfig); - appEmitters.computeIfAbsent(appId, - s -> datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) ? new CoalesceJobsEmitter( - datahubConf) : new McpEmitter(datahubConf)).accept(evt); + appEmitters + .computeIfAbsent( + appId, + s -> + datahubConf.hasPath(COALESCE_KEY) && datahubConf.getBoolean(COALESCE_KEY) + ? 
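/* coalesce_jobs=true swaps in CoalesceJobsEmitter, which buffers every SQLQueryExecStartEvent and squashes them into a single DataJob at application end instead of emitting one job per query; see CoalesceJobsEmitter later in this patch. */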
new CoalesceJobsEmitter(datahubConf) + : new McpEmitter(datahubConf)) + .accept(evt); consumers().forEach(c -> c.accept(evt)); appDetails.put(appId, evt); appSqlDetails.put(appId, new ConcurrentHashMap<>()); @@ -322,7 +390,8 @@ private synchronized void checkOrCreateApplicationSetup(SparkContext ctx) { } private String getPipelineName(SparkContext cx) { - Config datahubConfig = appConfig.computeIfAbsent(cx.applicationId(), s -> LineageUtils.parseSparkConfig()); + Config datahubConfig = + appConfig.computeIfAbsent(cx.applicationId(), s -> LineageUtils.parseSparkConfig()); String name = ""; if (datahubConfig.hasPath(DATABRICKS_CLUSTER_KEY)) { name = datahubConfig.getString(DATABRICKS_CLUSTER_KEY) + "_" + cx.applicationId(); @@ -339,8 +408,10 @@ private String getPipelineName(SparkContext cx) { private void processExecution(SparkListenerSQLExecutionStart sqlStart) { QueryExecution queryExec = SQLExecution.getQueryExecution(sqlStart.executionId()); if (queryExec == null) { - log.error("Skipping processing for sql exec Id" + sqlStart.executionId() - + " as Query execution context could not be read from current spark state"); + log.error( + "Skipping processing for sql exec Id" + + sqlStart.executionId() + + " as Query execution context could not be read from current spark state"); return; } LogicalPlan plan = queryExec.optimizedPlan(); @@ -354,7 +425,8 @@ private List consumers() { SparkConf conf = SparkEnv.get().conf(); if (conf.contains(CONSUMER_TYPE_KEY)) { String consumerTypes = conf.get(CONSUMER_TYPE_KEY); - return StreamSupport.stream(Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false) + return StreamSupport.stream( + Splitter.on(",").trimResults().split(consumerTypes).spliterator(), false) .map(x -> LineageUtils.getConsumer(x)) .filter(Objects::nonNull) .collect(Collectors.toList()); diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java index 51f5d561b26ae..ec8177bbc0e5c 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/DatasetExtractor.java @@ -1,5 +1,13 @@ package datahub.spark; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.FabricType; +import com.typesafe.config.Config; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.dataset.CatalogTableDataset; +import datahub.spark.model.dataset.HdfsPathDataset; +import datahub.spark.model.dataset.JdbcDataset; +import datahub.spark.model.dataset.SparkDataset; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -10,7 +18,7 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - +import lombok.extern.slf4j.Slf4j; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.spark.SparkContext; @@ -32,17 +40,6 @@ import org.apache.spark.sql.hive.execution.HiveTableScanExec; import org.apache.spark.sql.hive.execution.InsertIntoHiveTable; import org.apache.spark.sql.sources.BaseRelation; - -import com.google.common.collect.ImmutableSet; -import com.linkedin.common.FabricType; -import com.typesafe.config.Config; - -import datahub.spark.model.LineageUtils; -import datahub.spark.model.dataset.CatalogTableDataset; -import datahub.spark.model.dataset.HdfsPathDataset; -import datahub.spark.model.dataset.JdbcDataset; 
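// Note: the extractor below is a set of class-keyed registries. PLAN_TO_DATASET,
// REL_TO_DATASET and SPARKPLAN_TO_DATASET each map a concrete plan or relation class to a
// lambda that produces SparkDataset instances, so supporting a new node type amounts to one
// more put(...) registration.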
-import datahub.spark.model.dataset.SparkDataset; -import lombok.extern.slf4j.Slf4j; import scala.Option; import scala.collection.JavaConversions; import scala.runtime.AbstractFunction1; @@ -50,196 +47,287 @@ @Slf4j public class DatasetExtractor { - private static final Map, PlanToDataset> PLAN_TO_DATASET = new HashMap<>(); - private static final Map, SparkPlanToDataset> SPARKPLAN_TO_DATASET = new HashMap<>(); - private static final Map, RelationToDataset> REL_TO_DATASET = new HashMap<>(); - private static final Set> OUTPUT_CMD = ImmutableSet.of( - InsertIntoHadoopFsRelationCommand.class, SaveIntoDataSourceCommand.class, - CreateDataSourceTableAsSelectCommand.class, CreateHiveTableAsSelectCommand.class, InsertIntoHiveTable.class); + private static final Map, PlanToDataset> PLAN_TO_DATASET = + new HashMap<>(); + private static final Map, SparkPlanToDataset> SPARKPLAN_TO_DATASET = + new HashMap<>(); + private static final Map, RelationToDataset> REL_TO_DATASET = + new HashMap<>(); + private static final Set> OUTPUT_CMD = + ImmutableSet.of( + InsertIntoHadoopFsRelationCommand.class, + SaveIntoDataSourceCommand.class, + CreateDataSourceTableAsSelectCommand.class, + CreateHiveTableAsSelectCommand.class, + InsertIntoHiveTable.class); private static final String DATASET_ENV_KEY = "metadata.dataset.env"; private static final String DATASET_PLATFORM_INSTANCE_KEY = "metadata.dataset.platformInstance"; private static final String TABLE_HIVE_PLATFORM_ALIAS = "metadata.table.hive_platform_alias"; private static final String INCLUDE_SCHEME_KEY = "metadata.include_scheme"; private static final String REMOVE_PARTITION_PATTERN = "metadata.remove_partition_pattern"; - // TODO InsertIntoHiveDirCommand, InsertIntoDataSourceDirCommand - private DatasetExtractor() { + // TODO InsertIntoHiveDirCommand, InsertIntoDataSourceDirCommand - } + private DatasetExtractor() {} private static interface PlanToDataset { - Optional> fromPlanNode(LogicalPlan plan, SparkContext ctx, Config datahubConfig); + Optional> fromPlanNode( + LogicalPlan plan, SparkContext ctx, Config datahubConfig); } private static interface RelationToDataset { - Optional> fromRelation(BaseRelation rel, SparkContext ctx, Config datahubConfig); + Optional> fromRelation( + BaseRelation rel, SparkContext ctx, Config datahubConfig); } private static interface SparkPlanToDataset { - Optional> fromSparkPlanNode(SparkPlan plan, SparkContext ctx, - Config datahubConfig); + Optional> fromSparkPlanNode( + SparkPlan plan, SparkContext ctx, Config datahubConfig); } static { - - SPARKPLAN_TO_DATASET.put(FileSourceScanExec.class, (p, ctx, datahubConfig) -> { - - BaseRelation baseRel = ((FileSourceScanExec) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - - }); - - SPARKPLAN_TO_DATASET.put(HiveTableScanExec.class, (p, ctx, datahubConfig) -> { - - HiveTableRelation baseRel = ((HiveTableScanExec) p).relation(); - if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); - - }); - - SPARKPLAN_TO_DATASET.put(RowDataSourceScanExec.class, (p, ctx, datahubConfig) -> { - BaseRelation baseRel = ((RowDataSourceScanExec) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - 
}); - - SPARKPLAN_TO_DATASET.put(InMemoryTableScanExec.class, (p, ctx, datahubConfig) -> { - InMemoryRelation baseRel = ((InMemoryTableScanExec) p).relation(); - if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); - - }); - - PLAN_TO_DATASET.put(InsertIntoHadoopFsRelationCommand.class, (p, ctx, datahubConfig) -> { - InsertIntoHadoopFsRelationCommand cmd = (InsertIntoHadoopFsRelationCommand) p; - if (cmd.catalogTable().isDefined()) { - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.catalogTable().get(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - } - return Optional.of(Collections.singletonList(new HdfsPathDataset(cmd.outputPath(), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(LogicalRelation.class, (p, ctx, datahubConfig) -> { - BaseRelation baseRel = ((LogicalRelation) p).relation(); - if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { - return Optional.empty(); - } - return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); - }); - - PLAN_TO_DATASET.put(SaveIntoDataSourceCommand.class, (p, ctx, datahubConfig) -> { - - SaveIntoDataSourceCommand cmd = (SaveIntoDataSourceCommand) p; - - Map options = JavaConversions.mapAsJavaMap(cmd.options()); - String url = options.getOrDefault("url", ""); // e.g. jdbc:postgresql://localhost:5432/sparktestdb - if (url.contains("jdbc")) { - String tbl = options.get("dbtable"); - return Optional.of(Collections.singletonList( - new JdbcDataset(url, tbl, getCommonPlatformInstance(datahubConfig), getCommonFabricType(datahubConfig)))); - } else if (options.containsKey("path")) { - return Optional.of(Collections.singletonList(new HdfsPathDataset(new Path(options.get("path")), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - } else { - return Optional.empty(); - } - }); - - PLAN_TO_DATASET.put(CreateDataSourceTableAsSelectCommand.class, (p, ctx, datahubConfig) -> { - CreateDataSourceTableAsSelectCommand cmd = (CreateDataSourceTableAsSelectCommand) p; - // TODO what of cmd.mode() - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.table(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - PLAN_TO_DATASET.put(CreateHiveTableAsSelectCommand.class, (p, ctx, datahubConfig) -> { - CreateHiveTableAsSelectCommand cmd = (CreateHiveTableAsSelectCommand) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.tableDesc(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - PLAN_TO_DATASET.put(InsertIntoHiveTable.class, (p, ctx, datahubConfig) -> { - InsertIntoHiveTable cmd = (InsertIntoHiveTable) p; - return Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.table(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(HiveTableRelation.class, (p, ctx, datahubConfig) -> { - HiveTableRelation cmd = (HiveTableRelation) p; - return 
Optional.of(Collections.singletonList(new CatalogTableDataset(cmd.tableMeta(), - getCommonPlatformInstance(datahubConfig), getTableHivePlatformAlias(datahubConfig), - getCommonFabricType(datahubConfig)))); - }); - - REL_TO_DATASET.put(HadoopFsRelation.class, (r, ctx, datahubConfig) -> { - List res = JavaConversions.asJavaCollection(((HadoopFsRelation) r).location().rootPaths()).stream() - .map(p -> getDirectoryPath(p, ctx.hadoopConfiguration())).distinct().collect(Collectors.toList()); - - // TODO mapping to URN TBD - return Optional.of(Collections.singletonList(new HdfsPathDataset(res.get(0), - getCommonPlatformInstance(datahubConfig), getIncludeScheme(datahubConfig), - getCommonFabricType(datahubConfig), getRemovePartitionPattern(datahubConfig)))); - }); - REL_TO_DATASET.put(JDBCRelation.class, (r, ctx, datahubConfig) -> { - JDBCRelation rel = (JDBCRelation) r; - Option tbl = rel.jdbcOptions().parameters().get(JDBCOptions.JDBC_TABLE_NAME()); - if (tbl.isEmpty()) { - return Optional.empty(); - } - - return Optional.of(Collections.singletonList(new JdbcDataset(rel.jdbcOptions().url(), tbl.get(), - getCommonPlatformInstance(datahubConfig), getCommonFabricType(datahubConfig)))); - }); - - PLAN_TO_DATASET.put(InMemoryRelation.class, (plan, ctx, datahubConfig) -> { - SparkPlan cachedPlan = ((InMemoryRelation) plan).cachedPlan(); - ArrayList datasets = new ArrayList<>(); - cachedPlan.collectLeaves().toList().foreach(new AbstractFunction1() { - - @Override - public Void apply(SparkPlan leafPlan) { - - if (SPARKPLAN_TO_DATASET.containsKey(leafPlan.getClass())) { - Optional> dataset = SPARKPLAN_TO_DATASET.get(leafPlan.getClass()) - .fromSparkPlanNode(leafPlan, ctx, datahubConfig); - dataset.ifPresent(x -> datasets.addAll(x)); + SPARKPLAN_TO_DATASET.put( + FileSourceScanExec.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((FileSourceScanExec) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + HiveTableScanExec.class, + (p, ctx, datahubConfig) -> { + HiveTableRelation baseRel = ((HiveTableScanExec) p).relation(); + if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + RowDataSourceScanExec.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((RowDataSourceScanExec) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + SPARKPLAN_TO_DATASET.put( + InMemoryTableScanExec.class, + (p, ctx, datahubConfig) -> { + InMemoryRelation baseRel = ((InMemoryTableScanExec) p).relation(); + if (!PLAN_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return PLAN_TO_DATASET.get(baseRel.getClass()).fromPlanNode(baseRel, ctx, datahubConfig); + }); + + PLAN_TO_DATASET.put( + InsertIntoHadoopFsRelationCommand.class, + (p, ctx, datahubConfig) -> { + InsertIntoHadoopFsRelationCommand cmd = (InsertIntoHadoopFsRelationCommand) p; + if (cmd.catalogTable().isDefined()) { + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.catalogTable().get(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + 
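/* when the insert targets a catalog table, lineage is recorded against the table identity; otherwise the branch below falls back to an HdfsPathDataset built from cmd.outputPath() */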
getCommonFabricType(datahubConfig)))); + } + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + cmd.outputPath(), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + LogicalRelation.class, + (p, ctx, datahubConfig) -> { + BaseRelation baseRel = ((LogicalRelation) p).relation(); + if (!REL_TO_DATASET.containsKey(baseRel.getClass())) { + return Optional.empty(); + } + return REL_TO_DATASET.get(baseRel.getClass()).fromRelation(baseRel, ctx, datahubConfig); + }); + + PLAN_TO_DATASET.put( + SaveIntoDataSourceCommand.class, + (p, ctx, datahubConfig) -> { + SaveIntoDataSourceCommand cmd = (SaveIntoDataSourceCommand) p; + + Map options = JavaConversions.mapAsJavaMap(cmd.options()); + String url = + options.getOrDefault("url", ""); // e.g. jdbc:postgresql://localhost:5432/sparktestdb + if (url.contains("jdbc")) { + String tbl = options.get("dbtable"); + return Optional.of( + Collections.singletonList( + new JdbcDataset( + url, + tbl, + getCommonPlatformInstance(datahubConfig), + getCommonFabricType(datahubConfig)))); + } else if (options.containsKey("path")) { + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + new Path(options.get("path")), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); } else { - log.error(leafPlan.getClass() + " is not yet supported. Please contact datahub team for further support."); + return Optional.empty(); + } + }); + + PLAN_TO_DATASET.put( + CreateDataSourceTableAsSelectCommand.class, + (p, ctx, datahubConfig) -> { + CreateDataSourceTableAsSelectCommand cmd = (CreateDataSourceTableAsSelectCommand) p; + // TODO what of cmd.mode() + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.table(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + PLAN_TO_DATASET.put( + CreateHiveTableAsSelectCommand.class, + (p, ctx, datahubConfig) -> { + CreateHiveTableAsSelectCommand cmd = (CreateHiveTableAsSelectCommand) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.tableDesc(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + PLAN_TO_DATASET.put( + InsertIntoHiveTable.class, + (p, ctx, datahubConfig) -> { + InsertIntoHiveTable cmd = (InsertIntoHiveTable) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.table(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + HiveTableRelation.class, + (p, ctx, datahubConfig) -> { + HiveTableRelation cmd = (HiveTableRelation) p; + return Optional.of( + Collections.singletonList( + new CatalogTableDataset( + cmd.tableMeta(), + getCommonPlatformInstance(datahubConfig), + getTableHivePlatformAlias(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + REL_TO_DATASET.put( + HadoopFsRelation.class, + (r, ctx, datahubConfig) -> { + List res = + JavaConversions.asJavaCollection(((HadoopFsRelation) r).location().rootPaths()) + .stream() + .map(p -> getDirectoryPath(p, ctx.hadoopConfiguration())) + .distinct() + .collect(Collectors.toList()); + + // TODO mapping 
to URN TBD + return Optional.of( + Collections.singletonList( + new HdfsPathDataset( + res.get(0), + getCommonPlatformInstance(datahubConfig), + getIncludeScheme(datahubConfig), + getCommonFabricType(datahubConfig), + getRemovePartitionPattern(datahubConfig)))); + }); + REL_TO_DATASET.put( + JDBCRelation.class, + (r, ctx, datahubConfig) -> { + JDBCRelation rel = (JDBCRelation) r; + Option tbl = rel.jdbcOptions().parameters().get(JDBCOptions.JDBC_TABLE_NAME()); + if (tbl.isEmpty()) { + return Optional.empty(); } - return null; - } - }); - return datasets.isEmpty() ? Optional.empty() : Optional.of(datasets); - }); + + return Optional.of( + Collections.singletonList( + new JdbcDataset( + rel.jdbcOptions().url(), + tbl.get(), + getCommonPlatformInstance(datahubConfig), + getCommonFabricType(datahubConfig)))); + }); + + PLAN_TO_DATASET.put( + InMemoryRelation.class, + (plan, ctx, datahubConfig) -> { + SparkPlan cachedPlan = ((InMemoryRelation) plan).cachedPlan(); + ArrayList datasets = new ArrayList<>(); + cachedPlan + .collectLeaves() + .toList() + .foreach( + new AbstractFunction1() { + + @Override + public Void apply(SparkPlan leafPlan) { + + if (SPARKPLAN_TO_DATASET.containsKey(leafPlan.getClass())) { + Optional> dataset = + SPARKPLAN_TO_DATASET + .get(leafPlan.getClass()) + .fromSparkPlanNode(leafPlan, ctx, datahubConfig); + dataset.ifPresent(x -> datasets.addAll(x)); + } else { + log.error( + leafPlan.getClass() + + " is not yet supported. Please contact datahub team for further support."); + } + return null; + } + }); + return datasets.isEmpty() ? Optional.empty() : Optional.of(datasets); + }); } - static Optional> asDataset(LogicalPlan logicalPlan, SparkContext ctx, - boolean outputNode) { + static Optional> asDataset( + LogicalPlan logicalPlan, SparkContext ctx, boolean outputNode) { if (!outputNode && OUTPUT_CMD.contains(logicalPlan.getClass())) { return Optional.empty(); } if (!PLAN_TO_DATASET.containsKey(logicalPlan.getClass())) { - log.error(logicalPlan.getClass() + " is not supported yet. Please contact datahub team for further support. "); + log.error( + logicalPlan.getClass() + + " is not supported yet. Please contact datahub team for further support. "); return Optional.empty(); } Config datahubconfig = LineageUtils.parseSparkConfig(); - return PLAN_TO_DATASET.get(logicalPlan.getClass()).fromPlanNode(logicalPlan, ctx, datahubconfig); + return PLAN_TO_DATASET + .get(logicalPlan.getClass()) + .fromPlanNode(logicalPlan, ctx, datahubconfig); } private static Path getDirectoryPath(Path p, Configuration hadoopConf) { @@ -255,9 +343,10 @@ private static Path getDirectoryPath(Path p, Configuration hadoopConf) { } private static FabricType getCommonFabricType(Config datahubConfig) { - String fabricTypeString = datahubConfig.hasPath(DATASET_ENV_KEY) - ? datahubConfig.getString(DATASET_ENV_KEY).toUpperCase() - : "PROD"; + String fabricTypeString = + datahubConfig.hasPath(DATASET_ENV_KEY) + ? datahubConfig.getString(DATASET_ENV_KEY).toUpperCase() + : "PROD"; FabricType fabricType = null; try { fabricType = FabricType.valueOf(fabricTypeString); @@ -269,22 +358,26 @@ private static FabricType getCommonFabricType(Config datahubConfig) { } private static String getCommonPlatformInstance(Config datahubConfig) { - return datahubConfig.hasPath(DATASET_PLATFORM_INSTANCE_KEY) ? datahubConfig.getString(DATASET_PLATFORM_INSTANCE_KEY) + return datahubConfig.hasPath(DATASET_PLATFORM_INSTANCE_KEY) + ? 
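/* typesafe Config throws ConfigException.Missing when reading an absent path, hence every accessor in this block guards with hasPath and supplies its own default */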
datahubConfig.getString(DATASET_PLATFORM_INSTANCE_KEY) : null; } private static String getTableHivePlatformAlias(Config datahubConfig) { - return datahubConfig.hasPath(TABLE_HIVE_PLATFORM_ALIAS) ? datahubConfig.getString(TABLE_HIVE_PLATFORM_ALIAS) - : "hive"; + return datahubConfig.hasPath(TABLE_HIVE_PLATFORM_ALIAS) + ? datahubConfig.getString(TABLE_HIVE_PLATFORM_ALIAS) + : "hive"; } private static boolean getIncludeScheme(Config datahubConfig) { - return datahubConfig.hasPath(INCLUDE_SCHEME_KEY) ? datahubConfig.getBoolean(INCLUDE_SCHEME_KEY) + return datahubConfig.hasPath(INCLUDE_SCHEME_KEY) + ? datahubConfig.getBoolean(INCLUDE_SCHEME_KEY) : true; } private static String getRemovePartitionPattern(Config datahubConfig) { - return datahubConfig.hasPath(REMOVE_PARTITION_PATTERN) ? datahubConfig.getString(REMOVE_PARTITION_PATTERN) + return datahubConfig.hasPath(REMOVE_PARTITION_PATTERN) + ? datahubConfig.getString(REMOVE_PARTITION_PATTERN) : null; } } diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java index 6ddc5729d88f6..4e6eadc61bae0 100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/CoalesceJobsEmitter.java @@ -1,30 +1,27 @@ package datahub.spark.consumer.impl; -import java.io.IOException; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Set; -import java.util.TreeSet; - -import com.linkedin.data.template.StringMap; - import com.linkedin.common.DataJobUrnArray; import com.linkedin.common.DatasetUrnArray; import com.linkedin.common.urn.DataJobUrn; import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.data.template.StringMap; import com.linkedin.datajob.DataJobInfo; import com.linkedin.datajob.DataJobInputOutput; import com.linkedin.datajob.JobStatus; import com.typesafe.config.Config; - import datahub.event.MetadataChangeProposalWrapper; import datahub.spark.model.AppEndEvent; import datahub.spark.model.AppStartEvent; import datahub.spark.model.LineageEvent; import datahub.spark.model.SQLQueryExecStartEvent; +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -37,7 +34,8 @@ public class CoalesceJobsEmitter extends McpEmitter { public CoalesceJobsEmitter(Config datahubConf) { super(datahubConf); - parentJobUrn = datahubConf.hasPath(PARENT_JOB_KEY) ? datahubConf.getString(PARENT_JOB_KEY) : null; + parentJobUrn = + datahubConf.hasPath(PARENT_JOB_KEY) ? datahubConf.getString(PARENT_JOB_KEY) : null; log.info("CoalesceJobsEmitter initialised with " + PARENT_JOB_KEY + ":" + parentJobUrn); } @@ -50,13 +48,21 @@ public void accept(LineageEvent evt) { } else if (evt instanceof SQLQueryExecStartEvent) { SQLQueryExecStartEvent sqlQueryExecStartEvent = (SQLQueryExecStartEvent) evt; sqlQueryExecStartEvents.add(sqlQueryExecStartEvent); - log.debug("SQLQueryExecStartEvent received for processing. 
for app: " + sqlQueryExecStartEvent.getAppId() + ":" - + sqlQueryExecStartEvent.getAppName() + "sqlID: " + sqlQueryExecStartEvent.getSqlQueryExecId()); + log.debug( + "SQLQueryExecStartEvent received for processing. for app: " + + sqlQueryExecStartEvent.getAppId() + + ":" + + sqlQueryExecStartEvent.getAppName() + + "sqlID: " + + sqlQueryExecStartEvent.getSqlQueryExecId()); } else if (evt instanceof AppEndEvent) { AppEndEvent appEndEvent = (AppEndEvent) evt; if (appStartEvent == null) { - log.error("Application End event received for processing but start event is not received for processing for " - + appEndEvent.getAppId() + "-" + appEndEvent.getAppName()); + log.error( + "Application End event received for processing but start event is not received for processing for " + + appEndEvent.getAppId() + + "-" + + appEndEvent.getAppName()); return; } log.debug("AppEndEvent received for processing. for app start :" + appEndEvent.getAppId()); @@ -65,7 +71,8 @@ public void accept(LineageEvent evt) { } } - private List squashSQLQueryExecStartEvents(AppEndEvent appEndEvent) { + private List squashSQLQueryExecStartEvents( + AppEndEvent appEndEvent) { DataJobUrn jobUrn = new DataJobUrn(appStartEvent.getFlowUrn(), appStartEvent.getAppName()); @@ -85,11 +92,15 @@ private List squashSQLQueryExecStartEvents(AppEnd log.warn(PARENT_JOB_KEY + " is not a valid Datajob URN. Skipping setting up upstream job."); } - DataJobInputOutput jobio = new DataJobInputOutput().setInputDatasets(new DatasetUrnArray(inSet)) - .setOutputDatasets(new DatasetUrnArray(outSet)).setInputDatajobs(upStreamjobs); + DataJobInputOutput jobio = + new DataJobInputOutput() + .setInputDatasets(new DatasetUrnArray(inSet)) + .setOutputDatasets(new DatasetUrnArray(outSet)) + .setInputDatajobs(upStreamjobs); - MetadataChangeProposalWrapper mcpJobIO = MetadataChangeProposalWrapper - .create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobio)); + MetadataChangeProposalWrapper mcpJobIO = + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobio)); StringMap customProps = new StringMap(); customProps.put("startedAt", appStartEvent.timeStr()); @@ -97,15 +108,17 @@ private List squashSQLQueryExecStartEvents(AppEnd customProps.put("appName", appStartEvent.getAppName()); customProps.put("completedAt", appEndEvent.timeStr()); - DataJobInfo jobInfo = new DataJobInfo().setName(appStartEvent.getAppName()) - .setType(DataJobInfo.Type.create("sparkJob")); + DataJobInfo jobInfo = + new DataJobInfo() + .setName(appStartEvent.getAppName()) + .setType(DataJobInfo.Type.create("sparkJob")); jobInfo.setCustomProperties(customProps); jobInfo.setStatus(JobStatus.COMPLETED); - MetadataChangeProposalWrapper mcpJobInfo = MetadataChangeProposalWrapper - .create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); + MetadataChangeProposalWrapper mcpJobInfo = + MetadataChangeProposalWrapper.create( + b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)); return Arrays.asList(mcpJobIO, mcpJobInfo); - } @Override @@ -120,5 +133,4 @@ class DataSetUrnComparator implements Comparator { public int compare(DatasetUrn urn1, DatasetUrn urn2) { return urn1.toString().compareTo(urn2.toString()); } - -} \ No newline at end of file +} diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java index 336246fa9d3e8..918ce48d1cf42 
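Aside: every aspect in this emitter goes out through the same wrapper idiom. A minimal sketch of the builder call used throughout this file, assuming jobUrn (DataJobUrn) and jobInfo (DataJobInfo) were already built as in squashSQLQueryExecStartEvents above:

    // sketch only; jobUrn and jobInfo are placeholders for values built elsewhere
    MetadataChangeProposalWrapper mcpJobInfo =
        MetadataChangeProposalWrapper.create(
            b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo));
    emit(Arrays.asList(mcpJobInfo));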
100644 --- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java +++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/consumer/impl/McpEmitter.java @@ -1,23 +1,20 @@ package datahub.spark.consumer.impl; -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; - import com.typesafe.config.Config; - import datahub.client.Emitter; import datahub.client.rest.RestEmitter; import datahub.client.rest.RestEmitterConfig; import datahub.event.MetadataChangeProposalWrapper; import datahub.spark.model.LineageConsumer; import datahub.spark.model.LineageEvent; +import java.io.IOException; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class McpEmitter implements LineageConsumer { @@ -27,19 +24,21 @@ public class McpEmitter implements LineageConsumer { private static final String GMS_URL_KEY = "rest.server"; private static final String GMS_AUTH_TOKEN = "rest.token"; private static final String DISABLE_SSL_VERIFICATION_KEY = "rest.disable_ssl_verification"; + private Optional getEmitter() { Optional emitter = Optional.empty(); switch (emitterType) { - case "rest": - if (restEmitterConfig.isPresent()) { - emitter = Optional.of(new RestEmitter(restEmitterConfig.get())); - } - break; - - default: - log.error("DataHub Transport {} not recognized. DataHub Lineage emission will not work", emitterType); - break; - + case "rest": + if (restEmitterConfig.isPresent()) { + emitter = Optional.of(new RestEmitter(restEmitterConfig.get())); + } + break; + + default: + log.error( + "DataHub Transport {} not recognized. DataHub Lineage emission will not work", + emitterType); + break; } return emitter; } @@ -47,22 +46,28 @@ private Optional getEmitter() { protected void emit(List mcpws) { Optional emitter = getEmitter(); if (emitter.isPresent()) { - mcpws.stream().map(mcpw -> { - try { - log.debug("emitting mcpw: " + mcpw); - return emitter.get().emit(mcpw); - } catch (IOException ioException) { - log.error("Failed to emit metadata to DataHub", ioException); - return null; - } - }).filter(Objects::nonNull).collect(Collectors.toList()).forEach(future -> { - try { - log.info(future.get().toString()); - } catch (InterruptedException | ExecutionException e) { - // log error, but don't impact thread - log.error("Failed to emit metadata to DataHub", e); - } - }); + mcpws.stream() + .map( + mcpw -> { + try { + log.debug("emitting mcpw: " + mcpw); + return emitter.get().emit(mcpw); + } catch (IOException ioException) { + log.error("Failed to emit metadata to DataHub", ioException); + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()) + .forEach( + future -> { + try { + log.info(future.get().toString()); + } catch (InterruptedException | ExecutionException e) { + // log error, but don't impact thread + log.error("Failed to emit metadata to DataHub", e); + } + }); try { emitter.get().close(); } catch (IOException e) { @@ -72,31 +77,45 @@ protected void emit(List mcpws) { } public McpEmitter(Config datahubConf) { - emitterType = datahubConf.hasPath(TRANSPORT_KEY) ? datahubConf.getString(TRANSPORT_KEY) : "rest"; - switch (emitterType) { + emitterType = + datahubConf.hasPath(TRANSPORT_KEY) ? 
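/* "rest" is the only transport wired up here; any other value hits the default case below, logs an error and disables lineage emission */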
datahubConf.getString(TRANSPORT_KEY) : "rest";
+
     switch (emitterType) {
       case "rest":
-        String gmsUrl = datahubConf.hasPath(GMS_URL_KEY) ? datahubConf.getString(GMS_URL_KEY)
-            : "http://localhost:8080";
-        String token = datahubConf.hasPath(GMS_AUTH_TOKEN) ? datahubConf.getString(GMS_AUTH_TOKEN) : null;
-        boolean disableSslVerification = datahubConf.hasPath(DISABLE_SSL_VERIFICATION_KEY) ? datahubConf.getBoolean(
-            DISABLE_SSL_VERIFICATION_KEY) : false;
-        log.info("REST Emitter Configuration: GMS url {}{}", gmsUrl,
-            (datahubConf.hasPath(GMS_URL_KEY) ? "" : "(default)"));
-        if (token != null) {
-          log.info("REST Emitter Configuration: Token {}", (token != null) ? "XXXXX" : "(empty)");
-        }
-        if (disableSslVerification) {
-          log.warn("REST Emitter Configuration: ssl verification will be disabled.");
-        }
-        restEmitterConfig = Optional.of(RestEmitterConfig.builder()
-            .server(gmsUrl).token(token)
-            .disableSslVerification(disableSslVerification).build());
-
-        break;
+        String gmsUrl =
+            datahubConf.hasPath(GMS_URL_KEY)
+                ? datahubConf.getString(GMS_URL_KEY)
+                : "http://localhost:8080";
+        String token =
+            datahubConf.hasPath(GMS_AUTH_TOKEN) ? datahubConf.getString(GMS_AUTH_TOKEN) : null;
+        boolean disableSslVerification =
+            datahubConf.hasPath(DISABLE_SSL_VERIFICATION_KEY)
+                ? datahubConf.getBoolean(DISABLE_SSL_VERIFICATION_KEY)
+                : false;
+        log.info(
+            "REST Emitter Configuration: GMS url {}{}",
+            gmsUrl,
+            (datahubConf.hasPath(GMS_URL_KEY) ? "" : "(default)"));
+        if (token != null) {
+          log.info("REST Emitter Configuration: Token {}", (token != null) ? "XXXXX" : "(empty)");
+        }
+        if (disableSslVerification) {
+          log.warn("REST Emitter Configuration: ssl verification will be disabled.");
+        }
+        restEmitterConfig =
+            Optional.of(
+                RestEmitterConfig.builder()
+                    .server(gmsUrl)
+                    .token(token)
+                    .disableSslVerification(disableSslVerification)
+                    .build());
+
+        break;
       default:
-        log.error("DataHub Transport {} not recognized. DataHub Lineage emission will not work", emitterType);
-        break;
-    }
+        log.error(
+            "DataHub Transport {} not recognized. DataHub Lineage emission will not work",
+            emitterType);
+        break;
+    }
   }
 
   @Override
@@ -107,8 +126,6 @@ public void accept(LineageEvent evt) {
 
   @Override
   public void close() throws IOException {
     // Nothing to close at this point
-
-  }
-
-}
\ No newline at end of file
+  }
+}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java
index 64aef77ddce2f..ac4d3a96308f3 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppEndEvent.java
@@ -9,7 +9,6 @@
 import lombok.Getter;
 import lombok.ToString;
 
-
 @ToString
 @Getter
 public class AppEndEvent extends LineageEvent {
@@ -28,9 +27,11 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() {
     StringMap customProps = start.customProps();
     customProps.put("completedAt", timeStr());
 
-    DataFlowInfo flowInfo = new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps);
+    DataFlowInfo flowInfo =
+        new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps);
 
-    return Collections.singletonList(MetadataChangeProposalWrapper.create(
-        b -> b.entityType("dataFlow").entityUrn(flowUrn).upsert().aspect(flowInfo)));
+    return Collections.singletonList(
+        MetadataChangeProposalWrapper.create(
+            b -> b.entityType("dataFlow").entityUrn(flowUrn).upsert().aspect(flowInfo)));
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java
index 393de44164ac2..b7f9b462c409f 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/AppStartEvent.java
@@ -1,19 +1,17 @@
 package datahub.spark.model;
 
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-
 import com.linkedin.common.DataPlatformInstance;
 import com.linkedin.common.urn.DataFlowUrn;
 import com.linkedin.common.urn.DataPlatformUrn;
 import com.linkedin.data.template.StringMap;
 import com.linkedin.datajob.DataFlowInfo;
 import com.typesafe.config.Config;
-
 import datahub.event.MetadataChangeProposalWrapper;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
 import lombok.Getter;
 import lombok.ToString;
 import lombok.extern.slf4j.Slf4j;
@@ -28,7 +26,12 @@ public class AppStartEvent extends LineageEvent {
   private final String sparkUser;
   private Config pipelineConfig;
 
-  public AppStartEvent(String master, String appName, String appId, long time, String sparkUser,
+  public AppStartEvent(
+      String master,
+      String appName,
+      String appId,
+      long time,
+      String sparkUser,
       Config pipelineConfig) {
     super(master, appName, appId, time);
     this.sparkUser = sparkUser;
@@ -38,18 +41,22 @@ public AppStartEvent(String master, String appName, String appId, long time, Str
   public DataFlowUrn getFlowUrn() {
     return LineageUtils.flowUrn(getMaster(), getAppName());
   }
-
+
   @Override
   public List<MetadataChangeProposalWrapper> asMetadataEvents() {
     ArrayList<MetadataChangeProposalWrapper> mcps = new ArrayList<MetadataChangeProposalWrapper>();
     if (this.pipelineConfig.hasPath(PLATFORM_INSTANCE_KEY)) {
       try {
-        DataPlatformInstance dpi = new DataPlatformInstance().setPlatform(new DataPlatformUrn(PLATFORM_SPARK))
-            .setInstance(LineageUtils.dataPlatformInstanceUrn(PLATFORM_SPARK,
-                this.pipelineConfig.getString(PLATFORM_INSTANCE_KEY)));
-        mcps.add(MetadataChangeProposalWrapper
-            .create(b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(dpi)));
+        DataPlatformInstance dpi =
+            new DataPlatformInstance()
+                .setPlatform(new DataPlatformUrn(PLATFORM_SPARK))
+                .setInstance(
+                    LineageUtils.dataPlatformInstanceUrn(
+                        PLATFORM_SPARK, this.pipelineConfig.getString(PLATFORM_INSTANCE_KEY)));
+        mcps.add(
+            MetadataChangeProposalWrapper.create(
+                b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(dpi)));
       } catch (URISyntaxException e) {
         // log error, but don't impact thread
         StringWriter s = new StringWriter();
@@ -59,9 +66,11 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() {
         p.close();
       }
     }
-    DataFlowInfo flowInfo = new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps());
-    mcps.add(MetadataChangeProposalWrapper
-        .create(b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(flowInfo)));
+    DataFlowInfo flowInfo =
+        new DataFlowInfo().setName(getAppName()).setCustomProperties(customProps());
+    mcps.add(
+        MetadataChangeProposalWrapper.create(
+            b -> b.entityType("dataFlow").entityUrn(getFlowUrn()).upsert().aspect(flowInfo)));
     return mcps;
   }
 
@@ -73,4 +82,4 @@ StringMap customProps() {
     customProps.put("sparkUser", sparkUser);
     return customProps;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java
index 9583ab69a2d73..996a911ced9f9 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/DatasetLineage.java
@@ -1,11 +1,9 @@
 package datahub.spark.model;
 
+import datahub.spark.model.dataset.SparkDataset;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
-
-import datahub.spark.model.dataset.SparkDataset;
-
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
 import lombok.ToString;
@@ -16,14 +14,11 @@ public class DatasetLineage {
 
   private final Set<SparkDataset> sources = new HashSet<>();
 
-  @Getter
-  private final String callSiteShort;
-
-  @Getter
-  private final String plan;
+  @Getter private final String callSiteShort;
+
+  @Getter private final String plan;
 
-  @Getter
-  private final SparkDataset sink;
+  @Getter private final SparkDataset sink;
 
   public void addSource(SparkDataset source) {
     sources.add(source);
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java
index 890ed6329c47b..aa2d998ea5c99 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageConsumer.java
@@ -3,5 +3,4 @@
 import java.io.Closeable;
 import java.util.function.Consumer;
 
-public interface LineageConsumer extends Consumer<LineageEvent>, Closeable {
-}
+public interface LineageConsumer extends Consumer<LineageEvent>, Closeable {}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java
index 37b949a454b0d..a88474650c510 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageEvent.java
@@ -1,9 +1,8 @@
 package datahub.spark.model;
 
+import datahub.event.MetadataChangeProposalWrapper;
 import java.util.Date;
 import java.util.List;
-
-import datahub.event.MetadataChangeProposalWrapper;
 import lombok.Data;
 
 @Data
@@ -18,4 +17,4 @@ public abstract class LineageEvent {
   public String timeStr() {
     return new Date(getTime()).toInstant().toString();
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java
index ad837f034ad64..ad628666a263d 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/LineageUtils.java
@@ -8,15 +8,12 @@
 import com.linkedin.common.urn.Urn;
 import com.typesafe.config.Config;
 import com.typesafe.config.ConfigFactory;
-
 import java.net.URISyntaxException;
 import java.util.Arrays;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
-
 import lombok.extern.slf4j.Slf4j;
-
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkContext;
 import org.apache.spark.SparkContext$;
@@ -35,33 +32,38 @@ public class LineageUtils {
 
   /* This is for generating urn from a hash of the plan */
   // private static Function PATH_REPLACER = (x -> x);
 
-  private LineageUtils() {
-
-  }
+  private LineageUtils() {}
 
-  public static Urn dataPlatformInstanceUrn(String platform, String instance) throws URISyntaxException {
-    return new Urn("dataPlatformInstance",
+  public static Urn dataPlatformInstanceUrn(String platform, String instance)
+      throws URISyntaxException {
+    return new Urn(
+        "dataPlatformInstance",
         new TupleKey(Arrays.asList(new DataPlatformUrn(platform).toString(), instance)));
   }
 
   public static DataFlowUrn flowUrn(String master, String appName) {
-    return new DataFlowUrn("spark", appName, master.replaceAll(":", "_").replaceAll("/", "_").replaceAll("[_]+", "_"));
+    return new DataFlowUrn(
+        "spark", appName, master.replaceAll(":", "_").replaceAll("/", "_").replaceAll("[_]+", "_"));
   }
 
   public static Option<SparkContext> findSparkCtx() {
-    return SparkSession.getActiveSession().map(new AbstractFunction1<SparkSession, SparkContext>() {
-
-      @Override
-      public SparkContext apply(SparkSession sess) {
-        return sess.sparkContext();
-      }
-    }).orElse(new AbstractFunction0<Option<SparkContext>>() {
-
-      @Override
-      public Option<SparkContext> apply() {
-        return SparkContext$.MODULE$.getActive();
-      }
-    });
+    return SparkSession.getActiveSession()
+        .map(
+            new AbstractFunction1<SparkSession, SparkContext>() {
+
+              @Override
+              public SparkContext apply(SparkSession sess) {
+                return sess.sparkContext();
+              }
+            })
+        .orElse(
+            new AbstractFunction0<Option<SparkContext>>() {
+
+              @Override
+              public Option<SparkContext> apply() {
+                return SparkContext$.MODULE$.getActive();
+              }
+            });
   }
 
   public static String getMaster(SparkContext ctx) {
@@ -79,14 +81,16 @@ public static LineageConsumer getConsumer(String consumerType) {
 
   public static Config parseSparkConfig() {
     SparkConf conf = SparkEnv.get().conf();
-    String propertiesString = Arrays.stream(conf.getAllWithPrefix("spark.datahub."))
-        .map(tup -> tup._1 + "= \"" + tup._2 + "\"").collect(Collectors.joining("\n"));
+    String propertiesString =
+        Arrays.stream(conf.getAllWithPrefix("spark.datahub."))
+            .map(tup -> tup._1 + "= \"" + tup._2 + "\"")
+            .collect(Collectors.joining("\n"));
     return ConfigFactory.parseString(propertiesString);
   }
 
   // TODO: URN creation with platform instance needs to be inside DatasetUrn class
-  public static DatasetUrn createDatasetUrn(String platform, String platformInstance, String name,
-      FabricType fabricType) {
+  public static DatasetUrn createDatasetUrn(
+      String platform, String platformInstance, String name, FabricType fabricType) {
     String datasteName = platformInstance == null ? name : platformInstance + "." + name;
     return new DatasetUrn(new DataPlatformUrn(platform), datasteName, fabricType);
  }
@@ -103,10 +107,10 @@ public static DatasetUrn createDatasetUrn(String platform, String platformInstan
   * ""); s = s.replaceAll("Statistics:[^\n]+\n", ""); s =
   * s.replaceAll("Table Properties:[^\n]+\n", ""); //
   * System.out.println("CLEAN: " + s); return s; }
-  * 
+  *
   * public static void setPathReplacer(Function replacer) {
   * PATH_REPLACER = replacer; }
-  * 
+  *
   * public static String hash(String s) { s = PATH_REPLACER.apply(s);
   * log.debug("PATH REPLACED " + s); return Hashing.md5().hashString(s,
   * Charset.forName("US-ASCII")).toString(); }
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java
index 6505cd586b2b5..17d5b941bced2 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecEndEvent.java
@@ -9,7 +9,6 @@
 import lombok.Getter;
 import lombok.ToString;
 
-
 @ToString
 @Getter
 public class SQLQueryExecEndEvent extends LineageEvent {
@@ -17,7 +16,12 @@ public class SQLQueryExecEndEvent extends LineageEvent {
   private final long sqlQueryExecId;
   private final SQLQueryExecStartEvent start;
 
-  public SQLQueryExecEndEvent(String master, String appName, String appId, long time, long sqlQueryExecId,
+  public SQLQueryExecEndEvent(
+      String master,
+      String appName,
+      String appId,
+      long time,
+      long sqlQueryExecId,
       SQLQueryExecStartEvent start) {
     super(master, appName, appId, time);
     this.sqlQueryExecId = sqlQueryExecId;
@@ -33,6 +37,7 @@ public List<MetadataChangeProposalWrapper> asMetadataEvents() {
     DataJobInfo jobInfo = start.jobInfo().setCustomProperties(customProps);
 
     return Collections.singletonList(
-        MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)));
+        MetadataChangeProposalWrapper.create(
+            b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo)));
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java
index 0919f40c7e1c9..dbd56a59838bc 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/SQLQueryExecStartEvent.java
@@ -4,11 +4,11 @@
 import com.linkedin.common.urn.DataFlowUrn;
 import com.linkedin.common.urn.DataJobUrn;
 import com.linkedin.data.template.StringMap;
-import datahub.spark.model.dataset.SparkDataset;
 import com.linkedin.datajob.DataJobInfo;
 import com.linkedin.datajob.DataJobInputOutput;
 import com.linkedin.datajob.JobStatus;
 import datahub.event.MetadataChangeProposalWrapper;
+import datahub.spark.model.dataset.SparkDataset;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.List;
@@ -17,14 +17,18 @@
 import lombok.Getter;
 import lombok.ToString;
 
-
 @ToString
 @Getter
 public class SQLQueryExecStartEvent extends LineageEvent {
   private final long sqlQueryExecId;
   private final DatasetLineage datasetLineage;
 
-  public SQLQueryExecStartEvent(String master, String appName, String appId, long time, long sqlQueryExecId,
+  public SQLQueryExecStartEvent(
+      String master,
+      String appName,
+      String appId,
+      long time,
+      long sqlQueryExecId,
       DatasetLineage datasetLineage) {
     super(master, appName, appId, time);
     this.sqlQueryExecId = sqlQueryExecId;
@@ -35,20 +39,24 @@ public SQLQueryExecStartEvent(String master, String appName, String appId, long
   public List<MetadataChangeProposalWrapper> asMetadataEvents() {
     DataJobUrn jobUrn = jobUrn();
     MetadataChangeProposalWrapper mcpJobIO =
-        MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobIO()));
+        MetadataChangeProposalWrapper.create(
+            b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobIO()));
 
     DataJobInfo jobInfo = jobInfo();
     jobInfo.setCustomProperties(customProps());
     jobInfo.setStatus(JobStatus.IN_PROGRESS);
 
     MetadataChangeProposalWrapper mcpJobInfo =
-        MetadataChangeProposalWrapper.create(b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo));
+        MetadataChangeProposalWrapper.create(
+            b -> b.entityType("dataJob").entityUrn(jobUrn).upsert().aspect(jobInfo));
 
     return Arrays.asList(mcpJobIO, mcpJobInfo);
   }
 
   DataJobInfo jobInfo() {
-    return new DataJobInfo().setName(datasetLineage.getCallSiteShort()).setType(DataJobInfo.Type.create("sparkJob"));
+    return new DataJobInfo()
+        .setName(datasetLineage.getCallSiteShort())
+        .setType(DataJobInfo.Type.create("sparkJob"));
   }
 
   DataJobUrn jobUrn() {
@@ -91,12 +99,14 @@ public DatasetUrnArray getOuputDatasets() {
 
   public DatasetUrnArray getInputDatasets() {
     DatasetUrnArray in = new DatasetUrnArray();
-    Set<SparkDataset> sources = new TreeSet<>(new Comparator<SparkDataset>() {
-      @Override
-      public int compare(SparkDataset x, SparkDataset y) {
-        return x.urn().toString().compareTo(y.urn().toString());
-      }
-    });
+    Set<SparkDataset> sources =
+        new TreeSet<>(
+            new Comparator<SparkDataset>() {
+              @Override
+              public int compare(SparkDataset x, SparkDataset y) {
+                return x.urn().toString().compareTo(y.urn().toString());
+              }
+            });
     sources.addAll(datasetLineage.getSources()); // maintain ordering
     for (SparkDataset source : sources) {
       in.add(source.urn());
@@ -106,8 +116,10 @@ public int compare(SparkDataset x, SparkDataset y) {
 
   private DataJobInputOutput jobIO() {
-    DataJobInputOutput io = new DataJobInputOutput().setInputDatasets(getInputDatasets())
-        .setOutputDatasets(getOuputDatasets());
+    DataJobInputOutput io =
+        new DataJobInputOutput()
+            .setInputDatasets(getInputDatasets())
+            .setOutputDatasets(getOuputDatasets());
     return io;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java
index 47552c69d78c4..13f70392f5bf5 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/CatalogTableDataset.java
@@ -1,20 +1,19 @@
 package datahub.spark.model.dataset;
 
-import org.apache.spark.sql.catalyst.catalog.CatalogTable;
-
 import com.linkedin.common.FabricType;
-
 import lombok.ToString;
+import org.apache.spark.sql.catalyst.catalog.CatalogTable;
 
 @ToString
 public class CatalogTableDataset extends SparkDataset {
 
-  public CatalogTableDataset(CatalogTable table, String platformInstance, String platform, FabricType fabricType) {
+  public CatalogTableDataset(
+      CatalogTable table, String platformInstance, String platform, FabricType fabricType) {
     this(table.qualifiedName(), platformInstance, platform, fabricType);
   }
 
-  public CatalogTableDataset(String dsName, String platformInstance, String platform, FabricType fabricType) {
+  public CatalogTableDataset(
+      String dsName, String platformInstance, String platform, FabricType fabricType) {
     super(platform, platformInstance, dsName, fabricType);
   }
-
 }
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java
index 700aef5d6b15a..c9b05f6a1d22f 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/HdfsPathDataset.java
@@ -1,12 +1,9 @@
 package datahub.spark.model.dataset;
 
-import org.apache.hadoop.fs.Path;
-
 import com.linkedin.common.FabricType;
-
-import lombok.ToString;
-
 import java.net.URI;
+import lombok.ToString;
+import org.apache.hadoop.fs.Path;
 
 @ToString
 public class HdfsPathDataset extends SparkDataset {
@@ -30,18 +27,22 @@ private static String getPlatform(Path path) {
   }
 
   public HdfsPathDataset(
-    Path path,
-    String platformInstance,
-    boolean includeScheme,
-    FabricType fabricType,
-    String removePartitionPattern) {
+      Path path,
+      String platformInstance,
+      boolean includeScheme,
+      FabricType fabricType,
+      String removePartitionPattern) {
     // TODO check static partitions?
-    this(getPath(path, includeScheme, removePartitionPattern), platformInstance, getPlatform(path), fabricType);
+    this(
+        getPath(path, includeScheme, removePartitionPattern),
+        platformInstance,
+        getPlatform(path),
+        fabricType);
   }
 
-  public HdfsPathDataset(String pathUri, String platformInstance, String platform, FabricType fabricType) {
+  public HdfsPathDataset(
+      String pathUri, String platformInstance, String platform, FabricType fabricType) {
     // TODO check static partitions?
     super(platform, platformInstance, pathUri, fabricType);
   }
-
 }
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java
index ea156b49fada7..1cdca6092bcb7 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/JdbcDataset.java
@@ -1,17 +1,16 @@
 package datahub.spark.model.dataset;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import com.linkedin.common.FabricType;
-
 import io.opentracing.contrib.jdbc.parser.URLParser;
+import java.util.HashMap;
+import java.util.Map;
 import lombok.ToString;
 
 @ToString
 public class JdbcDataset extends SparkDataset {
-  //TODO: Should map to the central location on datahub for platform names
+  // TODO: Should map to the central location on datahub for platform names
   private static final Map<String, String> PLATFORM_NAME_MAPPING = new HashMap<>();
+
   static {
     PLATFORM_NAME_MAPPING.put("postgresql", "postgres");
   }
diff --git a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java
index 546b737576e60..64e14f5e31542 100644
--- a/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java
+++ b/metadata-integration/java/spark-lineage/src/main/java/datahub/spark/model/dataset/SparkDataset.java
@@ -2,16 +2,16 @@
 
 import com.linkedin.common.FabricType;
 import com.linkedin.common.urn.DatasetUrn;
-
 import datahub.spark.model.LineageUtils;
 import lombok.EqualsAndHashCode;
 
 @EqualsAndHashCode
 public abstract class SparkDataset {
-
+
   private DatasetUrn urn;
-
-  public SparkDataset(String platform, String platformInstance, String name, FabricType fabricType) {
+
+  public SparkDataset(
+      String platform, String platformInstance, String name, FabricType fabricType) {
     super();
     this.urn = LineageUtils.createDatasetUrn(platform, platformInstance, name, fabricType);
   }
diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java
index 447200d855a36..2df468fc03e74 100644
--- a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java
+++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestCoalesceJobLineage.java
@@ -3,13 +3,13 @@
 import static org.mockserver.integration.ClientAndServer.startClientAndServer;
 import static org.mockserver.model.HttpRequest.request;
 
+import com.linkedin.common.FabricType;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.List;
 import java.util.Properties;
-
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
@@ -29,8 +29,6 @@
 import org.mockserver.socket.PortFactory;
 import org.mockserver.verify.VerificationTimes;
 
-import com.linkedin.common.FabricType;
-
 public class TestCoalesceJobLineage {
   private static final boolean MOCK_GMS = Boolean.valueOf("true");
   // if false, MCPs get written to real GMS server (see GMS_PORT)
@@ -59,29 +57,42 @@ public class TestCoalesceJobLineage {
 
   private static SparkSession spark;
   private static Properties 
jdbcConnnProperties; private static ClientAndServer mockServer; - @Rule - public TestRule mockServerWatcher = new TestWatcher() { - @Override - protected void finished(Description description) { - if (!VERIFY_EXPECTED) { - return; - } - verifyTestScenario(description.getMethodName()); - clear(); - super.finished(description); - } - }; + @Rule + public TestRule mockServerWatcher = + new TestWatcher() { + + @Override + protected void finished(Description description) { + if (!VERIFY_EXPECTED) { + return; + } + verifyTestScenario(description.getMethodName()); + clear(); + super.finished(description); + } + }; private static String addLocalPath(String s) { - return s.replaceAll("file:/" + RESOURCE_DIR, "file://" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); + return s.replaceAll( + "file:/" + RESOURCE_DIR, "file://" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); } public static void resetBaseExpectations() { - mockServer.when(request().withMethod("GET").withPath("/config").withHeader("Content-type", "application/json"), - Times.unlimited()).respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); mockServer - .when(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); + mockServer + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), Times.unlimited()) .respond(HttpResponse.response().withStatusCode(200)); } @@ -95,11 +106,16 @@ public static void initMockServer() { public static void verifyTestScenario(String testName) { String expectationFileName = testName + ".json"; try { - List expected = Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName).toAbsolutePath()); + List expected = + Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName).toAbsolutePath()); for (String content : expected) { String swappedContent = addLocalPath(content); - mockServer.verify(request().withMethod("POST").withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal").withBody(new JsonBody(swappedContent)), + mockServer.verify( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withBody(new JsonBody(swappedContent)), VerificationTimes.atLeast(1)); } } catch (IOException ioe) { @@ -112,23 +128,33 @@ public void setup() { resetBaseExpectations(); System.setProperty("user.dir", Paths.get("coalesce-test").toAbsolutePath().toString()); - spark = SparkSession.builder().appName(APP_NAME).config("spark.master", MASTER) - .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") - .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) - .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()).config("spark.datahub.coalesce_jobs", "true") - .config("spark.datahub.parent.datajob_urn", - "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") - .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()).enableHiveSupport().getOrCreate(); 
+ spark = + SparkSession.builder() + .appName(APP_NAME) + .config("spark.master", MASTER) + .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") + .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) + .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) + .config("spark.datahub.coalesce_jobs", "true") + .config( + "spark.datahub.parent.datajob_urn", + "urn:li:dataJob:(urn:li:dataFlow:(airflow,datahub_analytics_refresh,prod),load_dashboard_info_to_snowflake)") + .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .enableHiveSupport() + .getOrCreate(); spark.sql("drop database if exists " + TEST_DB + " cascade"); spark.sql("create database " + TEST_DB); } private static void clear() { - mockServer - .clear(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal")); + mockServer.clear( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal")); } @After @@ -150,27 +176,44 @@ public static void verify(int numRequests) { return; } mockServer.verify( - request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), VerificationTimes.exactly(numRequests)); } @Test public void testHiveInHiveOutCoalesce() throws Exception { - Dataset df1 = spark.read().option("header", "true").csv(new File(DATA_DIR + "/in1.csv").getAbsolutePath()).withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset df2 = spark.read().option("header", "true").csv(new File(DATA_DIR + "/in2.csv").getAbsolutePath()).withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(new File(DATA_DIR + "/in1.csv").getAbsolutePath()) + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(new File(DATA_DIR + "/in2.csv").getAbsolutePath()) + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); // CreateHiveTableAsSelectCommand spark.sql( - "create table " + tbl("foo_coalesce") + " as " + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + "create table " + + tbl("foo_coalesce") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); // CreateHiveTableAsSelectCommand - spark.sql("create table " + tbl("hivetab") + " as " + "(select * from " + tbl("foo_coalesce") + ")"); + spark.sql( + "create table " + tbl("hivetab") + " as " + "(select * from " + tbl("foo_coalesce") + ")"); // InsertIntoHiveTable spark.sql("insert into " + tbl("hivetab") + " (select * from " + tbl("foo_coalesce") + ")"); @@ -181,5 +224,4 @@ public void testHiveInHiveOutCoalesce() throws Exception { df.write().insertInto(tbl("hivetab")); Thread.sleep(5000); } - } diff --git a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java index b2280d171e378..3a70c10e0c1f9 100644 --- 
a/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java +++ b/metadata-integration/java/spark-lineage/src/test/java/datahub/spark/TestSparkJobsLineage.java @@ -5,6 +5,16 @@ import static org.mockserver.integration.ClientAndServer.startClientAndServer; import static org.mockserver.model.HttpRequest.request; +import com.linkedin.common.FabricType; +import datahub.spark.model.DatasetLineage; +import datahub.spark.model.LineageConsumer; +import datahub.spark.model.LineageEvent; +import datahub.spark.model.LineageUtils; +import datahub.spark.model.SQLQueryExecStartEvent; +import datahub.spark.model.dataset.CatalogTableDataset; +import datahub.spark.model.dataset.HdfsPathDataset; +import datahub.spark.model.dataset.JdbcDataset; +import datahub.spark.model.dataset.SparkDataset; import java.io.File; import java.io.IOException; import java.nio.file.Files; @@ -18,7 +28,6 @@ import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; - import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -43,23 +52,11 @@ import org.mockserver.socket.PortFactory; import org.mockserver.verify.VerificationTimes; import org.testcontainers.containers.PostgreSQLContainer; - -import com.linkedin.common.FabricType; - -import datahub.spark.model.DatasetLineage; -import datahub.spark.model.LineageConsumer; -import datahub.spark.model.LineageEvent; -import datahub.spark.model.LineageUtils; -import datahub.spark.model.SQLQueryExecStartEvent; -import datahub.spark.model.dataset.CatalogTableDataset; -import datahub.spark.model.dataset.HdfsPathDataset; -import datahub.spark.model.dataset.JdbcDataset; -import datahub.spark.model.dataset.SparkDataset; import org.testcontainers.containers.wait.strategy.Wait; -//!!!! IMP !!!!!!!! -//Add the test number before naming the test. This will ensure that tests run in specified order. -//This is necessary to have fixed query execution numbers. Otherwise tests will fail. +// !!!! IMP !!!!!!!! +// Add the test number before naming the test. This will ensure that tests run in specified order. +// This is necessary to have fixed query execution numbers. Otherwise tests will fail. 
@FixMethodOrder(MethodSorters.NAME_ASCENDING) public class TestSparkJobsLineage { private static final boolean MOCK_GMS = Boolean.valueOf("true"); @@ -88,40 +85,53 @@ public class TestSparkJobsLineage { private static final String DATASET_PLATFORM_INSTANCE = "test_dev_dataset"; private static final String TABLE_PLATFORM = "hive"; - @ClassRule - public static PostgreSQLContainer db; + @ClassRule public static PostgreSQLContainer db; + static { - db = new PostgreSQLContainer<>("postgres:9.6.12") - .withDatabaseName("sparktestdb"); + db = new PostgreSQLContainer<>("postgres:9.6.12").withDatabaseName("sparktestdb"); db.waitingFor(Wait.forListeningPort()).withStartupTimeout(Duration.ofMinutes(15)).start(); } + private static SparkSession spark; private static Properties jdbcConnnProperties; private static DatasetLineageAccumulator acc; private static ClientAndServer mockServer; - @Rule - public TestRule mockServerWatcher = new TestWatcher() { - @Override - protected void finished(Description description) { - if (!VERIFY_EXPECTED) { - return; - } - verifyTestScenario(description.getMethodName()); - clear(); - super.finished(description); - } - }; + @Rule + public TestRule mockServerWatcher = + new TestWatcher() { + + @Override + protected void finished(Description description) { + if (!VERIFY_EXPECTED) { + return; + } + verifyTestScenario(description.getMethodName()); + clear(); + super.finished(description); + } + }; private static String addLocalPath(String s) { - return s.replaceAll("file:/" + RESOURCE_DIR, "file:" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); + return s.replaceAll( + "file:/" + RESOURCE_DIR, "file:" + Paths.get(RESOURCE_DIR).toAbsolutePath().toString()); } public static void resetBaseExpectations() { - mockServer.when(request().withMethod("GET").withPath("/config").withHeader("Content-type", "application/json"), - Times.unlimited()).respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); mockServer - .when(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal"), + .when( + request() + .withMethod("GET") + .withPath("/config") + .withHeader("Content-type", "application/json"), + Times.unlimited()) + .respond(org.mockserver.model.HttpResponse.response().withBody("{\"noCode\": true }")); + mockServer + .when( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), Times.unlimited()) .respond(HttpResponse.response().withStatusCode(200)); } @@ -134,11 +144,16 @@ public static void init() { public static void verifyTestScenario(String testName) { String expectationFileName = testName + ".json"; try { - List expected = Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName)); + List expected = + Files.readAllLines(Paths.get(EXPECTED_JSON_ROOT, expectationFileName)); for (String content : expected) { String swappedContent = addLocalPath(content); - mockServer.verify(request().withMethod("POST").withPath("/aspects") - .withQueryStringParameter("action", "ingestProposal").withBody(new JsonBody(swappedContent)), + mockServer.verify( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal") + .withBody(new JsonBody(swappedContent)), VerificationTimes.atLeast(1)); } } catch (IOException ioe) { @@ -151,7 +166,10 @@ public static void verify(int numRequests) { return; } mockServer.verify( - request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", 
"ingestProposal"), + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal"), VerificationTimes.exactly(numRequests)); } @@ -162,14 +180,19 @@ public static void setup() { LineageUtils.registerConsumer("accumulator", acc); init(); - spark = SparkSession.builder().appName(APP_NAME).config("spark.master", MASTER) - .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") - .config("spark.datahub.lineage.consumerTypes", "accumulator") - .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) - .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) - .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) - .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()).enableHiveSupport().getOrCreate(); + spark = + SparkSession.builder() + .appName(APP_NAME) + .config("spark.master", MASTER) + .config("spark.extraListeners", "datahub.spark.DatahubSparkListener") + .config("spark.datahub.lineage.consumerTypes", "accumulator") + .config("spark.datahub.rest.server", "http://localhost:" + mockServer.getPort()) + .config("spark.datahub.metadata.pipeline.platformInstance", PIPELINE_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.platformInstance", DATASET_PLATFORM_INSTANCE) + .config("spark.datahub.metadata.dataset.env", DATASET_ENV.name()) + .config("spark.sql.warehouse.dir", new File(WAREHOUSE_LOC).getAbsolutePath()) + .enableHiveSupport() + .getOrCreate(); spark.sql("drop database if exists " + TEST_DB + " cascade"); spark.sql("create database " + TEST_DB); @@ -184,8 +207,11 @@ public static void setup() { } private static void clear() { - mockServer - .clear(request().withMethod("POST").withPath("/aspects").withQueryStringParameter("action", "ingestProposal")); + mockServer.clear( + request() + .withMethod("POST") + .withPath("/aspects") + .withQueryStringParameter("action", "ingestProposal")); } @AfterClass @@ -222,7 +248,8 @@ private static DatasetLineage dsl(String callSite, SparkDataset sink, SparkDatas } private static HdfsPathDataset hdfsDs(String fileName) { - return new HdfsPathDataset("file:" + abs(DATA_DIR + "/" + fileName), DATASET_PLATFORM_INSTANCE, "hdfs", DATASET_ENV); + return new HdfsPathDataset( + "file:" + abs(DATA_DIR + "/" + fileName), DATASET_PLATFORM_INSTANCE, "hdfs", DATASET_ENV); } private static JdbcDataset pgDs(String tbl) { @@ -230,7 +257,8 @@ private static JdbcDataset pgDs(String tbl) { } private static CatalogTableDataset catTblDs(String tbl) { - return new CatalogTableDataset(tbl(tbl), DATASET_PLATFORM_INSTANCE, TABLE_PLATFORM, DATASET_ENV); + return new CatalogTableDataset( + tbl(tbl), DATASET_PLATFORM_INSTANCE, TABLE_PLATFORM, DATASET_ENV); } private static String tbl(String tbl) { @@ -259,8 +287,9 @@ public void test1HdfsInOut() throws Exception { df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); - Dataset df = spark - .sql("select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); + Dataset df = + spark.sql( + "select v1.c1 as a, v1.c2 as b, v2.c1 as c, v2.c2 as d from v1 join v2 on v1.id = v2.id"); // InsertIntoHadoopFsRelationCommand df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out.csv"); @@ -274,11 +303,21 @@ public void test1HdfsInOut() throws Exception { @Test public void test5HdfsInJdbcOut() throws Exception { - Dataset df1 = 
spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); Dataset df = df1.join(df2, "id").drop("id"); @@ -300,8 +339,13 @@ public void test3HdfsJdbcInJdbcOut() throws Exception { c.createStatement().execute("insert into foo2 values('a', 4);"); c.close(); - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); Dataset df2 = spark.read().jdbc(db.getJdbcUrl(), "foo2", jdbcConnnProperties); @@ -320,16 +364,30 @@ public void test3HdfsJdbcInJdbcOut() throws Exception { @Test public void test2HdfsInHiveOut() throws Exception { - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); Dataset df = df1.join(df2, "id").drop("id"); - df.write().mode(SaveMode.Overwrite).saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand - df.write().mode(SaveMode.Append).saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Overwrite) + .saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand + df.write() + .mode(SaveMode.Append) + .saveAsTable(tbl("foo4")); // CreateDataSourceTableAsSelectCommand df.write().insertInto(tbl("foo4")); // InsertIntoHadoopFsRelationCommand Thread.sleep(5000); @@ -345,18 +403,31 @@ public void test2HdfsInHiveOut() throws Exception { @Test public void test4HiveInHiveOut() throws Exception { - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b"); - - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "c") - .withColumnRenamed("c2", "d"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b"); + + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "c") + .withColumnRenamed("c2", "d"); df1.createOrReplaceTempView("v1"); df2.createOrReplaceTempView("v2"); // CreateHiveTableAsSelectCommand spark.sql( - "create table " + tbl("foo5") + " as " + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); + "create table " + + tbl("foo5") + + " as " + + "(select v1.a, v1.b, v2.c, v2.d from v1 join v2 on v1.id = v2.id)"); 
check(dsl(catTblDs("foo5"), hdfsDs("in1.csv"), hdfsDs("in2.csv")), acc.getLineages().get(0)); @@ -388,13 +459,23 @@ public void test6HdfsJdbcInJdbcOutTwoLevel() throws Exception { c.createStatement().execute("insert into foo6 values('a', 4);"); c.close(); - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); Dataset df2 = spark.read().jdbc(db.getJdbcUrl(), "foo6", jdbcConnnProperties); - Dataset df3 = spark.read().option("header", "true").csv(DATA_DIR + "/in2.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b3"); + Dataset df3 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in2.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b3"); Dataset df = df1.join(df2, "a").drop("id").join(df3, "a"); @@ -402,7 +483,9 @@ public void test6HdfsJdbcInJdbcOutTwoLevel() throws Exception { // JDBCRelation input df.write().mode(SaveMode.Overwrite).jdbc(db.getJdbcUrl(), "foo7", jdbcConnnProperties); Thread.sleep(5000); - check(dsl(pgDs("foo7"), hdfsDs("in1.csv"), hdfsDs("in2.csv"), pgDs("foo6")), acc.getLineages().get(0)); + check( + dsl(pgDs("foo7"), hdfsDs("in1.csv"), hdfsDs("in2.csv"), pgDs("foo6")), + acc.getLineages().get(0)); if (VERIFY_EXPECTED) { verify(1 * N); } @@ -413,16 +496,26 @@ public void test7HdfsInPersistHdfsOut() throws Exception { Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in3.csv"); - Dataset df2 = spark.read().option("header", "true").csv(DATA_DIR + "/in4.csv").withColumnRenamed("c2", "d") - .withColumnRenamed("c1", "c").withColumnRenamed("id", "id2"); - Dataset df = df1.join(df2, df1.col("id").equalTo(df2.col("id2")), "inner") - .filter(df1.col("id").equalTo("id_filter")).persist(StorageLevel.MEMORY_ONLY()); + Dataset df2 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in4.csv") + .withColumnRenamed("c2", "d") + .withColumnRenamed("c1", "c") + .withColumnRenamed("id", "id2"); + Dataset df = + df1.join(df2, df1.col("id").equalTo(df2.col("id2")), "inner") + .filter(df1.col("id").equalTo("id_filter")) + .persist(StorageLevel.MEMORY_ONLY()); df.show(); // InsertIntoHadoopFsRelationCommand df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out_persist.csv"); Thread.sleep(5000); - check(dsl(hdfsDs("out_persist.csv"), hdfsDs("in3.csv"), hdfsDs("in4.csv")), acc.getLineages().get(0)); + check( + dsl(hdfsDs("out_persist.csv"), hdfsDs("in3.csv"), hdfsDs("in4.csv")), + acc.getLineages().get(0)); if (VERIFY_EXPECTED) { verify(1 * N); } @@ -436,10 +529,19 @@ public void test8PersistHdfsJdbcInJdbcOut() throws Exception { c.createStatement().execute("insert into foo8 values('a', 4);"); c.close(); - Dataset df1 = spark.read().option("header", "true").csv(DATA_DIR + "/in1.csv").withColumnRenamed("c1", "a") - .withColumnRenamed("c2", "b2"); + Dataset df1 = + spark + .read() + .option("header", "true") + .csv(DATA_DIR + "/in1.csv") + .withColumnRenamed("c1", "a") + .withColumnRenamed("c2", "b2"); - Dataset df2 = spark.read().jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties).persist(StorageLevel.MEMORY_ONLY()); + Dataset df2 = + spark + .read() + .jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties) + .persist(StorageLevel.MEMORY_ONLY()); Dataset df = df1.join(df2, "a"); @@ -452,19 +554,24 @@ public void test8PersistHdfsJdbcInJdbcOut() throws Exception { verify(1 * 
N); } } - - // This test cannot be executed individually. It depends upon previous tests to create tables in the database. + + // This test cannot be executed individually. It depends upon previous tests to create tables in + // the database. @Test public void test9PersistJdbcInHdfsOut() throws Exception { Connection c = db.createConnection(""); - + Dataset df1 = spark.read().jdbc(db.getJdbcUrl(), "foo9", jdbcConnnProperties); df1 = df1.withColumnRenamed("b", "b1"); - Dataset df2 = spark.read().jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties).persist(StorageLevel.DISK_ONLY_2()); + Dataset df2 = + spark + .read() + .jdbc(db.getJdbcUrl(), "foo8", jdbcConnnProperties) + .persist(StorageLevel.DISK_ONLY_2()); Dataset df = df1.join(df2, "a"); - + df.write().mode(SaveMode.Overwrite).csv(DATA_DIR + "/out_persist.csv"); Thread.sleep(5000); check(dsl(hdfsDs("out_persist.csv"), pgDs("foo2"), pgDs("foo3")), acc.getLineages().get(0)); @@ -472,7 +579,7 @@ public void test9PersistJdbcInHdfsOut() throws Exception { verify(1 * N); } } - + private static class DatasetLineageAccumulator implements LineageConsumer { boolean closed = false; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java index acd8bff8c8c47..d1c1110329ad8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DeprecationUtils.java @@ -6,36 +6,33 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; - +import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.Objects; public class DeprecationUtils { - private DeprecationUtils() { } + private DeprecationUtils() {} - @Nullable - public static Deprecation getDeprecation( - @Nonnull EntityService entityService, - @Nonnull String urn, - Urn actor, - @Nullable String note, - boolean deprecated, - @Nullable Long decommissionTime - ) { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - urn, - Constants.DEPRECATION_ASPECT_NAME, - entityService, - new Deprecation()); - if (deprecation == null) { - return null; - } - deprecation.setActor(actor); - deprecation.setDeprecated(deprecated); - deprecation.setDecommissionTime(decommissionTime, SetMode.REMOVE_IF_NULL); - deprecation.setNote(Objects.requireNonNullElse(note, "")); - return deprecation; + @Nullable + public static Deprecation getDeprecation( + @Nonnull EntityService entityService, + @Nonnull String urn, + Urn actor, + @Nullable String note, + boolean deprecated, + @Nullable Long decommissionTime) { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + urn, Constants.DEPRECATION_ASPECT_NAME, entityService, new Deprecation()); + if (deprecation == null) { + return null; } + deprecation.setActor(actor); + deprecation.setDeprecated(deprecated); + deprecation.setDecommissionTime(decommissionTime, SetMode.REMOVE_IF_NULL); + deprecation.setNote(Objects.requireNonNullElse(note, "")); + return deprecation; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index dff9a22de8efd..53b974b560e2a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.client; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static com.linkedin.metadata.search.utils.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; @@ -70,578 +73,741 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static com.linkedin.metadata.search.utils.SearchUtils.*; - - @Slf4j @RequiredArgsConstructor public class JavaEntityClient implements EntityClient { - private static final int DEFAULT_RETRY_INTERVAL = 2; - private static final int DEFAULT_RETRY_COUNT = 3; - - private final static Set NON_RETRYABLE = Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); - - private final Clock _clock = Clock.systemUTC(); - - private final EntityService _entityService; - private final DeleteEntityService _deleteEntityService; - private final EntitySearchService _entitySearchService; - private final CachingEntitySearchService _cachingEntitySearchService; - private final SearchService _searchService; - private final LineageSearchService _lineageSearchService; - private final TimeseriesAspectService _timeseriesAspectService; - private final EventProducer _eventProducer; - private final RestliEntityClient _restliEntityClient; - - @Nullable - public EntityResponse getV2( - @Nonnull String entityName, - @Nonnull final Urn urn, - @Nullable final Set aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set projectedAspects = aspectNames == null - ? _entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntityV2(entityName, urn, projectedAspects); - } - - @Nonnull - public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { - return _entityService.getEntity(urn, ImmutableSet.of()); - } - - @Nonnull - @Override - public Map batchGetV2( - @Nonnull String entityName, - @Nonnull Set urns, - @Nullable Set aspectNames, - @Nonnull Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set projectedAspects = aspectNames == null - ? _entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntitiesV2(entityName, urns, projectedAspects); - } - - @Nonnull - public Map batchGetVersionedV2( - @Nonnull String entityName, - @Nonnull final Set versionedUrns, - @Nullable final Set aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final Set projectedAspects = aspectNames == null - ? 
_entityService.getEntityAspectNames(entityName) - : aspectNames; - return _entityService.getEntitiesVersionedV2(versionedUrns, projectedAspects); - } - - @Nonnull - public Map batchGet(@Nonnull final Set urns, @Nonnull final Authentication authentication) { - return _entityService.getEntities(urns, ImmutableSet.of()); - } - - /** - * Gets autocomplete results - * - * @param entityType the type of entity to autocomplete against - * @param query search query - * @param field field of the dataset to autocomplete against - * @param requestFilters autocomplete filters - * @param limit max number of autocomplete results - * @throws RemoteInvocationException - */ - @Nonnull - public AutoCompleteResult autoComplete( - @Nonnull String entityType, - @Nonnull String query, - @Nullable Filter requestFilters, - @Nonnull int limit, - @Nullable String field, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _cachingEntitySearchService.autoComplete(entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); - } - - /** - * Gets autocomplete results - * - * @param entityType the type of entity to autocomplete against - * @param query search query - * @param requestFilters autocomplete filters - * @param limit max number of autocomplete results - * @throws RemoteInvocationException - */ - @Nonnull - public AutoCompleteResult autoComplete( - @Nonnull String entityType, - @Nonnull String query, - @Nullable Filter requestFilters, - @Nonnull int limit, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _cachingEntitySearchService.autoComplete(entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); - } - - /** - * Gets autocomplete results - * - * @param entityType entity type being browse - * @param path path being browsed - * @param requestFilters browse filters - * @param start start offset of first dataset - * @param limit max number of datasets - * @throws RemoteInvocationException - */ - @Nonnull - public BrowseResult browse( - @Nonnull String entityType, - @Nonnull String path, - @Nullable Map requestFilters, - int start, - int limit, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return ValidationUtils.validateBrowseResult( - _cachingEntitySearchService.browse(entityType, path, newFilter(requestFilters), start, limit, null), _entityService); - } - - - /** - * Gets browse V2 snapshot of a given path - * - * @param entityName entity being browsed - * @param path path being browsed - * @param filter browse filter - * @param input search query - * @param start start offset of first group - * @param count max number of results requested - * @throws RemoteInvocationException - */ - @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) { - // TODO: cache browseV2 results - return _entitySearchService.browseV2(entityName, path, filter, input, start, count); - } - - @SneakyThrows - @Deprecated - public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - Objects.requireNonNull(authentication, "authentication must not be null"); - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - 
_entityService.ingestEntity(entity, auditStamp); - } - - @SneakyThrows - @Deprecated - public void updateWithSystemMetadata( - @Nonnull final Entity entity, - @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - if (systemMetadata == null) { - update(entity, authentication); - return; - } - - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - - _entityService.ingestEntity(entity, auditStamp, systemMetadata); - tryIndexRunId(com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()), systemMetadata); - } - - @SneakyThrows - @Deprecated - public void batchUpdate(@Nonnull final Set entities, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); - auditStamp.setTime(Clock.systemUTC().millis()); - _entityService.ingestEntities(entities.stream().collect(Collectors.toList()), auditStamp, ImmutableList.of()); - } - - /** - * Searches for entities matching to a given query and filters - * - * @param input search query - * @param requestFilters search filters - * @param start start offset for search results - * @param count max number of search results requested - * @param searchFlags - * @return a set of search results - * @throws RemoteInvocationException - */ - @Nonnull - @WithSpan - @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map requestFilters, int start, int count, @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - - return ValidationUtils.validateSearchResult(_entitySearchService.search(List.of(entity), input, newFilter(requestFilters), - null, start, count, searchFlags), _entityService); - } - - /** - * Deprecated! Use 'filter' or 'search' instead. 
- * - * Filters for entities matching to a given query and filters - * - * @param requestFilters search filters - * @param start start offset for search results - * @param count max number of search results requested - * @return a set of list results - * @throws RemoteInvocationException - */ - @Deprecated - @Nonnull - public ListResult list( - @Nonnull String entity, - @Nullable Map requestFilters, - int start, - int count, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateListResult(toListResult( - _entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), _entityService); - } - - /** - * Searches for datasets matching to a given query and filters - * - * @param input search query - * @param filter search filters - * @param sortCriterion sort criterion - * @param start start offset for search results - * @param count max number of search results requested - * @return Snapshot key - * @throws RemoteInvocationException - */ - @Nonnull - @Override - public SearchResult search( - @Nonnull String entity, - @Nonnull String input, - @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, - int start, - int count, - @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) - throws RemoteInvocationException { - return ValidationUtils.validateSearchResult( - _entitySearchService.search(List.of(entity), input, filter, sortCriterion, start, count, searchFlags), _entityService); - } - - @Nonnull - public SearchResult searchAcrossEntities( - @Nonnull List entities, - @Nonnull String input, - @Nullable Filter filter, - int start, - int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return searchAcrossEntities(entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); - } - - /** - * Searches for entities matching to a given query and filters across multiple entity types - * - * @param entities entity types to search (if empty, searches all entities) - * @param input search query - * @param filter search filters - * @param start start offset for search results - * @param count max number of search results requested - * @param facets list of facets we want aggregations for - * @param sortCriterion sorting criterion - * @return Snapshot key - * @throws RemoteInvocationException - */ - @Nonnull - public SearchResult searchAcrossEntities( - @Nonnull List entities, - @Nonnull String input, - @Nullable Filter filter, - int start, - int count, - @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, - @Nonnull final Authentication authentication, - @Nullable List facets) throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return ValidationUtils.validateSearchResult( - _searchService.searchAcrossEntities(entities, input, filter, sortCriterion, start, count, finalFlags, facets), _entityService); - } - - @Nonnull - @Override - public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, - @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) - throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); - return ValidationUtils.validateScrollResult( - _searchService.scrollAcrossEntities(entities, input, filter, null, scrollId, keepAlive, count, - finalFlags), _entityService); - } - - @Nonnull - @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, start, count, null, null, searchFlags), _entityService); - } - - @Nonnull - @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return ValidationUtils.validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, start, count, startTimeMillis, endTimeMillis, searchFlags), _entityService); - } - - @Nonnull - @Override - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags, - @Nonnull final Authentication authentication) - throws RemoteInvocationException { - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); - return ValidationUtils.validateLineageScrollResult( - _lineageSearchService.scrollAcrossLineage(sourceUrn, direction, entities, input, maxHops, filter, - sortCriterion, scrollId, keepAlive, count, startTimeMillis, endTimeMillis, finalFlags), _entityService); - } - - /** - * Gets browse path(s) given dataset urn - * - * @param urn urn for the entity - * @return list of paths given urn - * @throws RemoteInvocationException - */ - @Nonnull - public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return new StringArray(_entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); - } - - public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) throws RemoteInvocationException { - _entityService.setWritable(canWrite); - } - - @Nonnull - public Map batchGetTotalEntityCount( - @Nonnull List entityNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _searchService.docCountPerEntity(entityNames); - } - - /** - * List all urns existing for a particular Entity type. 
- */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return _entityService.listUrns(entityName, start, count); - } - - /** - * Hard delete an entity with a particular urn. - */ - public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - _entityService.deleteUrn(urn); - } - - @Override - public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) - throws RemoteInvocationException { - withRetry(() -> _deleteEntityService.deleteReferencesTo(urn, false), "deleteEntityReferences"); - } - - @Nonnull - @Override - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull final Authentication authentication) throws RemoteInvocationException { - return ValidationUtils.validateSearchResult(_entitySearchService.filter(entity, filter, sortCriterion, start, count), - _entityService); - } - - @Override - public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - return _entityService.exists(urn); - } - - @SneakyThrows - @Override - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - } - - @SneakyThrows - @Override - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - } - - @SneakyThrows - @Override - public List getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull final Authentication authentication) - throws RemoteInvocationException { - GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); - response.setEntityName(entity); - response.setAspectName(aspect); - if (startTimeMillis != null) { - response.setStartTimeMillis(startTimeMillis); + private static final int DEFAULT_RETRY_INTERVAL = 2; + private static final int DEFAULT_RETRY_COUNT = 3; + + private static final Set NON_RETRYABLE = + Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); + + private final Clock _clock = Clock.systemUTC(); + + private final EntityService _entityService; + private final DeleteEntityService _deleteEntityService; + private final EntitySearchService _entitySearchService; + private final CachingEntitySearchService _cachingEntitySearchService; + private final SearchService _searchService; + private final LineageSearchService _lineageSearchService; + private final TimeseriesAspectService _timeseriesAspectService; + private final EventProducer _eventProducer; + private final RestliEntityClient _restliEntityClient; + + @Nullable + public EntityResponse getV2( + @Nonnull String entityName, + @Nonnull final Urn urn, + @Nullable final Set aspectNames, + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException 
{ + final Set projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntityV2(entityName, urn, projectedAspects); + } + + @Nonnull + public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { + return _entityService.getEntity(urn, ImmutableSet.of()); + } + + @Nonnull + @Override + public Map batchGetV2( + @Nonnull String entityName, + @Nonnull Set urns, + @Nullable Set aspectNames, + @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final Set projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntitiesV2(entityName, urns, projectedAspects); + } + + @Nonnull + public Map batchGetVersionedV2( + @Nonnull String entityName, + @Nonnull final Set versionedUrns, + @Nullable final Set aspectNames, + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + final Set projectedAspects = + aspectNames == null ? _entityService.getEntityAspectNames(entityName) : aspectNames; + return _entityService.getEntitiesVersionedV2(versionedUrns, projectedAspects); + } + + @Nonnull + public Map batchGet( + @Nonnull final Set urns, @Nonnull final Authentication authentication) { + return _entityService.getEntities(urns, ImmutableSet.of()); + } + + /** + * Gets autocomplete results + * + * @param entityType the type of entity to autocomplete against + * @param query search query + * @param field field of the dataset to autocomplete against + * @param requestFilters autocomplete filters + * @param limit max number of autocomplete results + * @throws RemoteInvocationException + */ + @Nonnull + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _cachingEntitySearchService.autoComplete( + entityType, query, field, filterOrDefaultEmptyFilter(requestFilters), limit, null); + } + + /** + * Gets autocomplete results + * + * @param entityType the type of entity to autocomplete against + * @param query search query + * @param requestFilters autocomplete filters + * @param limit max number of autocomplete results + * @throws RemoteInvocationException + */ + @Nonnull + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _cachingEntitySearchService.autoComplete( + entityType, query, "", filterOrDefaultEmptyFilter(requestFilters), limit, null); + } + + /** + * Gets autocomplete results + * + * @param entityType entity type being browse + * @param path path being browsed + * @param requestFilters browse filters + * @param start start offset of first dataset + * @param limit max number of datasets + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map requestFilters, + int start, + int limit, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateBrowseResult( + _cachingEntitySearchService.browse( + entityType, path, newFilter(requestFilters), start, limit, null), + _entityService); + 
} + + /** + * Gets browse V2 snapshot of a given path + * + * @param entityName entity being browsed + * @param path path being browsed + * @param filter browse filter + * @param input search query + * @param start start offset of first group + * @param count max number of results requested + * @throws RemoteInvocationException + */ + @Nonnull + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) { + // TODO: cache browseV2 results + return _entitySearchService.browseV2(entityName, path, filter, input, start, count); + } + + @SneakyThrows + @Deprecated + public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + Objects.requireNonNull(authentication, "authentication must not be null"); + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + _entityService.ingestEntity(entity, auditStamp); + } + + @SneakyThrows + @Deprecated + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + if (systemMetadata == null) { + update(entity, authentication); + return; + } + + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + + _entityService.ingestEntity(entity, auditStamp, systemMetadata); + tryIndexRunId( + com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()), systemMetadata); + } + + @SneakyThrows + @Deprecated + public void batchUpdate( + @Nonnull final Set entities, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(Urn.createFromString(authentication.getActor().toUrnStr())); + auditStamp.setTime(Clock.systemUTC().millis()); + _entityService.ingestEntities( + entities.stream().collect(Collectors.toList()), auditStamp, ImmutableList.of()); + } + + /** + * Searches for entities matching to a given query and filters + * + * @param input search query + * @param requestFilters search filters + * @param start start offset for search results + * @param count max number of search results requested + * @param searchFlags + * @return a set of search results + * @throws RemoteInvocationException + */ + @Nonnull + @WithSpan + @Override + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map requestFilters, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + + return ValidationUtils.validateSearchResult( + _entitySearchService.search( + List.of(entity), input, newFilter(requestFilters), null, start, count, searchFlags), + _entityService); + } + + /** + * Deprecated! Use 'filter' or 'search' instead. + * + *
<p>
Filters for entities matching to a given query and filters + * + * @param requestFilters search filters + * @param start start offset for search results + * @param count max number of search results requested + * @return a set of list results + * @throws RemoteInvocationException + */ + @Deprecated + @Nonnull + public ListResult list( + @Nonnull String entity, + @Nullable Map requestFilters, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateListResult( + toListResult( + _entitySearchService.filter(entity, newFilter(requestFilters), null, start, count)), + _entityService); + } + + /** + * Searches for datasets matching to a given query and filters + * + * @param input search query + * @param filter search filters + * @param sortCriterion sort criterion + * @param start start offset for search results + * @param count max number of search results requested + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + @Override + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException { + return ValidationUtils.validateSearchResult( + _entitySearchService.search( + List.of(entity), input, filter, sortCriterion, start, count, searchFlags), + _entityService); + } + + @Nonnull + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return searchAcrossEntities( + entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + } + + /** + * Searches for entities matching to a given query and filters across multiple entity types + * + * @param entities entity types to search (if empty, searches all entities) + * @param input search query + * @param filter search filters + * @param start start offset for search results + * @param count max number of search results requested + * @param facets list of facets we want aggregations for + * @param sortCriterion sorting criterion + * @return Snapshot key + * @throws RemoteInvocationException + */ + @Nonnull + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication, + @Nullable List facets) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateSearchResult( + _searchService.searchAcrossEntities( + entities, input, filter, sortCriterion, start, count, finalFlags, facets), + _entityService); + } + + @Nonnull + @Override + public ScrollResult scrollAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return ValidationUtils.validateScrollResult( + _searchService.scrollAcrossEntities( + entities, input, filter, null, scrollId, keepAlive, count, finalFlags), + _entityService); + } + + @Nonnull + @Override + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + start, + count, + null, + null, + searchFlags), + _entityService); + } + + @Nonnull + @Override + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags), + _entityService); + } + + @Nonnull + @Override + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + final SearchFlags finalFlags = + searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true).setSkipCache(true); + return ValidationUtils.validateLineageScrollResult( + _lineageSearchService.scrollAcrossLineage( + sourceUrn, + direction, + entities, + input, + maxHops, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + finalFlags), + _entityService); + } + + /** + * Gets browse path(s) given dataset urn + * + * @param urn urn for the entity + * @return list of paths given urn + * @throws RemoteInvocationException + */ + @Nonnull + public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return new StringArray(_entitySearchService.getBrowsePaths(urn.getEntityType(), urn)); + } + + public void setWritable(boolean canWrite, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + _entityService.setWritable(canWrite); + } + + @Nonnull + public Map batchGetTotalEntityCount( + @Nonnull List entityNames, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _searchService.docCountPerEntity(entityNames); + } + + /** List all urns existing for a particular Entity type. 
*/ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.listUrns(entityName, start, count); + } + + /** Hard delete an entity with a particular urn. */ + public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + _entityService.deleteUrn(urn); + } + + @Override + public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) + throws RemoteInvocationException { + withRetry(() -> _deleteEntityService.deleteReferencesTo(urn, false), "deleteEntityReferences"); + } + + @Nonnull + @Override + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return ValidationUtils.validateSearchResult( + _entitySearchService.filter(entity, filter, sortCriterion, start, count), _entityService); + } + + @Override + public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.exists(urn); + } + + @SneakyThrows + @Override + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + } + + @SneakyThrows + @Override + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + return _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + } + + @SneakyThrows + @Override + public List getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); + response.setEntityName(entity); + response.setAspectName(aspect); + if (startTimeMillis != null) { + response.setStartTimeMillis(startTimeMillis); + } + if (endTimeMillis != null) { + response.setEndTimeMillis(endTimeMillis); + } + if (limit != null) { + response.setLimit(limit); + } + if (filter != null) { + response.setFilter(filter); + } + response.setValues( + new EnvelopedAspectArray( + _timeseriesAspectService.getAspectValues( + Urn.createFromString(urn), + entity, + aspect, + startTimeMillis, + endTimeMillis, + limit, + filter, + sort))); + return response.getValues(); + } + + // TODO: Factor out ingest logic into a util that can be accessed by the java client and the + // resource + @Override + public String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { + String actorUrnStr = + authentication.getActor() != null + ? 
authentication.getActor().toUrnStr() + : Constants.UNKNOWN_ACTOR; + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); + final List additionalChanges = + AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); + + Stream proposalStream = + Stream.concat(Stream.of(metadataChangeProposal), additionalChanges.stream()); + AspectsBatch batch = + AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) + .build(); + + IngestResult one = + _entityService.ingestProposal(batch, auditStamp, async).stream().findFirst().get(); + + Urn urn = one.getUrn(); + tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); + return urn.toString(); + } + + @SneakyThrows + @Override + public Optional getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class aspectClass, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + VersionedAspect entity = + _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + if (entity != null && entity.hasAspect()) { + DataMap rawAspect = ((DataMap) entity.data().get("aspect")); + if (rawAspect.containsKey(aspectClass.getCanonicalName())) { + DataMap aspectDataMap = rawAspect.getDataMap(aspectClass.getCanonicalName()); + return Optional.of(RecordUtils.toRecordTemplate(aspectClass, aspectDataMap)); } - if (endTimeMillis != null) { - response.setEndTimeMillis(endTimeMillis); - } - if (limit != null) { - response.setLimit(limit); - } - if (filter != null) { - response.setFilter(filter); - } - response.setValues(new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues(Urn.createFromString(urn), entity, aspect, startTimeMillis, - endTimeMillis, limit, filter, sort))); - return response.getValues(); - } - - // TODO: Factor out ingest logic into a util that can be accessed by the java client and the resource - @Override - public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException { - String actorUrnStr = authentication.getActor() != null ? 
authentication.getActor().toUrnStr() : Constants.UNKNOWN_ACTOR; - final AuditStamp auditStamp = - new AuditStamp().setTime(_clock.millis()).setActor(UrnUtils.getUrn(actorUrnStr)); - final List additionalChanges = - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService); - - Stream proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - additionalChanges.stream()); - AspectsBatch batch = AspectsBatchImpl.builder() - .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(); - - IngestResult one = _entityService.ingestProposal(batch, auditStamp, async).stream() - .findFirst().get(); - - Urn urn = one.getUrn(); - tryIndexRunId(urn, metadataChangeProposal.getSystemMetadata()); - return urn.toString(); - } - - @SneakyThrows - @Override - public Optional getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class aspectClass, @Nonnull final Authentication authentication) throws RemoteInvocationException { - VersionedAspect entity = _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - if (entity != null && entity.hasAspect()) { - DataMap rawAspect = ((DataMap) entity.data().get("aspect")); - if (rawAspect.containsKey(aspectClass.getCanonicalName())) { - DataMap aspectDataMap = rawAspect.getDataMap(aspectClass.getCanonicalName()); - return Optional.of(RecordUtils.toRecordTemplate(aspectClass, aspectDataMap)); - } - } - return Optional.empty(); } - - @SneakyThrows - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Authentication authentication) throws RemoteInvocationException { - VersionedAspect entity = _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); - if (entity == null) { - return null; + return Optional.empty(); + } + + @SneakyThrows + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException { + VersionedAspect entity = + _entityService.getVersionedAspect(Urn.createFromString(urn), aspect, version); + if (entity == null) { + return null; + } + + if (entity.hasAspect()) { + DataMap rawAspect = ((DataMap) entity.data().get("aspect")); + return rawAspect; + } + + return null; + } + + @Override + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull Authentication authentication) + throws Exception { + _eventProducer.producePlatformEvent(name, key, event); + } + + @Override + public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + throws Exception { + _restliEntityClient.rollbackIngestion(runId, authentication); + } + + private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { + if (systemMetadata != null && systemMetadata.hasRunId()) { + _entitySearchService.appendRunId( + entityUrn.getEntityType(), entityUrn, systemMetadata.getRunId()); + } + } + + protected T withRetry(@Nonnull final Supplier block, @Nullable String counterPrefix) { + final BackoffPolicy backoffPolicy = new ExponentialBackoff(DEFAULT_RETRY_INTERVAL); + int attemptCount = 0; + + while (attemptCount < DEFAULT_RETRY_COUNT + 1) { + try { + return block.get(); + } catch (Throwable ex) { + MetricUtils.counter(this.getClass(), buildMetricName(ex, counterPrefix)).inc(); + + final boolean skipRetry = + 
NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) + || (ex.getCause() != null + && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); + + if (attemptCount == DEFAULT_RETRY_COUNT || skipRetry) { + throw ex; + } else { + attemptCount = attemptCount + 1; + try { + Thread.sleep(backoffPolicy.nextBackoff(attemptCount, ex) * 1000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } } - - if (entity.hasAspect()) { - DataMap rawAspect = ((DataMap) entity.data().get("aspect")); - return rawAspect; - } - - return null; - } - - @Override - public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, - @Nonnull Authentication authentication) throws Exception { - _eventProducer.producePlatformEvent(name, key, event); - } - - @Override - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) throws Exception { - _restliEntityClient.rollbackIngestion(runId, authentication); + } } - private void tryIndexRunId(Urn entityUrn, @Nullable SystemMetadata systemMetadata) { - if (systemMetadata != null && systemMetadata.hasRunId()) { - _entitySearchService.appendRunId(entityUrn.getEntityType(), entityUrn, systemMetadata.getRunId()); - } - } + // Should never hit this line. + throw new IllegalStateException("No JavaEntityClient call executed."); + } - protected T withRetry(@Nonnull final Supplier block, @Nullable String counterPrefix) { - final BackoffPolicy backoffPolicy = new ExponentialBackoff(DEFAULT_RETRY_INTERVAL); - int attemptCount = 0; - - while (attemptCount < DEFAULT_RETRY_COUNT + 1) { - try { - return block.get(); - } catch (Throwable ex) { - MetricUtils.counter(this.getClass(), buildMetricName(ex, counterPrefix)).inc(); - - final boolean skipRetry = NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) - || (ex.getCause() != null && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); - - if (attemptCount == DEFAULT_RETRY_COUNT || skipRetry) { - throw ex; - } else { - attemptCount = attemptCount + 1; - try { - Thread.sleep(backoffPolicy.nextBackoff(attemptCount, ex) * 1000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - } - } + private String buildMetricName(Throwable throwable, @Nullable String counterPrefix) { + StringBuilder builder = new StringBuilder(); - // Should never hit this line. 
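A self-contained restatement of the retry contract implemented above: at most DEFAULT_RETRY_COUNT retries after the first attempt, no retry when the thrown exception (or its cause) is listed in NON_RETRYABLE, and a growing sleep between attempts. The doubling schedule below is an assumption, since the exact growth curve of ExponentialBackoff is not shown in this patch; the class and variable names are illustrative only.

    import java.util.Set;
    import java.util.function.Supplier;

    public class RetrySketch {
      private static final int RETRY_INTERVAL_SECONDS = 2; // mirrors DEFAULT_RETRY_INTERVAL
      private static final int RETRY_COUNT = 3;            // mirrors DEFAULT_RETRY_COUNT
      private static final Set<String> NON_RETRYABLE =
          Set.of("com.linkedin.data.template.RequiredFieldNotPresentException");

      static <T> T withRetry(Supplier<T> block) {
        int attempt = 0;
        while (true) {
          try {
            return block.get();
          } catch (RuntimeException ex) { // the patch catches Throwable; narrowed here for brevity
            boolean skipRetry =
                NON_RETRYABLE.contains(ex.getClass().getCanonicalName())
                    || (ex.getCause() != null
                        && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName()));
            if (attempt == RETRY_COUNT || skipRetry) {
              throw ex; // retry budget exhausted, or a permanent failure
            }
            attempt++;
            try {
              // assumed doubling schedule: 2s, 4s, 8s
              Thread.sleep(1000L * RETRY_INTERVAL_SECONDS * (1L << (attempt - 1)));
            } catch (InterruptedException e) {
              Thread.currentThread().interrupt();
              throw new RuntimeException(e);
            }
          }
        }
      }

      public static void main(String[] args) {
        System.out.println(withRetry(() -> 42)); // succeeds on the first attempt, prints 42
      }
    }
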
- throw new IllegalStateException("No JavaEntityClient call executed."); + // deleteEntityReferences_failures + if (counterPrefix != null) { + builder.append(counterPrefix).append(MetricUtils.DELIMITER); } - private String buildMetricName(Throwable throwable, @Nullable String counterPrefix) { - StringBuilder builder = new StringBuilder(); - - // deleteEntityReferences_failures - if (counterPrefix != null) { - builder.append(counterPrefix).append(MetricUtils.DELIMITER); - } - - return builder.append("exception") - .append(MetricUtils.DELIMITER) - .append(throwable.getClass().getName()) - .toString(); - } + return builder + .append("exception") + .append(MetricUtils.DELIMITER) + .append(throwable.getClass().getName()) + .toString(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java index 6b5a3d5bfb06e..0ac18b4aacc04 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/SystemJavaEntityClient.java @@ -2,9 +2,9 @@ import com.datahub.authentication.Authentication; import com.linkedin.entity.client.EntityClientCache; -import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.event.EventProducer; @@ -15,25 +15,37 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import lombok.Getter; - -/** - * Java backed SystemEntityClient - */ +/** Java backed SystemEntityClient */ @Getter public class SystemJavaEntityClient extends JavaEntityClient implements SystemEntityClient { - private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final EntityClientCache entityClientCache; + private final Authentication systemAuthentication; - public SystemJavaEntityClient(EntityService entityService, DeleteEntityService deleteEntityService, - EntitySearchService entitySearchService, CachingEntitySearchService cachingEntitySearchService, - SearchService searchService, LineageSearchService lineageSearchService, - TimeseriesAspectService timeseriesAspectService, EventProducer eventProducer, - RestliEntityClient restliEntityClient, Authentication systemAuthentication, - EntityClientCacheConfig cacheConfig) { - super(entityService, deleteEntityService, entitySearchService, cachingEntitySearchService, searchService, - lineageSearchService, timeseriesAspectService, eventProducer, restliEntityClient); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); - } + public SystemJavaEntityClient( + EntityService entityService, + DeleteEntityService deleteEntityService, + EntitySearchService entitySearchService, + CachingEntitySearchService cachingEntitySearchService, + SearchService searchService, + LineageSearchService lineageSearchService, + TimeseriesAspectService timeseriesAspectService, + EventProducer eventProducer, + RestliEntityClient restliEntityClient, + Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + super( + entityService, + 
deleteEntityService, + entitySearchService, + cachingEntitySearchService, + searchService, + lineageSearchService, + timeseriesAspectService, + eventProducer, + restliEntityClient); + this.systemAuthentication = systemAuthentication; + this.entityClientCache = + buildEntityClientCache(SystemJavaEntityClient.class, systemAuthentication, cacheConfig); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java index 660c1291a5651..c740f8562d8fe 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/AspectKey.java @@ -5,19 +5,13 @@ import lombok.NonNull; import lombok.Value; - -/** - * A value class that holds the components of a key for metadata retrieval. - */ +/** A value class that holds the components of a key for metadata retrieval. */ @Value public class AspectKey { - @NonNull - Class aspectClass; + @NonNull Class aspectClass; - @NonNull - URN urn; + @NonNull URN urn; - @NonNull - Long version; + @NonNull Long version; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java index 7acb9ca0cbd64..999140759b09b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/dao/BaseReadDAO.java @@ -15,7 +15,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public abstract class BaseReadDAO { public static final long FIRST_VERSION = 0; @@ -41,12 +40,10 @@ public BaseReadDAO(@Nonnull Set> aspects) { * @return a mapping of given keys to the corresponding metadata aspect. */ @Nonnull - public abstract Map, Optional> get( - @Nonnull Set> keys); + public abstract Map, Optional> + get(@Nonnull Set> keys); - /** - * Similar to {@link #get(Set)} but only using only one {@link AspectKey}. - */ + /** Similar to {@link #get(Set)} but only using only one {@link AspectKey}. */ @Nonnull public Optional get(@Nonnull AspectKey key) { return (Optional) get(Collections.singleton(key)).get(key); @@ -56,21 +53,21 @@ public Optional get(@Nonnull AspectKey Optional get(@Nonnull Class aspectClass, @Nonnull URN urn, - long version) { + public Optional get( + @Nonnull Class aspectClass, @Nonnull URN urn, long version) { return get(new AspectKey<>(aspectClass, urn, version)); } - /** - * Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version. - */ + /** Similar to {@link #get(Class, Urn, long)} but always retrieves the latest version. */ @Nonnull - public Optional get(@Nonnull Class aspectClass, @Nonnull URN urn) { + public Optional get( + @Nonnull Class aspectClass, @Nonnull URN urn) { return get(aspectClass, urn, LATEST_VERSION); } /** - * Similar to {@link #get(Class, Urn)} but retrieves multiple aspects latest versions associated with multiple URNs. + * Similar to {@link #get(Class, Urn)} but retrieves multiple aspects latest versions associated + * with multiple URNs. * *
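<p>For illustration, a hedged usage sketch of this batched read; {@code dao}, {@code urn1},
* {@code urn2} and the {@code Ownership} aspect class are hypothetical stand-ins for any
* {@code BaseReadDAO} implementation, two previously built URNs, and any registered aspect:
*
* <pre>{@code
* // One round trip fetches the latest version of each requested aspect per URN;
* // the outer map is keyed by URN, the inner map by aspect class.
* var latest = dao.get(Set.of(Ownership.class), Set.of(urn1, urn2));
* var ownershipOfUrn1 = latest.get(urn1).get(Ownership.class); // an Optional
* }</pre>
*
*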
<p>
The returned {@link Map} contains all the . */ @@ -85,20 +82,22 @@ public Map, Optional, Optional>> results = new HashMap<>(); - get(keys).entrySet().forEach(entry -> { - final AspectKey key = entry.getKey(); - final URN urn = key.getUrn(); - results.putIfAbsent(urn, new HashMap<>()); - results.get(urn).put(key.getAspectClass(), entry.getValue()); - }); + final Map, Optional>> + results = new HashMap<>(); + get(keys) + .entrySet() + .forEach( + entry -> { + final AspectKey key = entry.getKey(); + final URN urn = key.getUrn(); + results.putIfAbsent(urn, new HashMap<>()); + results.get(urn).put(key.getAspectClass(), entry.getValue()); + }); return results; } - /** - * Similar to {@link #get(Set, Set)} but only for one URN. - */ + /** Similar to {@link #get(Set, Set)} but only for one URN. */ @Nonnull public Map, Optional> get( @Nonnull Set> aspectClasses, @Nonnull URN urn) { @@ -112,16 +111,15 @@ public Map, Optional> return results.get(urn); } - /** - * Similar to {@link #get(Set, Set)} but only for one aspect. - */ + /** Similar to {@link #get(Set, Set)} but only for one aspect. */ @Nonnull public Map> get( @Nonnull Class aspectClass, @Nonnull Set urns) { - return get(Collections.singleton(aspectClass), urns).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> (Optional) entry.getValue().get(aspectClass))); + return get(Collections.singleton(aspectClass), urns).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> (Optional) entry.getValue().get(aspectClass))); } protected void checkValidAspect(@Nonnull Class aspectClass) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java index 42dd3f0405a6a..ae27f9f7e6f1a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectDao.java @@ -7,150 +7,162 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import io.ebean.PagedList; import io.ebean.Transaction; - -import java.util.stream.Stream; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.sql.Timestamp; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Stream; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** - * An interface specifying create, update, and read operations against metadata entity aspects. - * This interface is meant to abstract away the storage concerns of these pieces of metadata, permitting any underlying - * storage system to be used. + * An interface specifying create, update, and read operations against metadata entity aspects. This + * interface is meant to abstract away the storage concerns of these pieces of metadata, permitting + * any underlying storage system to be used. * - * Requirements for any implementation: - * 1. Being able to map its internal storage representation to {@link EntityAspect}; - * 2. Honor the internal versioning semantics. The latest version of any aspect is set to 0 for efficient retrieval. - * In most cases only the latest state of an aspect will be fetched. See {@link EntityServiceImpl} for more details. + *
<p>
Requirements for any implementation: 1. Being able to map its internal storage representation + * to {@link EntityAspect}; 2. Honor the internal versioning semantics. The latest version of any + * aspect is set to 0 for efficient retrieval. In most cases only the latest state of an aspect will + * be fetched. See {@link EntityServiceImpl} for more details. * - * TODO: This interface exposes {@link #runInTransactionWithRetry(Supplier, int)} because {@link EntityServiceImpl} concerns - * itself with batching multiple commands into a single transaction. It exposes storage concerns somewhat and it'd be - * worth looking into ways to move this responsibility inside {@link AspectDao} implementations. + *
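<p>A sketch of what requirement 2 means for callers; the values are hypothetical and
* {@code dao} stands in for any {@code AspectDao} implementation:
*
* <pre>{@code
* String urn = "urn:li:corpuser:jdoe"; // hypothetical
* // Version 0 always holds the latest state, so reading the current value is a
* // point lookup rather than a max(version) scan:
* EntityAspect latest = dao.getAspect(urn, "ownership", 0L);
* // Superseded states live at monotonically increasing versions starting from 1:
* EntityAspect firstSuperseded = dao.getAspect(urn, "ownership", 1L);
* }</pre>
*
*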
<p>
TODO: This interface exposes {@link #runInTransactionWithRetry(Supplier, int)} because {@link + * EntityServiceImpl} concerns itself with batching multiple commands into a single transaction. It + * exposes storage concerns somewhat and it'd be worth looking into ways to move this responsibility + * inside {@link AspectDao} implementations. */ public interface AspectDao { - String ASPECT_WRITE_COUNT_METRIC_NAME = "aspectWriteCount"; - String ASPECT_WRITE_BYTES_METRIC_NAME = "aspectWriteBytes"; - - @Nullable - EntityAspect getAspect(@Nonnull final String urn, @Nonnull final String aspectName, final long version); - - @Nullable - EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key); - - @Nonnull - Map batchGet(@Nonnull final Set keys); - - @Nonnull - List getAspectsInRange(@Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis); - - @Nullable - default EntityAspect getLatestAspect(@Nonnull final String urn, @Nonnull final String aspectName) { - return getLatestAspects(Map.of(urn, Set.of(aspectName))).getOrDefault(urn, Map.of()) - .getOrDefault(aspectName, null); - } - - @Nonnull - Map> getLatestAspects(Map> urnAspects); - - void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert); - - void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert); - - long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion); - - void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect); - - @Nonnull - ListResult listUrns( - @Nonnull final String entityName, - @Nonnull final String aspectName, - final int start, - final int pageSize); - - @Nonnull - Integer countAspect( - @Nonnull final String aspectName, - @Nullable String urnLike); - - @Nonnull - PagedList getPagedAspects(final RestoreIndicesArgs args); - - @Nonnull - Stream streamAspects(String entityName, String aspectName); - - int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn); - - @Nonnull - ListResult listLatestAspectMetadata( - @Nonnull final String entityName, - @Nonnull final String aspectName, - final int start, - final int pageSize); - - @Nonnull - ListResult listAspectMetadata( - @Nonnull final String entityName, - @Nonnull final String aspectName, - final long version, - final int start, - final int pageSize); - - Map> getNextVersions(@Nonnull Map> urnAspectMap); - - default long getNextVersion(@Nonnull final String urn, @Nonnull final String aspectName) { - return getNextVersions(urn, Set.of(aspectName)).get(aspectName); - } - - default Map getNextVersions(@Nonnull final String urn, @Nonnull final Set aspectNames) { - return getNextVersions(Map.of(urn, aspectNames)).get(urn); - } - - long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName); - - void 
setWritable(boolean canWrite); - - @Nonnull - T runInTransactionWithRetry(@Nonnull final Function block, final int maxTransactionRetry); - - @Nonnull - default T runInTransactionWithRetry(@Nonnull final Function block, AspectsBatch batch, - final int maxTransactionRetry) { - return runInTransactionWithRetry(block, maxTransactionRetry); - } - - default void incrementWriteMetrics(String aspectName, long count, long bytes) { - MetricUtils.counter(this.getClass(), - String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_COUNT_METRIC_NAME, aspectName))).inc(count); - MetricUtils.counter(this.getClass(), - String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_BYTES_METRIC_NAME, aspectName))).inc(bytes); - } + String ASPECT_WRITE_COUNT_METRIC_NAME = "aspectWriteCount"; + String ASPECT_WRITE_BYTES_METRIC_NAME = "aspectWriteBytes"; + + @Nullable + EntityAspect getAspect( + @Nonnull final String urn, @Nonnull final String aspectName, final long version); + + @Nullable + EntityAspect getAspect(@Nonnull final EntityAspectIdentifier key); + + @Nonnull + Map batchGet( + @Nonnull final Set keys); + + @Nonnull + List getAspectsInRange( + @Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis); + + @Nullable + default EntityAspect getLatestAspect( + @Nonnull final String urn, @Nonnull final String aspectName) { + return getLatestAspects(Map.of(urn, Set.of(aspectName))) + .getOrDefault(urn, Map.of()) + .getOrDefault(aspectName, null); + } + + @Nonnull + Map> getLatestAspects(Map> urnAspects); + + void saveAspect( + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert); + + void saveAspect( + @Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert); + + long saveLatestAspect( + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion); + + void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect); + + @Nonnull + ListResult listUrns( + @Nonnull final String entityName, + @Nonnull final String aspectName, + final int start, + final int pageSize); + + @Nonnull + Integer countAspect(@Nonnull final String aspectName, @Nullable String urnLike); + + @Nonnull + PagedList getPagedAspects(final RestoreIndicesArgs args); + + @Nonnull + Stream streamAspects(String entityName, String aspectName); + + int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn); + + @Nonnull + ListResult listLatestAspectMetadata( + @Nonnull final String entityName, + @Nonnull final String aspectName, + final int start, + final int pageSize); + + @Nonnull + ListResult listAspectMetadata( + @Nonnull final String entityName, + @Nonnull final String aspectName, + final long version, + final int start, + final int pageSize); + + Map> getNextVersions(@Nonnull Map> urnAspectMap); + + default long getNextVersion(@Nonnull final String urn, @Nonnull 
final String aspectName) { + return getNextVersions(urn, Set.of(aspectName)).get(aspectName); + } + + default Map getNextVersions( + @Nonnull final String urn, @Nonnull final Set aspectNames) { + return getNextVersions(Map.of(urn, aspectNames)).get(urn); + } + + long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName); + + void setWritable(boolean canWrite); + + @Nonnull + T runInTransactionWithRetry( + @Nonnull final Function block, final int maxTransactionRetry); + + @Nonnull + default T runInTransactionWithRetry( + @Nonnull final Function block, + AspectsBatch batch, + final int maxTransactionRetry) { + return runInTransactionWithRetry(block, maxTransactionRetry); + } + + default void incrementWriteMetrics(String aspectName, long count, long bytes) { + MetricUtils.counter( + this.getClass(), + String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_COUNT_METRIC_NAME, aspectName))) + .inc(count); + MetricUtils.counter( + this.getClass(), + String.join(MetricUtils.DELIMITER, List.of(ASPECT_WRITE_BYTES_METRIC_NAME, aspectName))) + .inc(bytes); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java index c16a41cbaf84b..485eb2b1af943 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/AspectMigrationsDao.java @@ -3,14 +3,15 @@ import javax.annotation.Nonnull; /** - * This interface is a split-off from {@link AspectDao} to segregate the methods that are only called by data migration - * tasks. This separation is not technically necessary, but it felt dangerous to leave entire-table queries mixed - * with the rest. + * This interface is a split-off from {@link AspectDao} to segregate the methods that are only + * called by data migration tasks. This separation is not technically necessary, but it felt + * dangerous to leave entire-table queries mixed with the rest. */ public interface AspectMigrationsDao { /** * Return a paged list of _all_ URNs in the database. + * * @param start Start offset of a page. * @param pageSize Number of records in a page. * @return An iterable of {@code String} URNs. @@ -20,14 +21,17 @@ public interface AspectMigrationsDao { /** * Return the count of entities (unique URNs) in the database. + * * @return Count of entities. */ long countEntities(); /** * Check if any record of given {@param aspectName} exists in the database. + * * @param aspectName Name of an entity aspect to search for. - * @return {@code true} if at least one record of given {@param aspectName} is found. {@code false} otherwise. + * @return {@code true} if at least one record of given {@param aspectName} is found. {@code + * false} otherwise. 
*/ boolean checkIfAspectExists(@Nonnull final String aspectName); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java index 8296edd615aad..eaf9b1a2cc415 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspect.java @@ -1,19 +1,18 @@ package com.linkedin.metadata.entity; +import java.sql.Timestamp; +import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -import javax.annotation.Nonnull; -import java.sql.Timestamp; - /** - * This is an internal representation of an entity aspect record {@link EntityServiceImpl} and {@link AspectDao} - * implementations are using. While {@link AspectDao} implementations have their own aspect record implementations, - * they cary implementation details that should not leak outside. Therefore, this is the type to use in public - * {@link AspectDao} methods. + * This is an internal representation of an entity aspect record {@link EntityServiceImpl} and + * {@link AspectDao} implementations are using. While {@link AspectDao} implementations have their + * own aspect record implementations, they cary implementation details that should not leak outside. + * Therefore, this is the type to use in public {@link AspectDao} methods. */ @Getter @Setter @@ -22,25 +21,23 @@ @EqualsAndHashCode public class EntityAspect { - @Nonnull - private String urn; + @Nonnull private String urn; - @Nonnull - private String aspect; + @Nonnull private String aspect; - private long version; + private long version; - private String metadata; + private String metadata; - private String systemMetadata; + private String systemMetadata; - private Timestamp createdOn; + private Timestamp createdOn; - private String createdBy; + private String createdBy; - private String createdFor; + private String createdFor; - public EntityAspectIdentifier toAspectIdentifier() { - return new EntityAspectIdentifier(getUrn(), getAspect(), getVersion()); - } + public EntityAspectIdentifier toAspectIdentifier() { + return new EntityAspectIdentifier(getUrn(), getAspect(), getVersion()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java index cb360192c0120..887bd3910310d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityAspectIdentifier.java @@ -6,10 +6,10 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - /** - * This class holds values required to construct a unique key to identify an entity aspect record in a database. - * Its existence started mainly for compatibility with {@link com.linkedin.metadata.entity.ebean.EbeanAspectV2.PrimaryKey} + * This class holds values required to construct a unique key to identify an entity aspect record in + * a database. 
Its existence started mainly for compatibility with {@link + * com.linkedin.metadata.entity.ebean.EbeanAspectV2.PrimaryKey} */ @Value @Slf4j @@ -19,10 +19,12 @@ public class EntityAspectIdentifier { long version; public static EntityAspectIdentifier fromEbean(EbeanAspectV2 ebeanAspectV2) { - return new EntityAspectIdentifier(ebeanAspectV2.getUrn(), ebeanAspectV2.getAspect(), ebeanAspectV2.getVersion()); + return new EntityAspectIdentifier( + ebeanAspectV2.getUrn(), ebeanAspectV2.getAspect(), ebeanAspectV2.getVersion()); } public static EntityAspectIdentifier fromCassandra(CassandraAspect cassandraAspect) { - return new EntityAspectIdentifier(cassandraAspect.getUrn(), cassandraAspect.getAspect(), cassandraAspect.getVersion()); + return new EntityAspectIdentifier( + cassandraAspect.getUrn(), cassandraAspect.getAspect(), cassandraAspect.getVersion()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index 57f88e31deea5..a333839416556 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -1,10 +1,10 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.BrowsePathUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.*; + import com.codahale.metrics.Timer; -import com.linkedin.data.template.GetMode; -import com.linkedin.data.template.SetMode; -import com.linkedin.entity.client.SystemEntityClient; -import com.linkedin.metadata.config.PreProcessHooks; import com.datahub.util.RecordUtils; import com.datahub.util.exception.ModelConversionException; import com.google.common.collect.ImmutableList; @@ -22,7 +22,9 @@ import com.linkedin.common.urn.VersionedUrnUtils; import com.linkedin.data.schema.TyperefDataSchema; import com.linkedin.data.template.DataTemplateUtil; +import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.SetMode; import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; import com.linkedin.data.template.UnionTemplate; @@ -32,19 +34,21 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.Aspect; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.metadata.entity.ebean.transactions.PatchBatchItem; import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; +import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.metadata.entity.transactions.AspectsBatch; import 
com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.models.AspectSpec; @@ -67,7 +71,7 @@ import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; import io.ebean.PagedList; - +import io.ebean.Transaction; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; @@ -84,51 +88,45 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.persistence.EntityNotFoundException; - -import io.ebean.Transaction; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.search.utils.BrowsePathUtils.*; -import static com.linkedin.metadata.utils.PegasusUtils.*; - - /** - * A class specifying create, update, and read operations against metadata entities and aspects - * by primary key (urn). + * A class specifying create, update, and read operations against metadata entities and aspects by + * primary key (urn). * - * This interface is meant to abstract away the storage concerns of these pieces of metadata, permitting - * any underlying storage system to be used in materializing GMS domain objects, which are implemented using Pegasus - * {@link RecordTemplate}s. + *

This interface is meant to abstract away the storage concerns of these pieces of metadata, + * permitting any underlying storage system to be used in materializing GMS domain objects, which + * are implemented using Pegasus {@link RecordTemplate}s. * - * Internal versioning semantics - * ============================= + *

Internal versioning semantics: * - * The latest version of any aspect is set to 0 for efficient retrieval; in most cases the latest state of an aspect - * will be the only fetched. + *

The latest version of any aspect is set to 0 for efficient retrieval; in most cases the latest + state of an aspect will be the only one fetched. * - * As such, 0 is treated as a special number. Once an aspect is no longer the latest, versions will increment - * monotonically, starting from 1. Thus, the second-to-last version of an aspect will be equal to total # versions - * of the aspect - 1. + *

As such, 0 is treated as a special number. Once an aspect is no longer the latest, versions + will increment monotonically, starting from 1. Thus, the second-to-last version of an aspect will + be equal to the total # of versions of the aspect - 1. * - * For example, if there are 5 instances of a single aspect, the latest will have version 0, and the second-to-last - * will have version 4. The "true" latest version of an aspect is always equal to the highest stored version - * of a given aspect + 1. + *

For example, if there are 5 instances of a single aspect, the latest will have version 0, and + * the second-to-last will have version 4. The "true" latest version of an aspect is always equal to + * the highest stored version of a given aspect + 1. * - * Note that currently, implementations of this interface are responsible for producing Metadata Change Log on - * ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate, SystemMetadata, RecordTemplate, SystemMetadata, - * MetadataChangeProposal, Urn, AuditStamp, AspectSpec)}. + *
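To make the version arithmetic concrete, a caller could read both the current and a historical value through getAspect; the entityService instance, urn, and aspect name below are hypothetical:

    // With 5 stored instances the versions are {0, 1, 2, 3, 4}; version 0 is always the newest.
    RecordTemplate latest = entityService.getAspect(urn, "ownership", 0L);   // current value
    RecordTemplate previous = entityService.getAspect(urn, "ownership", 4L); // second-to-last of 5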

Note that currently, implementations of this interface are responsible for producing Metadata + * Change Log on ingestion using {@link #conditionallyProduceMCLAsync(RecordTemplate, + * SystemMetadata, RecordTemplate, SystemMetadata, MetadataChangeProposal, Urn, AuditStamp, + * AspectSpec)}. * - * TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this class. + *

TODO: Consider whether we can abstract away virtual versioning semantics to subclasses of this + * class. */ @Slf4j public class EntityServiceImpl implements EntityService { @@ -137,7 +135,6 @@ public class EntityServiceImpl implements EntityService { * As described above, the latest version of an aspect should always take the value 0, with * monotonically increasing version incrementing as usual once the latest version is replaced. */ - private static final int DEFAULT_MAX_TRANSACTION_RETRY = 3; protected final AspectDao _aspectDao; @@ -146,8 +143,7 @@ public class EntityServiceImpl implements EntityService { private final Map> _entityToValidAspects; private RetentionService _retentionService; private final Boolean _alwaysEmitChangeLog; - @Getter - private final UpdateIndicesService _updateIndicesService; + @Getter private final UpdateIndicesService _updateIndicesService; private final PreProcessHooks _preProcessHooks; protected static final int MAX_KEYS_PER_QUERY = 500; @@ -160,17 +156,24 @@ public EntityServiceImpl( final boolean alwaysEmitChangeLog, final UpdateIndicesService updateIndicesService, final PreProcessHooks preProcessHooks) { - this(aspectDao, producer, entityRegistry, alwaysEmitChangeLog, updateIndicesService, preProcessHooks, DEFAULT_MAX_TRANSACTION_RETRY); + this( + aspectDao, + producer, + entityRegistry, + alwaysEmitChangeLog, + updateIndicesService, + preProcessHooks, + DEFAULT_MAX_TRANSACTION_RETRY); } public EntityServiceImpl( - @Nonnull final AspectDao aspectDao, - @Nonnull final EventProducer producer, - @Nonnull final EntityRegistry entityRegistry, - final boolean alwaysEmitChangeLog, - final UpdateIndicesService updateIndicesService, - final PreProcessHooks preProcessHooks, - final Integer retry) { + @Nonnull final AspectDao aspectDao, + @Nonnull final EventProducer producer, + @Nonnull final EntityRegistry entityRegistry, + final boolean alwaysEmitChangeLog, + final UpdateIndicesService updateIndicesService, + final PreProcessHooks preProcessHooks, + final Integer retry) { _aspectDao = aspectDao; _producer = producer; @@ -188,8 +191,8 @@ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { } /** - * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided - * set of aspect names. + * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided set + * of aspect names. * * @param urns set of urns to fetch aspects for * @param aspectNames aspects to fetch for each urn in urns set @@ -197,8 +200,7 @@ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { */ @Override public Map> getLatestAspects( - @Nonnull final Set urns, - @Nonnull final Set aspectNames) { + @Nonnull final Set urns, @Nonnull final Set aspectNames) { Map batchGetResults = getLatestAspect(urns, aspectNames); @@ -211,69 +213,88 @@ public Map> getLatestAspects( } // Add "key" aspects for each urn. TODO: Replace this with a materialized key aspect. 
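// As a usage sketch for this batch read (the service instance, urn, and aspect names are
// hypothetical, not part of this change):
//   Map<Urn, List<RecordTemplate>> latest =
//       entityService.getLatestAspects(Set.of(urn), Set.of("ownership", "globalTags"));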
- urnToAspects.keySet().forEach(key -> { - final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key); - urnToAspects.get(key).add(keyAspect); - }); - - batchGetResults.forEach((key, aspectEntry) -> { - final Urn urn = toUrn(key.getUrn()); - final String aspectName = key.getAspect(); - // for now, don't add the key aspect here- we have already added it above - if (aspectName.equals(getKeyAspectName(urn))) { - return; - } + urnToAspects + .keySet() + .forEach( + key -> { + final RecordTemplate keyAspect = EntityUtils.buildKeyAspect(_entityRegistry, key); + urnToAspects.get(key).add(keyAspect); + }); + + batchGetResults.forEach( + (key, aspectEntry) -> { + final Urn urn = toUrn(key.getUrn()); + final String aspectName = key.getAspect(); + // for now, don't add the key aspect here- we have already added it above + if (aspectName.equals(getKeyAspectName(urn))) { + return; + } - final RecordTemplate aspectRecord = - EntityUtils.toAspectRecord(urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); - urnToAspects.putIfAbsent(urn, new ArrayList<>()); - urnToAspects.get(urn).add(aspectRecord); - }); + final RecordTemplate aspectRecord = + EntityUtils.toAspectRecord( + urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); + urnToAspects.putIfAbsent(urn, new ArrayList<>()); + urnToAspects.get(urn).add(aspectRecord); + }); return urnToAspects; } @Nonnull @Override - public Map getLatestAspectsForUrn(@Nonnull final Urn urn, @Nonnull final Set aspectNames) { - Map batchGetResults = getLatestAspect(new HashSet<>(Arrays.asList(urn)), aspectNames); + public Map getLatestAspectsForUrn( + @Nonnull final Urn urn, @Nonnull final Set aspectNames) { + Map batchGetResults = + getLatestAspect(new HashSet<>(Arrays.asList(urn)), aspectNames); final Map result = new HashMap<>(); - batchGetResults.forEach((key, aspectEntry) -> { - final String aspectName = key.getAspect(); - final RecordTemplate aspectRecord = EntityUtils.toAspectRecord(urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); - result.put(aspectName, aspectRecord); - }); + batchGetResults.forEach( + (key, aspectEntry) -> { + final String aspectName = key.getAspect(); + final RecordTemplate aspectRecord = + EntityUtils.toAspectRecord( + urn, aspectName, aspectEntry.getMetadata(), getEntityRegistry()); + result.put(aspectName, aspectRecord); + }); return result; } /** * Retrieves an aspect having a specific {@link Urn}, name, & version. * - * Note that once we drop support for legacy aspect-specific resources, - * we should make this a protected method. Only visible for backwards compatibility. + *

Note that once we drop support for legacy aspect-specific resources, we should make this a + protected method. Only visible for backwards compatibility. * * @param urn an urn associated with the requested aspect * @param aspectName name of the aspect requested * @param version specific version of the aspect being requested - * @return the {@link RecordTemplate} representation of the requested aspect object, or null if one cannot be found + * @return the {@link RecordTemplate} representation of the requested aspect object, or null if + one cannot be found */ @Nullable @Override - public RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { + public RecordTemplate getAspect( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { - log.debug("Invoked getAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version); + log.debug( + "Invoked getAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version); version = calculateVersionNumber(urn, aspectName, version); - final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version); - final Optional maybeAspect = Optional.ofNullable(_aspectDao.getAspect(primaryKey)); - return maybeAspect.map( - aspect -> EntityUtils.toAspectRecord(urn, aspectName, aspect.getMetadata(), getEntityRegistry())).orElse(null); + final EntityAspectIdentifier primaryKey = + new EntityAspectIdentifier(urn.toString(), aspectName, version); + final Optional maybeAspect = + Optional.ofNullable(_aspectDao.getAspect(primaryKey)); + return maybeAspect + .map( + aspect -> + EntityUtils.toAspectRecord( + urn, aspectName, aspect.getMetadata(), getEntityRegistry())) + .orElse(null); } /** - * Retrieves the latest aspects for the given urn as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given urn as dynamic aspect objects (Without having to + * define union objects) * * @param entityName name of the entity to fetch * @param urn urn of entity to fetch @@ -285,13 +306,14 @@ public RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String as public EntityResponse getEntityV2( @Nonnull final String entityName, @Nonnull final Urn urn, - @Nonnull final Set aspectNames) throws URISyntaxException { + @Nonnull final Set aspectNames) + throws URISyntaxException { return getEntitiesV2(entityName, Collections.singleton(urn), aspectNames).get(urn); } /** - * Retrieves the latest aspects for the given set of urns as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given set of urns as dynamic aspect objects (Without + * having to define union objects) * * @param entityName name of the entity to fetch * @param urns set of urns to fetch @@ -302,29 +324,31 @@ public EntityResponse getEntityV2( public Map getEntitiesV2( @Nonnull final String entityName, @Nonnull final Set urns, - @Nonnull final Set aspectNames) throws URISyntaxException { - return getLatestEnvelopedAspects(entityName, urns, aspectNames) - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); + @Nonnull final Set aspectNames) + throws URISyntaxException { + return getLatestEnvelopedAspects(entityName, urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); } /** - *
Retrieves the aspects for the given set of urns and versions as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects (Without + * having to define union objects) * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link Entity} object */ @Override public Map getEntitiesVersionedV2( - @Nonnull final Set versionedUrns, - @Nonnull final Set aspectNames) throws URISyntaxException { - return getVersionedEnvelopedAspects(versionedUrns, aspectNames) - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); + @Nonnull final Set versionedUrns, @Nonnull final Set aspectNames) + throws URISyntaxException { + return getVersionedEnvelopedAspects(versionedUrns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> toEntityResponse(entry.getKey(), entry.getValue()))); } /** @@ -338,16 +362,21 @@ public Map getEntitiesVersionedV2( @Override public Map> getLatestEnvelopedAspects( // TODO: entityName is unused, can we remove this as a param? - @Nonnull String entityName, - @Nonnull Set urns, - @Nonnull Set aspectNames) throws URISyntaxException { - - final Set dbKeys = urns.stream() - .map(urn -> aspectNames.stream() - .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList())) - .flatMap(List::stream) - .collect(Collectors.toSet()); + @Nonnull String entityName, @Nonnull Set urns, @Nonnull Set aspectNames) + throws URISyntaxException { + + final Set dbKeys = + urns.stream() + .map( + urn -> + aspectNames.stream() + .map( + aspectName -> + new EntityAspectIdentifier( + urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList())) + .flatMap(List::stream) + .collect(Collectors.toSet()); return getCorrespondingAspects(dbKeys, urns); } @@ -355,61 +384,86 @@ public Map> getLatestEnvelopedAspects( /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link EnvelopedAspect} object */ @Override public Map> getVersionedEnvelopedAspects( - @Nonnull Set versionedUrns, - @Nonnull Set aspectNames) throws URISyntaxException { + @Nonnull Set versionedUrns, @Nonnull Set aspectNames) + throws URISyntaxException { - Map> urnAspectVersionMap = versionedUrns.stream() - .collect(Collectors.toMap(versionedUrn -> versionedUrn.getUrn().toString(), - versionedUrn -> VersionedUrnUtils.convertVersionStamp(versionedUrn.getVersionStamp()))); + Map> urnAspectVersionMap = + versionedUrns.stream() + .collect( + Collectors.toMap( + versionedUrn -> versionedUrn.getUrn().toString(), + versionedUrn -> + VersionedUrnUtils.convertVersionStamp(versionedUrn.getVersionStamp()))); // Cover full/partial versionStamp - final Set dbKeys = urnAspectVersionMap.entrySet().stream() - .filter(entry -> !entry.getValue().isEmpty()) - .map(entry -> aspectNames.stream() - 
.filter(aspectName -> entry.getValue().containsKey(aspectName)) - .map(aspectName -> new EntityAspectIdentifier(entry.getKey(), aspectName, - entry.getValue().get(aspectName))) - .collect(Collectors.toList())) - .flatMap(List::stream) - .collect(Collectors.toSet()); + final Set dbKeys = + urnAspectVersionMap.entrySet().stream() + .filter(entry -> !entry.getValue().isEmpty()) + .map( + entry -> + aspectNames.stream() + .filter(aspectName -> entry.getValue().containsKey(aspectName)) + .map( + aspectName -> + new EntityAspectIdentifier( + entry.getKey(), aspectName, entry.getValue().get(aspectName))) + .collect(Collectors.toList())) + .flatMap(List::stream) + .collect(Collectors.toSet()); // Cover empty versionStamp - dbKeys.addAll(urnAspectVersionMap.entrySet().stream() - .filter(entry -> entry.getValue().isEmpty()) - .map(entry -> aspectNames.stream() - .map(aspectName -> new EntityAspectIdentifier(entry.getKey(), aspectName, 0L)) - .collect(Collectors.toList())) - .flatMap(List::stream) - .collect(Collectors.toSet())); - - return getCorrespondingAspects(dbKeys, versionedUrns.stream() - .map(versionedUrn -> versionedUrn.getUrn().toString()) - .map(UrnUtils::getUrn).collect(Collectors.toSet())); - } - - private Map> getCorrespondingAspects(Set dbKeys, Set urns) { - - final Map envelopedAspectMap = getEnvelopedAspects(dbKeys); + dbKeys.addAll( + urnAspectVersionMap.entrySet().stream() + .filter(entry -> entry.getValue().isEmpty()) + .map( + entry -> + aspectNames.stream() + .map( + aspectName -> + new EntityAspectIdentifier(entry.getKey(), aspectName, 0L)) + .collect(Collectors.toList())) + .flatMap(List::stream) + .collect(Collectors.toSet())); + + return getCorrespondingAspects( + dbKeys, + versionedUrns.stream() + .map(versionedUrn -> versionedUrn.getUrn().toString()) + .map(UrnUtils::getUrn) + .collect(Collectors.toSet())); + } + + private Map> getCorrespondingAspects( + Set dbKeys, Set urns) { + + final Map envelopedAspectMap = + getEnvelopedAspects(dbKeys); // Group result by Urn - final Map> urnToAspects = envelopedAspectMap.entrySet() - .stream() - .collect(Collectors.groupingBy(entry -> entry.getKey().getUrn(), - Collectors.mapping(Map.Entry::getValue, Collectors.toList()))); + final Map> urnToAspects = + envelopedAspectMap.entrySet().stream() + .collect( + Collectors.groupingBy( + entry -> entry.getKey().getUrn(), + Collectors.mapping(Map.Entry::getValue, Collectors.toList()))); final Map> result = new HashMap<>(); for (Urn urn : urns) { - List aspects = urnToAspects.getOrDefault(urn.toString(), Collections.emptyList()); + List aspects = + urnToAspects.getOrDefault(urn.toString(), Collections.emptyList()); EnvelopedAspect keyAspect = getKeyEnvelopedAspect(urn); // Add key aspect if it does not exist in the returned aspects - if (aspects.isEmpty() || aspects.stream().noneMatch(aspect -> keyAspect.getName().equals(aspect.getName()))) { - result.put(urn, ImmutableList.builder().addAll(aspects).add(keyAspect).build()); + if (aspects.isEmpty() + || aspects.stream().noneMatch(aspect -> keyAspect.getName().equals(aspect.getName()))) { + result.put( + urn, ImmutableList.builder().addAll(aspects).add(keyAspect).build()); } else { result.put(urn, aspects); } @@ -427,33 +481,42 @@ private Map> getCorrespondingAspects(Set envelopedAspect.getName().equals(aspectName)) .findFirst() .orElse(null); } - /** - * Retrieves an {@link VersionedAspect}, or null if one cannot be found. - */ + /** Retrieves an {@link VersionedAspect}, or null if one cannot be found. 
*/ @Nullable @Override - public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) { + public VersionedAspect getVersionedAspect( + @Nonnull Urn urn, @Nonnull String aspectName, long version) { - log.debug("Invoked getVersionedAspect with urn: {}, aspectName: {}, version: {}", urn, aspectName, version); + log.debug( + "Invoked getVersionedAspect with urn: {}, aspectName: {}, version: {}", + urn, + aspectName, + version); VersionedAspect result = new VersionedAspect(); version = calculateVersionNumber(urn, aspectName, version); - final EntityAspectIdentifier primaryKey = new EntityAspectIdentifier(urn.toString(), aspectName, version); - final Optional maybeAspect = Optional.ofNullable(_aspectDao.getAspect(primaryKey)); + final EntityAspectIdentifier primaryKey = + new EntityAspectIdentifier(urn.toString(), aspectName, version); + final Optional maybeAspect = + Optional.ofNullable(_aspectDao.getAspect(primaryKey)); RecordTemplate aspectRecord = - maybeAspect.map(aspect -> EntityUtils.toAspectRecord(urn, aspectName, aspect.getMetadata(), getEntityRegistry())) + maybeAspect + .map( + aspect -> + EntityUtils.toAspectRecord( + urn, aspectName, aspect.getMetadata(), getEntityRegistry())) .orElse(null); if (aspectRecord == null) { @@ -472,8 +535,8 @@ public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspe /** * Retrieves a list of all aspects belonging to an entity of a particular type, sorted by urn. * - * Note that once we drop support for legacy 'getAllDataPlatforms' endpoint, - * we can drop support for this unless otherwise required. Only visible for backwards compatibility. + *
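A paged read against this contract might look like the following sketch; the entity name, aspect name, and page bounds are illustrative, and the entityService instance is assumed to exist:

    ListResult<RecordTemplate> page =
        entityService.listLatestAspects("dataset", "ownership", /* start */ 0, /* count */ 100);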

Note that once we drop support for legacy 'getAllDataPlatforms' endpoint, we can drop + * support for this unless otherwise required. Only visible for backwards compatibility. * * @param entityName name of the entity type the aspect belongs to, e.g. 'dataset' * @param aspectName name of the aspect requested, e.g. 'ownership' @@ -489,25 +552,39 @@ public ListResult listLatestAspects( final int start, final int count) { - log.debug("Invoked listLatestAspects with entityName: {}, aspectName: {}, start: {}, count: {}", entityName, - aspectName, start, count); + log.debug( + "Invoked listLatestAspects with entityName: {}, aspectName: {}, start: {}, count: {}", + entityName, + aspectName, + start, + count); final ListResult aspectMetadataList = _aspectDao.listLatestAspectMetadata(entityName, aspectName, start, count); final List aspects = new ArrayList<>(); for (int i = 0; i < aspectMetadataList.getValues().size(); i++) { - aspects.add(EntityUtils.toAspectRecord(aspectMetadataList.getMetadata().getExtraInfos().get(i).getUrn(), aspectName, - aspectMetadataList.getValues().get(i), getEntityRegistry())); + aspects.add( + EntityUtils.toAspectRecord( + aspectMetadataList.getMetadata().getExtraInfos().get(i).getUrn(), + aspectName, + aspectMetadataList.getValues().get(i), + getEntityRegistry())); } - return new ListResult<>(aspects, aspectMetadataList.getMetadata(), aspectMetadataList.getNextStart(), - aspectMetadataList.isHasNext(), aspectMetadataList.getTotalCount(), aspectMetadataList.getTotalPageCount(), + return new ListResult<>( + aspects, + aspectMetadataList.getMetadata(), + aspectMetadataList.getNextStart(), + aspectMetadataList.isHasNext(), + aspectMetadataList.getTotalCount(), + aspectMetadataList.getTotalPageCount(), aspectMetadataList.getPageSize()); } /** * Common batch-like pattern used primarily in tests. + * * @param entityUrn the entity urn * @param pairList list of aspects in pairs of aspect name and record template * @param auditStamp audit stamp @@ -515,38 +592,46 @@ public ListResult listLatestAspects( * @return update result */ @Override - public List ingestAspects(@Nonnull Urn entityUrn, - List> pairList, - @Nonnull final AuditStamp auditStamp, - SystemMetadata systemMetadata) { - List items = pairList.stream() - .map(pair -> UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(pair.getKey()) - .aspect(pair.getValue()) - .systemMetadata(systemMetadata) - .build(_entityRegistry)) + public List ingestAspects( + @Nonnull Urn entityUrn, + List> pairList, + @Nonnull final AuditStamp auditStamp, + SystemMetadata systemMetadata) { + List items = + pairList.stream() + .map( + pair -> + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) .collect(Collectors.toList()); return ingestAspects(AspectsBatchImpl.builder().items(items).build(), auditStamp, true, true); } /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataChangeLog}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataChangeLog}. 
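As a sketch of the pair-based convenience overload above, where the two aspect records, the audit stamp, and the system metadata are assumed to be built elsewhere:

    List<Pair<String, RecordTemplate>> pairs =
        List.of(Pair.of("ownership", ownershipAspect), Pair.of("status", statusAspect));
    entityService.ingestAspects(entityUrn, pairs, auditStamp, systemMetadata);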
* * @param aspectsBatch aspects to write * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time - * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in correspondence upon - successful update + * @param emitMCL whether a {@link com.linkedin.mxe.MetadataChangeLog} should be emitted in + correspondence upon successful update * @return the {@link RecordTemplate} representation of the written aspect object */ @Override - public List ingestAspects(@Nonnull final AspectsBatch aspectsBatch, - @Nonnull final AuditStamp auditStamp, - boolean emitMCL, - boolean overwrite) { - - Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); - List ingestResults = ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite); + public List ingestAspects( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean emitMCL, + boolean overwrite) { + + Timer.Context ingestToLocalDBTimer = + MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); + List ingestResults = + ingestAspectsToLocalDB(aspectsBatch, auditStamp, overwrite); List mclResults = emitMCL(ingestResults, emitMCL); ingestToLocalDBTimer.stop(); @@ -554,135 +639,197 @@ public List ingestAspects(@Nonnull final AspectsBatch aspect } /** - * Checks whether there is an actual update to the aspect by applying the updateLambda - * If there is an update, push the new version into the local DB. - * Otherwise, do not push the new version, but just update the system metadata. + * Checks whether there is an actual update to the aspect by applying the updateLambda. If there is + * an update, push the new version into the local DB. Otherwise, do not push the new version, but + * just update the system metadata.
* - * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of the aspect being - * inserted, and a function to apply to the latest version of the aspect to get the updated version + * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of + * the aspect being inserted, and a function to apply to the latest version of the aspect to + * get the updated version * @param auditStamp an {@link AuditStamp} containing metadata about the writer & current time * @return Details about the new and old version of the aspect */ @Nonnull - private List ingestAspectsToLocalDB(@Nonnull final AspectsBatch aspectsBatch, - @Nonnull final AuditStamp auditStamp, - boolean overwrite) { + private List ingestAspectsToLocalDB( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean overwrite) { if (aspectsBatch.containsDuplicateAspects()) { log.warn(String.format("Batch contains duplicates: %s", aspectsBatch)); } - return _aspectDao.runInTransactionWithRetry((tx) -> { - // Read before write is unfortunate, however batch it - Map> urnAspects = aspectsBatch.getUrnAspectsMap(); - // read #1 - Map> latestAspects = _aspectDao.getLatestAspects(urnAspects); - // read #2 - Map> nextVersions = _aspectDao.getNextVersions(urnAspects); - - List items = aspectsBatch.getItems().stream() - .map(item -> { - if (item instanceof UpsertBatchItem) { - return (UpsertBatchItem) item; - } else { - // patch to upsert - PatchBatchItem patchBatchItem = (PatchBatchItem) item; - final String urnStr = patchBatchItem.getUrn().toString(); - final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(patchBatchItem.getAspectName()); - final RecordTemplate currentValue = latest != null - ? 
EntityUtils.toAspectRecord(patchBatchItem.getUrn(), patchBatchItem.getAspectName(), latest.getMetadata(), _entityRegistry) : null; - return patchBatchItem.applyPatch(_entityRegistry, currentValue); - } - }) - .collect(Collectors.toList()); - - // Database Upsert results - List upsertResults = items.stream() - .map(item -> { - final String urnStr = item.getUrn().toString(); - final EntityAspect latest = latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName()); - final long nextVersion = nextVersions.getOrDefault(urnStr, Map.of()).getOrDefault(item.getAspectName(), 0L); - - final UpdateAspectResult result; - if (overwrite || latest == null) { - result = ingestAspectToLocalDB(tx, item.getUrn(), item.getAspectName(), item.getAspect(), - auditStamp, item.getSystemMetadata(), latest, nextVersion).toBuilder().request(item).build(); - - // support inner-batch upserts - latestAspects.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), item.toLatestEntityAspect(auditStamp)); - nextVersions.computeIfAbsent(urnStr, key -> new HashMap<>()).put(item.getAspectName(), nextVersion + 1); - } else { - RecordTemplate oldValue = EntityUtils.toAspectRecord(item.getUrn().getEntityType(), item.getAspectName(), - latest.getMetadata(), getEntityRegistry()); - SystemMetadata oldMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); - result = UpdateAspectResult.builder() - .urn(item.getUrn()) - .request(item) - .oldValue(oldValue) - .newValue(oldValue) - .oldSystemMetadata(oldMetadata) - .newSystemMetadata(oldMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(auditStamp) - .maxVersion(latest.getVersion()) - .build(); - } - - return result; - }).collect(Collectors.toList()); - - // commit upserts prior to retention or kafka send, if supported by impl - if (tx != null) { - tx.commitAndContinue(); - } + return _aspectDao.runInTransactionWithRetry( + (tx) -> { + // Read before write is unfortunate, however batch it + Map> urnAspects = aspectsBatch.getUrnAspectsMap(); + // read #1 + Map> latestAspects = + _aspectDao.getLatestAspects(urnAspects); + // read #2 + Map> nextVersions = _aspectDao.getNextVersions(urnAspects); + + List items = + aspectsBatch.getItems().stream() + .map( + item -> { + if (item instanceof UpsertBatchItem) { + return (UpsertBatchItem) item; + } else { + // patch to upsert + PatchBatchItem patchBatchItem = (PatchBatchItem) item; + final String urnStr = patchBatchItem.getUrn().toString(); + final EntityAspect latest = + latestAspects + .getOrDefault(urnStr, Map.of()) + .get(patchBatchItem.getAspectName()); + final RecordTemplate currentValue = + latest != null + ? 
EntityUtils.toAspectRecord( + patchBatchItem.getUrn(), + patchBatchItem.getAspectName(), + latest.getMetadata(), + _entityRegistry) + : null; + return patchBatchItem.applyPatch(_entityRegistry, currentValue); + } + }) + .collect(Collectors.toList()); + + // Database Upsert results + List upsertResults = + items.stream() + .map( + item -> { + final String urnStr = item.getUrn().toString(); + final EntityAspect latest = + latestAspects.getOrDefault(urnStr, Map.of()).get(item.getAspectName()); + final long nextVersion = + nextVersions + .getOrDefault(urnStr, Map.of()) + .getOrDefault(item.getAspectName(), 0L); + + final UpdateAspectResult result; + if (overwrite || latest == null) { + result = + ingestAspectToLocalDB( + tx, + item.getUrn(), + item.getAspectName(), + item.getAspect(), + auditStamp, + item.getSystemMetadata(), + latest, + nextVersion) + .toBuilder() + .request(item) + .build(); + + // support inner-batch upserts + latestAspects + .computeIfAbsent(urnStr, key -> new HashMap<>()) + .put(item.getAspectName(), item.toLatestEntityAspect(auditStamp)); + nextVersions + .computeIfAbsent(urnStr, key -> new HashMap<>()) + .put(item.getAspectName(), nextVersion + 1); + } else { + RecordTemplate oldValue = + EntityUtils.toAspectRecord( + item.getUrn().getEntityType(), + item.getAspectName(), + latest.getMetadata(), + getEntityRegistry()); + SystemMetadata oldMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + result = + UpdateAspectResult.builder() + .urn(item.getUrn()) + .request(item) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(oldMetadata) + .newSystemMetadata(oldMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(latest.getVersion()) + .build(); + } + + return result; + }) + .collect(Collectors.toList()); + + // commit upserts prior to retention or kafka send, if supported by impl + if (tx != null) { + tx.commitAndContinue(); + } - // Retention optimization and tx - if (_retentionService != null) { - List retentionBatch = upsertResults.stream() - // Only consider retention when there was a previous version - .filter(result -> latestAspects.containsKey(result.getUrn().toString()) - && latestAspects.get(result.getUrn().toString()).containsKey(result.getRequest().getAspectName())) - .filter(result -> { - RecordTemplate oldAspect = result.getOldValue(); - RecordTemplate newAspect = result.getNewValue(); - // Apply retention policies if there was an update to existing aspect value - return oldAspect != newAspect && oldAspect != null && _retentionService != null; - }) - .map(result -> RetentionService.RetentionContext.builder() - .urn(result.getUrn()) - .aspectName(result.getRequest().getAspectName()) - .maxVersion(Optional.of(result.getMaxVersion())) - .build()) - .collect(Collectors.toList()); - _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); - } else { - log.warn("Retention service is missing!"); - } + // Retention optimization and tx + if (_retentionService != null) { + List retentionBatch = + upsertResults.stream() + // Only consider retention when there was a previous version + .filter( + result -> + latestAspects.containsKey(result.getUrn().toString()) + && latestAspects + .get(result.getUrn().toString()) + .containsKey(result.getRequest().getAspectName())) + .filter( + result -> { + RecordTemplate oldAspect = result.getOldValue(); + RecordTemplate newAspect = result.getNewValue(); + // Apply retention policies if there was an update to existing aspect + // value + return 
oldAspect != newAspect + && oldAspect != null + && _retentionService != null; + }) + .map( + result -> + RetentionService.RetentionContext.builder() + .urn(result.getUrn()) + .aspectName(result.getRequest().getAspectName()) + .maxVersion(Optional.of(result.getMaxVersion())) + .build()) + .collect(Collectors.toList()); + _retentionService.applyRetentionWithPolicyDefaults(retentionBatch); + } else { + log.warn("Retention service is missing!"); + } - return upsertResults; - }, aspectsBatch, DEFAULT_MAX_TRANSACTION_RETRY); + return upsertResults; + }, + aspectsBatch, + DEFAULT_MAX_TRANSACTION_RETRY); } @Nonnull private List emitMCL(List sqlResults, boolean emitMCL) { - List withEmitMCL = sqlResults.stream() + List withEmitMCL = + sqlResults.stream() .map(result -> emitMCL ? conditionallyProduceMCLAsync(result) : result) .collect(Collectors.toList()); // join futures messages, capture error state - List> statusPairs = withEmitMCL.stream() + List> statusPairs = + withEmitMCL.stream() .filter(result -> result.getMclFuture() != null) - .map(result -> { - try { - result.getMclFuture().get(); - return Pair.of(true, result); - } catch (InterruptedException | ExecutionException e) { - return Pair.of(false, result); - } - }).collect(Collectors.toList()); + .map( + result -> { + try { + result.getMclFuture().get(); + return Pair.of(true, result); + } catch (InterruptedException | ExecutionException e) { + return Pair.of(false, result); + } + }) + .collect(Collectors.toList()); if (statusPairs.stream().anyMatch(p -> !p.getFirst())) { - log.error("Failed to produce MCLs: {}", statusPairs.stream() + log.error( + "Failed to produce MCLs: {}", + statusPairs.stream() .filter(p -> !p.getFirst()) .map(Pair::getValue) .map(v -> v.getRequest().toString()) @@ -695,12 +842,14 @@ private List emitMCL(List sqlResults, bo } /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataAuditEvent}. * - * This method runs a read -> write atomically in a single transaction, this is to prevent multiple IDs from being created. + *

This method runs a read -> write atomically in a single transaction; this is to prevent + multiple IDs from being created. * - *
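A minimal sketch of the intended call, assuming the aspect record, audit stamp, and system metadata already exist (all names here are hypothetical):

    // Ingests 'ownership' only if no copy of the aspect is present yet for this urn.
    RecordTemplate written =
        entityService.ingestAspectIfNotPresent(
            urn, "ownership", ownershipAspect, auditStamp, systemMetadata);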

Note that in general, this should not be used externally. It is currently serving upgrade + * scripts and is as such public. * * @param urn an urn associated with the new aspect * @param aspectName name of the aspect being inserted @@ -711,15 +860,22 @@ private List emitMCL(List sqlResults, bo */ @Nullable @Override - public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, - @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, - @Nonnull AuditStamp auditStamp, - @Nonnull SystemMetadata systemMetadata) { - log.debug("Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", urn, aspectName, newValue); - - AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() - .one(UpsertBatchItem.builder() + public RecordTemplate ingestAspectIfNotPresent( + @Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { + log.debug( + "Invoked ingestAspectIfNotPresent with urn: {}, aspectName: {}, newValue: {}", + urn, + aspectName, + newValue); + + AspectsBatchImpl aspectsBatch = + AspectsBatchImpl.builder() + .one( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(newValue) @@ -733,150 +889,208 @@ public RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, /** * Wrapper around batch method for single item + * * @param proposal the proposal * @param auditStamp an audit stamp representing the time and actor proposing the change - * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @param async a flag to control whether we commit to primary store or just write to proposal log + * before returning * @return an {@link IngestResult} containing the results */ @Override - public IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { - return ingestProposal(AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), auditStamp, - async).stream().findFirst().get(); + public IngestResult ingestProposal( + MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async) { + return ingestProposal( + AspectsBatchImpl.builder().mcps(List.of(proposal), getEntityRegistry()).build(), + auditStamp, + async) + .stream() + .findFirst() + .get(); } /** - * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any additional aspects or do any - * enrichment, instead it changes only those which are provided inside the metadata change proposal. + * Ingest a new {@link MetadataChangeProposal}. Note that this method does NOT include any + * additional aspects or do any enrichment, instead it changes only those which are provided + * inside the metadata change proposal. * - * Do not use this method directly for creating new entities, as it DOES NOT create an Entity Key aspect in the DB. Instead, - * use an Entity Client. + *
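A sketch of the single-proposal wrapper above; the proposal and audit stamp are assumed to be built elsewhere:

    // async = false commits to the primary store before returning;
    // async = true only writes to the proposal log.
    IngestResult result = entityService.ingestProposal(proposal, auditStamp, /* async */ false);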

Do not use this method directly for creating new entities, as it DOES NOT create an Entity + * Key aspect in the DB. Instead, use an Entity Client. * * @param aspectsBatch the proposals to ingest * @param auditStamp an audit stamp representing the time and actor proposing the change - * @param async a flag to control whether we commit to primary store or just write to proposal log before returning + * @param async a flag to control whether we commit to primary store or just write to proposal log + * before returning * @return an {@link IngestResult} containing the results */ @Override - public Set ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) { + public Set ingestProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async) { - Stream timeseriesIngestResults = ingestTimeseriesProposal(aspectsBatch, auditStamp); - Stream nonTimeseriesIngestResults = async ? ingestProposalAsync(aspectsBatch) - : ingestProposalSync(aspectsBatch, auditStamp); + Stream timeseriesIngestResults = + ingestTimeseriesProposal(aspectsBatch, auditStamp); + Stream nonTimeseriesIngestResults = + async ? ingestProposalAsync(aspectsBatch) : ingestProposalSync(aspectsBatch, auditStamp); - return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults).collect(Collectors.toSet()); + return Stream.concat(timeseriesIngestResults, nonTimeseriesIngestResults) + .collect(Collectors.toSet()); } /** * Timeseries is pass through to MCL, no MCP + * * @param aspectsBatch timeseries upserts batch * @param auditStamp provided audit information * @return returns ingest proposal result, however was never in the MCP topic */ - private Stream ingestTimeseriesProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp) { - List unsupported = aspectsBatch.getItems().stream() - .filter(item -> item.getAspectSpec().isTimeseries() && item.getChangeType() != ChangeType.UPSERT) + private Stream ingestTimeseriesProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp) { + List unsupported = + aspectsBatch.getItems().stream() + .filter( + item -> + item.getAspectSpec().isTimeseries() + && item.getChangeType() != ChangeType.UPSERT) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { - throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() - .map(AbstractBatchItem::getChangeType).collect(Collectors.toSet())); + throw new UnsupportedOperationException( + "ChangeType not supported: " + + unsupported.stream() + .map(AbstractBatchItem::getChangeType) + .collect(Collectors.toSet())); } - List, Boolean>>>> timeseriesResults = aspectsBatch.getItems().stream() + List, Boolean>>>> timeseriesResults = + aspectsBatch.getItems().stream() .filter(item -> item.getAspectSpec().isTimeseries()) .map(item -> (UpsertBatchItem) item) - .map(item -> Pair.of(item, conditionallyProduceMCLAsync(null, null, item.getAspect(), item.getSystemMetadata(), - item.getMetadataChangeProposal(), item.getUrn(), auditStamp, item.getAspectSpec()))) + .map( + item -> + Pair.of( + item, + conditionallyProduceMCLAsync( + null, + null, + item.getAspect(), + item.getSystemMetadata(), + item.getMetadataChangeProposal(), + item.getUrn(), + auditStamp, + item.getAspectSpec()))) .collect(Collectors.toList()); - return timeseriesResults.stream().map(result -> { - Optional, Boolean>> emissionStatus = result.getSecond(); - - emissionStatus.ifPresent(status -> { - try { - status.getFirst().get(); - } catch (InterruptedException | ExecutionException e) { - throw new 
RuntimeException(e); - } - }); - - UpsertBatchItem request = result.getFirst(); - return IngestResult.builder() - .urn(request.getUrn()) - .request(request) - .publishedMCL(emissionStatus.map(status -> status.getFirst() != null).orElse(false)) - .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false)) - .build(); - }); + return timeseriesResults.stream() + .map( + result -> { + Optional, Boolean>> emissionStatus = result.getSecond(); + + emissionStatus.ifPresent( + status -> { + try { + status.getFirst().get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + + UpsertBatchItem request = result.getFirst(); + return IngestResult.builder() + .urn(request.getUrn()) + .request(request) + .publishedMCL( + emissionStatus.map(status -> status.getFirst() != null).orElse(false)) + .processedMCL(emissionStatus.map(Pair::getSecond).orElse(false)) + .build(); + }); } /** * For async ingestion of non-timeseries, any change type + * * @param aspectsBatch non-timeseries ingest aspects * @return produced items to the MCP topic */ private Stream ingestProposalAsync(AspectsBatch aspectsBatch) { - List nonTimeseries = aspectsBatch.getItems().stream() + List nonTimeseries = + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList()); - List> futures = nonTimeseries.stream().map(item -> - // When async is turned on, we write to proposal log and return without waiting - _producer.produceMetadataChangeProposal(item.getUrn(), item.getMetadataChangeProposal())) + List> futures = + nonTimeseries.stream() + .map( + item -> + // When async is turned on, we write to proposal log and return without waiting + _producer.produceMetadataChangeProposal( + item.getUrn(), item.getMetadataChangeProposal())) .filter(Objects::nonNull) .collect(Collectors.toList()); try { - return nonTimeseries.stream().map(item -> - IngestResult.builder() + return nonTimeseries.stream() + .map( + item -> + IngestResult.builder() .urn(item.getUrn()) .request(item) .publishedMCP(true) .build()); } finally { - futures.forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } } - private Stream ingestProposalSync(AspectsBatch aspectsBatch, AuditStamp auditStamp) { - AspectsBatchImpl nonTimeseries = AspectsBatchImpl.builder() - .items(aspectsBatch.getItems().stream() + private Stream ingestProposalSync( + AspectsBatch aspectsBatch, AuditStamp auditStamp) { + AspectsBatchImpl nonTimeseries = + AspectsBatchImpl.builder() + .items( + aspectsBatch.getItems().stream() .filter(item -> !item.getAspectSpec().isTimeseries()) .collect(Collectors.toList())) .build(); - List unsupported = nonTimeseries.getItems().stream() - .filter(item -> item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH - && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) + List unsupported = + nonTimeseries.getItems().stream() + .filter( + item -> + item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH + && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { - throw new UnsupportedOperationException("ChangeType not supported: " + unsupported.stream() - .map(item -> 
item.getMetadataChangeProposal().getChangeType()).collect(Collectors.toSet())); + throw new UnsupportedOperationException( + "ChangeType not supported: " + + unsupported.stream() + .map(item -> item.getMetadataChangeProposal().getChangeType()) + .collect(Collectors.toSet())); } List upsertResults = ingestAspects(nonTimeseries, auditStamp, true, true); - return upsertResults.stream().map(result -> { - AbstractBatchItem item = result.getRequest(); + return upsertResults.stream() + .map( + result -> { + AbstractBatchItem item = result.getRequest(); - return IngestResult.builder() - .urn(item.getUrn()) - .request(item) - .publishedMCL(result.getMclFuture() != null) - .sqlCommitted(true) - .isUpdate(result.getOldValue() != null) - .build(); - }); + return IngestResult.builder() + .urn(item.getUrn()) + .request(item) + .publishedMCL(result.getMclFuture() != null) + .sqlCommitted(true) + .isUpdate(result.getOldValue() != null) + .build(); + }); } @Override - public String batchApplyRetention(Integer start, Integer count, Integer attemptWithVersion, String aspectName, - String urn) { + public String batchApplyRetention( + Integer start, Integer count, Integer attemptWithVersion, String aspectName, String urn) { BulkApplyRetentionArgs args = new BulkApplyRetentionArgs(); if (start == null) { start = 0; @@ -900,7 +1114,8 @@ private boolean preprocessEvent(MetadataChangeLog metadataChangeLog) { if (_preProcessHooks.isUiEnabled()) { if (metadataChangeLog.getSystemMetadata() != null) { if (metadataChangeLog.getSystemMetadata().getProperties() != null) { - if (UI_SOURCE.equals(metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { + if (UI_SOURCE.equals( + metadataChangeLog.getSystemMetadata().getProperties().get(APP_SOURCE))) { // Pre-process the update indices hook for UI updates to avoid perceived lag from Kafka _updateIndicesService.handleChangeEvent(metadataChangeLog); return true; @@ -918,19 +1133,24 @@ public Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLi @Nonnull @Override - public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer logger) { + public RestoreIndicesResult restoreIndices( + @Nonnull RestoreIndicesArgs args, @Nonnull Consumer logger) { RestoreIndicesResult result = new RestoreIndicesResult(); int ignored = 0; int rowsMigrated = 0; logger.accept(String.format("Args are %s", args)); - logger.accept(String.format( - "Reading rows %s through %s from the aspects table started.", args.start, args.start + args.batchSize)); + logger.accept( + String.format( + "Reading rows %s through %s from the aspects table started.", + args.start, args.start + args.batchSize)); long startTime = System.currentTimeMillis(); PagedList rows = _aspectDao.getPagedAspects(args); result.timeSqlQueryMs = System.currentTimeMillis() - startTime; startTime = System.currentTimeMillis(); - logger.accept(String.format( - "Reading rows %s through %s from the aspects table completed.", args.start, args.start + args.batchSize)); + logger.accept( + String.format( + "Reading rows %s through %s from the aspects table completed.", + args.start, args.start + args.batchSize)); LinkedList> futures = new LinkedList<>(); @@ -942,8 +1162,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - logger.accept(String.format("Failed to bind Urn with value %s into Urn object: %s. 
Ignoring row.", - aspect.getKey().getUrn(), e)); + logger.accept( + String.format( + "Failed to bind Urn with value %s into Urn object: %s. Ignoring row.", + aspect.getKey().getUrn(), e)); ignored = ignored + 1; continue; } @@ -956,8 +1178,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - logger.accept(String.format("Failed to find entity with name %s in Entity Registry: %s. Ignoring row.", - entityName, e)); + logger.accept( + String.format( + "Failed to find entity with name %s in Entity Registry: %s. Ignoring row.", + entityName, e)); ignored = ignored + 1; continue; } @@ -968,8 +1192,10 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No // 3. Verify that the aspect is a valid aspect associated with the entity AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - logger.accept(String.format("Failed to find aspect with name %s associated with entity named %s", aspectName, - entityName)); + logger.accept( + String.format( + "Failed to find aspect with name %s associated with entity named %s", + aspectName, entityName)); ignored = ignored + 1; continue; } @@ -979,10 +1205,14 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No // 4. Create record from json aspect final RecordTemplate aspectRecord; try { - aspectRecord = EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + aspectRecord = + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - logger.accept(String.format("Failed to deserialize row %s for entity %s, aspect %s: %s. Ignoring row.", - aspect.getMetadata(), entityName, aspectName, e)); + logger.accept( + String.format( + "Failed to deserialize row %s for entity %s, aspect %s: %s. Ignoring row.", + aspect.getMetadata(), entityName, aspectName, e)); ignored = ignored + 1; continue; } @@ -990,32 +1220,50 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No startTime = System.currentTimeMillis(); // Force indexing to skip diff mode and fix error states - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(aspect.getSystemMetadata()); - StringMap properties = latestSystemMetadata.getProperties() != null ? latestSystemMetadata.getProperties() - : new StringMap(); + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(aspect.getSystemMetadata()); + StringMap properties = + latestSystemMetadata.getProperties() != null + ? latestSystemMetadata.getProperties() + : new StringMap(); properties.put(FORCE_INDEXING_KEY, Boolean.TRUE.toString()); latestSystemMetadata.setProperties(properties); // 5. 
Produce MAE events for the aspect record - futures.add(alwaysProduceMCLAsync(urn, entityName, aspectName, aspectSpec, null, aspectRecord, null, - latestSystemMetadata, - new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - ChangeType.RESTATE).getFirst()); + futures.add( + alwaysProduceMCLAsync( + urn, + entityName, + aspectName, + aspectSpec, + null, + aspectRecord, + null, + latestSystemMetadata, + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + ChangeType.RESTATE) + .getFirst()); result.sendMessageMs += System.currentTimeMillis() - startTime; rowsMigrated++; } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); try { TimeUnit.MILLISECONDS.sleep(args.batchDelayMs); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } result.ignored = ignored; result.rowsMigrated = rowsMigrated; @@ -1030,12 +1278,16 @@ public RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @No * @param count the count */ @Override - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count) { - log.debug("Invoked listUrns with entityName: {}, start: {}, count: {}", entityName, start, count); + public ListUrnsResult listUrns( + @Nonnull final String entityName, final int start, final int count) { + log.debug( + "Invoked listUrns with entityName: {}, start: {}, count: {}", entityName, start, count); // If a keyAspect exists, the entity exists. - final String keyAspectName = getEntityRegistry().getEntitySpec(entityName).getKeyAspectSpec().getName(); - final ListResult keyAspectList = _aspectDao.listUrns(entityName, keyAspectName, start, count); + final String keyAspectName = + getEntityRegistry().getEntitySpec(entityName).getKeyAspectSpec().getName(); + final ListResult keyAspectList = + _aspectDao.listUrns(entityName, keyAspectName, start, count); final ListUrnsResult result = new ListUrnsResult(); result.setStart(start); @@ -1048,8 +1300,8 @@ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start try { entityUrns.add(Urn.createFromString(urn)); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to convert urn %s found in db to Urn object.", urn), - e); + throw new IllegalArgumentException( + String.format("Failed to convert urn %s found in db to Urn object.", urn), e); } } result.setEntities(entityUrns); @@ -1057,17 +1309,20 @@ public ListUrnsResult listUrns(@Nonnull final String entityName, final int start } /** - * Default implementations. Subclasses should feel free to override if it's more efficient to do so. + * Default implementations. Subclasses should feel free to override if it's more efficient to do + * so. 
*/ @Override public Entity getEntity(@Nonnull final Urn urn, @Nonnull final Set aspectNames) { - return getEntities(Collections.singleton(urn), aspectNames).values().stream().findFirst().orElse(null); + return getEntities(Collections.singleton(urn), aspectNames).values().stream() + .findFirst() + .orElse(null); } /** * Deprecated! Use getEntitiesV2 instead. * - * Retrieves multiple entities. + *
<p>
Retrieves multiple entities. * * @param urns set of urns to fetch * @param aspectNames set of aspects to fetch @@ -1075,70 +1330,115 @@ public Entity getEntity(@Nonnull final Urn urn, @Nonnull final Set aspec */ @Deprecated @Override - public Map getEntities(@Nonnull final Set urns, @Nonnull Set aspectNames) { + public Map getEntities( + @Nonnull final Set urns, @Nonnull Set aspectNames) { log.debug("Invoked getEntities with urns {}, aspects {}", urns, aspectNames); if (urns.isEmpty()) { return Collections.emptyMap(); } - return getSnapshotUnions(urns, aspectNames).entrySet() - .stream() + return getSnapshotUnions(urns, aspectNames).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toEntity(entry.getValue()))); } @Override - public Pair, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog) { + public Pair, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull final AspectSpec aspectSpec, + @Nonnull final MetadataChangeLog metadataChangeLog) { Future future = _producer.produceMetadataChangeLog(urn, aspectSpec, metadataChangeLog); return Pair.of(future, preprocessEvent(metadataChangeLog)); } @Override - public Pair, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, - @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, - @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, - @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, - @Nonnull final ChangeType changeType) { - final MetadataChangeLog metadataChangeLog = constructMCL(null, entityName, urn, changeType, aspectName, auditStamp, - newAspectValue, newSystemMetadata, oldAspectValue, oldSystemMetadata); + public Pair, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, + @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, + @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, + @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType) { + final MetadataChangeLog metadataChangeLog = + constructMCL( + null, + entityName, + urn, + changeType, + aspectName, + auditStamp, + newAspectValue, + newSystemMetadata, + oldAspectValue, + oldSystemMetadata); return alwaysProduceMCLAsync(urn, aspectSpec, metadataChangeLog); } - public Optional, Boolean>> conditionallyProduceMCLAsync(@Nullable RecordTemplate oldAspect, - @Nullable SystemMetadata oldSystemMetadata, - RecordTemplate newAspect, SystemMetadata newSystemMetadata, - @Nullable MetadataChangeProposal mcp, Urn entityUrn, - AuditStamp auditStamp, AspectSpec aspectSpec) { + public Optional, Boolean>> conditionallyProduceMCLAsync( + @Nullable RecordTemplate oldAspect, + @Nullable SystemMetadata oldSystemMetadata, + RecordTemplate newAspect, + SystemMetadata newSystemMetadata, + @Nullable MetadataChangeProposal mcp, + Urn entityUrn, + AuditStamp auditStamp, + AspectSpec aspectSpec) { boolean isNoOp = oldAspect == newAspect; if (!isNoOp || _alwaysEmitChangeLog || shouldAspectEmitChangeLog(aspectSpec)) { - log.debug("Producing MetadataChangeLog for ingested aspect {}, urn {}", aspectSpec.getName(), entityUrn); - - final MetadataChangeLog metadataChangeLog = constructMCL(mcp, 
urnToEntityName(entityUrn), entityUrn, - isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, aspectSpec.getName(), auditStamp, newAspect, newSystemMetadata, - oldAspect, oldSystemMetadata); + log.debug( + "Producing MetadataChangeLog for ingested aspect {}, urn {}", + aspectSpec.getName(), + entityUrn); + + final MetadataChangeLog metadataChangeLog = + constructMCL( + mcp, + urnToEntityName(entityUrn), + entityUrn, + isNoOp ? ChangeType.RESTATE : ChangeType.UPSERT, + aspectSpec.getName(), + auditStamp, + newAspect, + newSystemMetadata, + oldAspect, + oldSystemMetadata); log.debug("Serialized MCL event: {}", metadataChangeLog); - Pair, Boolean> emissionStatus = alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog); + Pair, Boolean> emissionStatus = + alwaysProduceMCLAsync(entityUrn, aspectSpec, metadataChangeLog); return emissionStatus.getFirst() != null ? Optional.of(emissionStatus) : Optional.empty(); } else { log.debug( - "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.", - aspectSpec.getName(), entityUrn); + "Skipped producing MetadataChangeLog for ingested aspect {}, urn {}. Aspect has not changed.", + aspectSpec.getName(), + entityUrn); return Optional.empty(); } } private UpdateAspectResult conditionallyProduceMCLAsync(UpdateAspectResult result) { AbstractBatchItem request = result.getRequest(); - Optional, Boolean>> emissionStatus = conditionallyProduceMCLAsync(result.getOldValue(), result.getOldSystemMetadata(), - result.getNewValue(), result.getNewSystemMetadata(), - request.getMetadataChangeProposal(), result.getUrn(), result.getAuditStamp(), request.getAspectSpec()); - - return emissionStatus.map(status -> - result.toBuilder() - .mclFuture(status.getFirst()) - .processedMCL(status.getSecond()) - .build() - ).orElse(result); + Optional, Boolean>> emissionStatus = + conditionallyProduceMCLAsync( + result.getOldValue(), + result.getOldSystemMetadata(), + result.getNewValue(), + result.getNewSystemMetadata(), + request.getMetadataChangeProposal(), + result.getUrn(), + result.getAuditStamp(), + request.getAspectSpec()); + + return emissionStatus + .map( + status -> + result.toBuilder() + .mclFuture(status.getFirst()) + .processedMCL(status.getSecond()) + .build()) + .orElse(result); } @Override @@ -1148,10 +1448,15 @@ public RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final Str } @Override - public void ingestEntities(@Nonnull final List entities, @Nonnull final AuditStamp auditStamp, + public void ingestEntities( + @Nonnull final List entities, + @Nonnull final AuditStamp auditStamp, @Nonnull final List systemMetadata) { log.debug("Invoked ingestEntities with entities {}, audit stamp {}", entities, auditStamp); - Streams.zip(entities.stream(), systemMetadata.stream(), (a, b) -> new Pair(a, b)) + Streams.zip( + entities.stream(), + systemMetadata.stream(), + (a, b) -> new Pair(a, b)) .forEach(pair -> ingestEntity(pair.getFirst(), auditStamp, pair.getSecond())); } @@ -1166,42 +1471,50 @@ public SystemMetadata ingestEntity(Entity entity, AuditStamp auditStamp) { } @Override - public void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp, - @Nonnull SystemMetadata systemMetadata) { - log.debug("Invoked ingestEntity with entity {}, audit stamp {} systemMetadata {}", entity, auditStamp, systemMetadata.toString()); + public void ingestEntity( + @Nonnull Entity entity, + @Nonnull AuditStamp auditStamp, + @Nonnull SystemMetadata systemMetadata) { + log.debug( + "Invoked ingestEntity with entity {}, audit 
stamp {} systemMetadata {}", + entity, + auditStamp, + systemMetadata.toString()); ingestSnapshotUnion(entity.getValue(), auditStamp, systemMetadata); } @Nonnull - protected Map getSnapshotUnions(@Nonnull final Set urns, @Nonnull final Set aspectNames) { - return getSnapshotRecords(urns, aspectNames).entrySet() - .stream() + protected Map getSnapshotUnions( + @Nonnull final Set urns, @Nonnull final Set aspectNames) { + return getSnapshotRecords(urns, aspectNames).entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> toSnapshotUnion(entry.getValue()))); } @Nonnull - protected Map getSnapshotRecords(@Nonnull final Set urns, - @Nonnull final Set aspectNames) { - return getLatestAspectUnions(urns, aspectNames).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> toSnapshotRecord(entry.getKey(), entry.getValue()))); + protected Map getSnapshotRecords( + @Nonnull final Set urns, @Nonnull final Set aspectNames) { + return getLatestAspectUnions(urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, entry -> toSnapshotRecord(entry.getKey(), entry.getValue()))); } @Nonnull protected Map> getLatestAspectUnions( - @Nonnull final Set urns, - @Nonnull final Set aspectNames) { - return getLatestAspects(urns, aspectNames).entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue() - .stream() - .map(aspectRecord -> toAspectUnion(entry.getKey(), aspectRecord)) - .collect(Collectors.toList()))); + @Nonnull final Set urns, @Nonnull final Set aspectNames) { + return getLatestAspects(urns, aspectNames).entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> + entry.getValue().stream() + .map(aspectRecord -> toAspectUnion(entry.getKey(), aspectRecord)) + .collect(Collectors.toList()))); } /** - Returns true if entityType should have some aspect as per its definition - but aspects given does not have that aspect + * Returns true if entityType should have some aspect as per its definition but aspects given does + * not have that aspect */ private boolean isAspectMissing(String entityType, String aspectName, Set aspects) { return _entityRegistry.getEntitySpec(entityType).getAspectSpecMap().containsKey(aspectName) @@ -1209,32 +1522,37 @@ private boolean isAspectMissing(String entityType, String aspectName, Set>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn, - Map includedAspects) { + public Pair>> generateDefaultAspectsOnFirstWrite( + @Nonnull final Urn urn, Map includedAspects) { List> returnAspects = new ArrayList<>(); final String keyAspectName = getKeyAspectName(urn); - final Map latestAspects = new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); + final Map latestAspects = + new HashMap<>(getLatestAspectsForUrn(urn, Set.of(keyAspectName))); // key aspect: does not exist in database && is being written - boolean generateDefaults = !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); + boolean generateDefaults = + !latestAspects.containsKey(keyAspectName) && includedAspects.containsKey(keyAspectName); // conditionally generate defaults if (generateDefaults) { String entityType = urnToEntityName(urn); Set aspectsToGet = new HashSet<>(); - boolean shouldCheckBrowsePath = isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckBrowsePath = + isAspectMissing(entityType, BROWSE_PATHS_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckBrowsePath) { 
aspectsToGet.add(BROWSE_PATHS_ASPECT_NAME); } - boolean shouldCheckBrowsePathV2 = isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckBrowsePathV2 = + isAspectMissing(entityType, BROWSE_PATHS_V2_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckBrowsePathV2) { aspectsToGet.add(BROWSE_PATHS_V2_ASPECT_NAME); } - boolean shouldCheckDataPlatform = isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); + boolean shouldCheckDataPlatform = + isAspectMissing(entityType, DATA_PLATFORM_INSTANCE_ASPECT_NAME, includedAspects.keySet()); if (shouldCheckDataPlatform) { aspectsToGet.add(DATA_PLATFORM_INSTANCE_ASPECT_NAME); } @@ -1242,8 +1560,9 @@ public Pair>> generateDefaultAspectsO // fetch additional aspects latestAspects.putAll(getLatestAspectsForUrn(urn, aspectsToGet)); - if (shouldCheckBrowsePath && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { + if (shouldCheckBrowsePath + && latestAspects.get(BROWSE_PATHS_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_ASPECT_NAME)) { try { BrowsePaths generatedBrowsePath = buildDefaultBrowsePath(urn); returnAspects.add(Pair.of(BROWSE_PATHS_ASPECT_NAME, generatedBrowsePath)); @@ -1252,8 +1571,9 @@ public Pair>> generateDefaultAspectsO } } - if (shouldCheckBrowsePathV2 && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null - && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { + if (shouldCheckBrowsePathV2 + && latestAspects.get(BROWSE_PATHS_V2_ASPECT_NAME) == null + && !includedAspects.containsKey(BROWSE_PATHS_V2_ASPECT_NAME)) { try { BrowsePathsV2 generatedBrowsePathV2 = buildDefaultBrowsePathV2(urn, false); returnAspects.add(Pair.of(BROWSE_PATHS_V2_ASPECT_NAME, generatedBrowsePathV2)); @@ -1262,11 +1582,13 @@ public Pair>> generateDefaultAspectsO } } - if (shouldCheckDataPlatform && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null - && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { + if (shouldCheckDataPlatform + && latestAspects.get(DATA_PLATFORM_INSTANCE_ASPECT_NAME) == null + && !includedAspects.containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { RecordTemplate keyAspect = includedAspects.get(keyAspectName); DataPlatformInstanceUtils.buildDataPlatformInstance(entityType, keyAspect) - .ifPresent(aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); + .ifPresent( + aspect -> returnAspects.add(Pair.of(DATA_PLATFORM_INSTANCE_ASPECT_NAME, aspect))); } } @@ -1274,8 +1596,8 @@ public Pair>> generateDefaultAspectsO } @Override - public List> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, - Map includedAspects) { + public List> generateDefaultAspectsIfMissing( + @Nonnull final Urn urn, Map includedAspects) { final String keyAspectName = getKeyAspectName(urn); @@ -1284,10 +1606,12 @@ public List> generateDefaultAspectsIfMissing(@Nonnu } else { // No key aspect being written, generate it and potentially suggest writing it later HashMap includedWithKeyAspect = new HashMap<>(includedAspects); - Pair keyAspect = Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); + Pair keyAspect = + Pair.of(keyAspectName, EntityUtils.buildKeyAspect(_entityRegistry, urn)); includedWithKeyAspect.put(keyAspect.getKey(), keyAspect.getValue()); - Pair>> returnAspects = generateDefaultAspectsOnFirstWrite(urn, includedWithKeyAspect); + Pair>> returnAspects = + generateDefaultAspectsOnFirstWrite(urn, 
includedWithKeyAspect); // missing key aspect in database, add it if (!returnAspects.getFirst()) { @@ -1298,24 +1622,36 @@ public List> generateDefaultAspectsIfMissing(@Nonnu } } - private void ingestSnapshotUnion(@Nonnull final Snapshot snapshotUnion, @Nonnull final AuditStamp auditStamp, + private void ingestSnapshotUnion( + @Nonnull final Snapshot snapshotUnion, + @Nonnull final AuditStamp auditStamp, SystemMetadata systemMetadata) { - final RecordTemplate snapshotRecord = RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion); + final RecordTemplate snapshotRecord = + RecordUtils.getSelectedRecordTemplateFromUnion(snapshotUnion); final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshot(snapshotRecord); final List> aspectRecordsToIngest = NewModelUtils.getAspectsFromSnapshot(snapshotRecord); log.info("INGEST urn {} with system metadata {}", urn.toString(), systemMetadata.toString()); - aspectRecordsToIngest.addAll(generateDefaultAspectsIfMissing(urn, - aspectRecordsToIngest.stream().collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); - - AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() - .items(aspectRecordsToIngest.stream().map(pair -> UpsertBatchItem.builder() - .urn(urn) - .aspectName(pair.getKey()) - .aspect(pair.getValue()) - .systemMetadata(systemMetadata) - .build(_entityRegistry)).collect(Collectors.toList())) + aspectRecordsToIngest.addAll( + generateDefaultAspectsIfMissing( + urn, + aspectRecordsToIngest.stream() + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)))); + + AspectsBatchImpl aspectsBatch = + AspectsBatchImpl.builder() + .items( + aspectRecordsToIngest.stream() + .map( + pair -> + UpsertBatchItem.builder() + .urn(urn) + .aspectName(pair.getKey()) + .aspect(pair.getValue()) + .systemMetadata(systemMetadata) + .build(_entityRegistry)) + .collect(Collectors.toList())) .build(); ingestAspects(aspectsBatch, auditStamp, true, true); @@ -1333,7 +1669,8 @@ public AspectSpec getKeyAspectSpec(@Nonnull final String entityName) { } @Override - public Optional getAspectSpec(@Nonnull final String entityName, @Nonnull final String aspectName) { + public Optional getAspectSpec( + @Nonnull final String entityName, @Nonnull final String aspectName) { final EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); return Optional.ofNullable(entitySpec.getAspectSpec(aspectName)); } @@ -1355,25 +1692,29 @@ protected Snapshot toSnapshotUnion(@Nonnull final RecordTemplate snapshotRecord) return snapshot; } - protected RecordTemplate toSnapshotRecord(@Nonnull final Urn urn, - @Nonnull final List aspectUnionTemplates) { + protected RecordTemplate toSnapshotRecord( + @Nonnull final Urn urn, @Nonnull final List aspectUnionTemplates) { final String entityName = urnToEntityName(urn); final EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); return com.datahub.util.ModelUtils.newSnapshot( - getDataTemplateClassFromSchema(entitySpec.getSnapshotSchema(), RecordTemplate.class), urn, + getDataTemplateClassFromSchema(entitySpec.getSnapshotSchema(), RecordTemplate.class), + urn, aspectUnionTemplates); } - protected UnionTemplate toAspectUnion(@Nonnull final Urn urn, @Nonnull final RecordTemplate aspectRecord) { + protected UnionTemplate toAspectUnion( + @Nonnull final Urn urn, @Nonnull final RecordTemplate aspectRecord) { final EntitySpec entitySpec = _entityRegistry.getEntitySpec(urnToEntityName(urn)); final TyperefDataSchema aspectSchema = entitySpec.getAspectTyperefSchema(); if (aspectSchema == null) { throw new RuntimeException( - 
String.format("Aspect schema for %s is null: v4 operation is not supported on this entity registry", + String.format( + "Aspect schema for %s is null: v4 operation is not supported on this entity registry", entitySpec.getName())); } return com.datahub.util.ModelUtils.newAspectUnion( - getDataTemplateClassFromSchema(entitySpec.getAspectTyperefSchema(), UnionTemplate.class), aspectRecord); + getDataTemplateClassFromSchema(entitySpec.getAspectTyperefSchema(), UnionTemplate.class), + aspectRecord); } protected Urn toUrn(final String urnStr) { @@ -1381,26 +1722,32 @@ protected Urn toUrn(final String urnStr) { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { log.error("Failed to convert urn string {} into Urn object", urnStr); - throw new ModelConversionException(String.format("Failed to convert urn string %s into Urn object ", urnStr), e); + throw new ModelConversionException( + String.format("Failed to convert urn string %s into Urn object ", urnStr), e); } } - private EntityResponse toEntityResponse(final Urn urn, final List envelopedAspects) { + private EntityResponse toEntityResponse( + final Urn urn, final List envelopedAspects) { final EntityResponse response = new EntityResponse(); response.setUrn(urn); response.setEntityName(urnToEntityName(urn)); - response.setAspects(new EnvelopedAspectMap( - envelopedAspects.stream().collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)) - )); + response.setAspects( + new EnvelopedAspectMap( + envelopedAspects.stream() + .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)))); return response; } private Map> buildEntityToValidAspects(final EntityRegistry entityRegistry) { - return entityRegistry.getEntitySpecs() - .values() - .stream() - .collect(Collectors.toMap(EntitySpec::getName, - entry -> entry.getAspectSpecs().stream().map(AspectSpec::getName).collect(Collectors.toSet()))); + return entityRegistry.getEntitySpecs().values().stream() + .collect( + Collectors.toMap( + EntitySpec::getName, + entry -> + entry.getAspectSpecs().stream() + .map(AspectSpec::getName) + .collect(Collectors.toSet()))); } @Override @@ -1429,44 +1776,68 @@ public void setWritable(boolean canWrite) { } @Override - public RollbackRunResult rollbackRun(List aspectRows, String runId, boolean hardDelete) { + public RollbackRunResult rollbackRun( + List aspectRows, String runId, boolean hardDelete) { return rollbackWithConditions(aspectRows, Collections.singletonMap("runId", runId), hardDelete); } @Override - public RollbackRunResult rollbackWithConditions(List aspectRows, Map conditions, boolean hardDelete) { + public RollbackRunResult rollbackWithConditions( + List aspectRows, Map conditions, boolean hardDelete) { List removedAspects = new ArrayList<>(); AtomicInteger rowsDeletedFromEntityDeletion = new AtomicInteger(0); - List> futures = aspectRows.stream().map(aspectToRemove -> { - RollbackResult result = deleteAspect(aspectToRemove.getUrn(), aspectToRemove.getAspectName(), - conditions, hardDelete); - if (result != null) { - Optional aspectSpec = getAspectSpec(result.entityName, result.aspectName); - if (!aspectSpec.isPresent()) { - log.error("Issue while rolling back: unknown aspect {} for entity {}", result.entityName, result.aspectName); - return null; - } - - rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); - removedAspects.add(aspectToRemove); - return alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), aspectSpec.get(), - result.getOldValue(), 
result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), - // TODO: use properly attributed audit stamp. - createSystemAuditStamp(), - result.getChangeType()).getFirst(); - } - - return null; - }).filter(Objects::nonNull).collect(Collectors.toList()); + List> futures = + aspectRows.stream() + .map( + aspectToRemove -> { + RollbackResult result = + deleteAspect( + aspectToRemove.getUrn(), + aspectToRemove.getAspectName(), + conditions, + hardDelete); + if (result != null) { + Optional aspectSpec = + getAspectSpec(result.entityName, result.aspectName); + if (!aspectSpec.isPresent()) { + log.error( + "Issue while rolling back: unknown aspect {} for entity {}", + result.entityName, + result.aspectName); + return null; + } + + rowsDeletedFromEntityDeletion.addAndGet(result.additionalRowsAffected); + removedAspects.add(aspectToRemove); + return alwaysProduceMCLAsync( + result.getUrn(), + result.getEntityName(), + result.getAspectName(), + aspectSpec.get(), + result.getOldValue(), + result.getNewValue(), + result.getOldSystemMetadata(), + result.getNewSystemMetadata(), + // TODO: use properly attributed audit stamp. + createSystemAuditStamp(), + result.getChangeType()) + .getFirst(); + } + + return null; + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); - futures.forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion.get()); } @@ -1490,8 +1861,14 @@ public RollbackRunResult deleteUrn(Urn urn) { return new RollbackRunResult(removedAspects, rowsDeletedFromEntityDeletion); } - SystemMetadata latestKeySystemMetadata = EntityUtils.parseSystemMetadata(latestKey.getSystemMetadata()); - RollbackResult result = deleteAspect(urn.toString(), keyAspectName, Collections.singletonMap("runId", latestKeySystemMetadata.getRunId()), true); + SystemMetadata latestKeySystemMetadata = + EntityUtils.parseSystemMetadata(latestKey.getSystemMetadata()); + RollbackResult result = + deleteAspect( + urn.toString(), + keyAspectName, + Collections.singletonMap("runId", latestKeySystemMetadata.getRunId()), + true); if (result != null) { AspectRowSummary summary = new AspectRowSummary(); @@ -1503,11 +1880,20 @@ public RollbackRunResult deleteUrn(Urn urn) { rowsDeletedFromEntityDeletion = result.additionalRowsAffected; removedAspects.add(summary); - Future future = alwaysProduceMCLAsync(result.getUrn(), result.getEntityName(), result.getAspectName(), keySpec, - result.getOldValue(), result.getNewValue(), result.getOldSystemMetadata(), result.getNewSystemMetadata(), - // TODO: Use a proper inferred audit stamp - createSystemAuditStamp(), - result.getChangeType()).getFirst(); + Future future = + alwaysProduceMCLAsync( + result.getUrn(), + result.getEntityName(), + result.getAspectName(), + keySpec, + result.getOldValue(), + result.getNewValue(), + result.getOldSystemMetadata(), + result.getNewSystemMetadata(), + // TODO: Use a proper inferred audit stamp + createSystemAuditStamp(), + result.getChangeType()) + .getFirst(); if (future != null) { try { @@ -1530,9 +1916,12 @@ public RollbackRunResult deleteUrn(Urn urn) { @Override public Boolean exists(Urn urn) { final Set aspectsToFetch = getEntityAspectNames(urn); - final List dbKeys = aspectsToFetch.stream() - .map(aspectName -> new 
EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList()); + final List dbKeys = + aspectsToFetch.stream() + .map( + aspectName -> + new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList()); Map aspects = _aspectDao.batchGet(new HashSet(dbKeys)); return aspects.values().stream().anyMatch(aspect -> aspect != null); @@ -1553,14 +1942,16 @@ public Boolean isSoftDeleted(@Nonnull final Urn urn) { @Override public Boolean exists(Urn urn, String aspectName) { - EntityAspectIdentifier dbKey = new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION); + EntityAspectIdentifier dbKey = + new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION); Map aspects = _aspectDao.batchGet(Set.of(dbKey)); return aspects.values().stream().anyMatch(Objects::nonNull); } @Nullable @Override - public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map conditions, boolean hardDelete) { + public RollbackResult deleteAspect( + String urn, String aspectName, @Nonnull Map conditions, boolean hardDelete) { // Validate pre-conditions before running queries Urn entityUrn; EntitySpec entitySpec; @@ -1573,120 +1964,153 @@ public RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map { - Integer additionalRowsDeleted = 0; + final RollbackResult result = + _aspectDao.runInTransactionWithRetry( + (tx) -> { + Integer additionalRowsDeleted = 0; - // 1. Fetch the latest existing version of the aspect. - final EntityAspect latest = _aspectDao.getLatestAspect(urn, aspectName); + // 1. Fetch the latest existing version of the aspect. + final EntityAspect latest = _aspectDao.getLatestAspect(urn, aspectName); - // 1.1 If no latest exists, skip this aspect - if (latest == null) { - return null; - } + // 1.1 If no latest exists, skip this aspect + if (latest == null) { + return null; + } - // 2. Compare the match conditions, if they don't match, ignore. - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); - if (!filterMatch(latestSystemMetadata, conditions)) { - return null; - } - String latestMetadata = latest.getMetadata(); + // 2. Compare the match conditions, if they don't match, ignore. + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + if (!filterMatch(latestSystemMetadata, conditions)) { + return null; + } + String latestMetadata = latest.getMetadata(); - // 3. Check if this is a key aspect - Boolean isKeyAspect = false; - try { - isKeyAspect = getKeyAspectName(Urn.createFromString(urn)).equals(aspectName); - } catch (URISyntaxException e) { - log.error("Error occurred while parsing urn: {}", urn, e); - } + // 3. Check if this is a key aspect + Boolean isKeyAspect = false; + try { + isKeyAspect = getKeyAspectName(Urn.createFromString(urn)).equals(aspectName); + } catch (URISyntaxException e) { + log.error("Error occurred while parsing urn: {}", urn, e); + } - // 4. 
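// --- Illustrative sketch (a simplified stand-in, not the real helper): the
// run-scoped matching performed by the filterMatch(...) calls in this method,
// assuming only the "runId" condition that rollbackRun passes above. The real
// implementation may compare additional system-metadata fields.
private static boolean filterMatchSketch(
    SystemMetadata metadata, Map<String, String> conditions) {
  String expectedRunId = conditions.get("runId");
  return expectedRunId == null || expectedRunId.equals(metadata.getRunId());
}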
Fetch all preceding aspects, that match - List aspectsToDelete = new ArrayList<>(); - long maxVersion = _aspectDao.getMaxVersion(urn, aspectName); - EntityAspect survivingAspect = null; - String previousMetadata = null; - boolean filterMatch = true; - while (maxVersion > 0 && filterMatch) { - EntityAspect candidateAspect = _aspectDao.getAspect(urn, aspectName, maxVersion); - SystemMetadata previousSysMetadata = EntityUtils.parseSystemMetadata(candidateAspect.getSystemMetadata()); - filterMatch = filterMatch(previousSysMetadata, conditions); - if (filterMatch) { - aspectsToDelete.add(candidateAspect); - maxVersion = maxVersion - 1; - } else { - survivingAspect = candidateAspect; - previousMetadata = survivingAspect.getMetadata(); - } - } + // 4. Fetch all preceding aspects, that match + List aspectsToDelete = new ArrayList<>(); + long maxVersion = _aspectDao.getMaxVersion(urn, aspectName); + EntityAspect survivingAspect = null; + String previousMetadata = null; + boolean filterMatch = true; + while (maxVersion > 0 && filterMatch) { + EntityAspect candidateAspect = _aspectDao.getAspect(urn, aspectName, maxVersion); + SystemMetadata previousSysMetadata = + EntityUtils.parseSystemMetadata(candidateAspect.getSystemMetadata()); + filterMatch = filterMatch(previousSysMetadata, conditions); + if (filterMatch) { + aspectsToDelete.add(candidateAspect); + maxVersion = maxVersion - 1; + } else { + survivingAspect = candidateAspect; + previousMetadata = survivingAspect.getMetadata(); + } + } - // 5. Apply deletes and fix up latest row - - aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect)); - - if (survivingAspect != null) { - // if there was a surviving aspect, copy its information into the latest row - // eBean does not like us updating a pkey column (version) for the surviving aspect - // as a result we copy information from survivingAspect to latest and delete survivingAspect - latest.setMetadata(survivingAspect.getMetadata()); - latest.setSystemMetadata(survivingAspect.getSystemMetadata()); - latest.setCreatedOn(survivingAspect.getCreatedOn()); - latest.setCreatedBy(survivingAspect.getCreatedBy()); - latest.setCreatedFor(survivingAspect.getCreatedFor()); - _aspectDao.saveAspect(tx, latest, false); - // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); - _aspectDao.deleteAspect(tx, survivingAspect); - } else { - if (isKeyAspect) { - if (hardDelete) { - // If this is the key aspect, delete the entity entirely. - additionalRowsDeleted = _aspectDao.deleteUrn(tx, urn); - } else if (entitySpec.hasAspect(Constants.STATUS_ASPECT_NAME)) { - // soft delete by setting status.removed=true (if applicable) - final Status statusAspect = new Status(); - statusAspect.setRemoved(true); - - final MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType(entityUrn.getEntityType()); - gmce.setAspectName(Constants.STATUS_ASPECT_NAME); - gmce.setAspect(GenericRecordUtils.serializeAspect(statusAspect)); - final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - this.ingestProposal(gmce, auditStamp, false); - } - } else { - // Else, only delete the specific aspect. - _aspectDao.deleteAspect(tx, latest); - } - } + // 5. 
Apply deletes and fix up latest row + + aspectsToDelete.forEach(aspect -> _aspectDao.deleteAspect(tx, aspect)); + + if (survivingAspect != null) { + // if there was a surviving aspect, copy its information into the latest row + // eBean does not like us updating a pkey column (version) for the surviving aspect + // as a result we copy information from survivingAspect to latest and delete + // survivingAspect + latest.setMetadata(survivingAspect.getMetadata()); + latest.setSystemMetadata(survivingAspect.getSystemMetadata()); + latest.setCreatedOn(survivingAspect.getCreatedOn()); + latest.setCreatedBy(survivingAspect.getCreatedBy()); + latest.setCreatedFor(survivingAspect.getCreatedFor()); + _aspectDao.saveAspect(tx, latest, false); + // metrics + _aspectDao.incrementWriteMetrics( + aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); + _aspectDao.deleteAspect(tx, survivingAspect); + } else { + if (isKeyAspect) { + if (hardDelete) { + // If this is the key aspect, delete the entity entirely. + additionalRowsDeleted = _aspectDao.deleteUrn(tx, urn); + } else if (entitySpec.hasAspect(Constants.STATUS_ASPECT_NAME)) { + // soft delete by setting status.removed=true (if applicable) + final Status statusAspect = new Status(); + statusAspect.setRemoved(true); + + final MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType(entityUrn.getEntityType()); + gmce.setAspectName(Constants.STATUS_ASPECT_NAME); + gmce.setAspect(GenericRecordUtils.serializeAspect(statusAspect)); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + this.ingestProposal(gmce, auditStamp, false); + } + } else { + // Else, only delete the specific aspect. + _aspectDao.deleteAspect(tx, latest); + } + } - // 6. Emit the Update - try { - final RecordTemplate latestValue = latest == null ? null - : EntityUtils.toAspectRecord(Urn.createFromString(latest.getUrn()), latest.getAspect(), - latestMetadata, getEntityRegistry()); - - final RecordTemplate previousValue = survivingAspect == null ? null - : EntityUtils.toAspectRecord(Urn.createFromString(survivingAspect.getUrn()), - survivingAspect.getAspect(), previousMetadata, getEntityRegistry()); - - final Urn urnObj = Urn.createFromString(urn); - // We are not deleting key aspect if hardDelete has not been set so do not return a rollback result - if (isKeyAspect && !hardDelete) { - return null; - } - return new RollbackResult(urnObj, urnObj.getEntityType(), latest.getAspect(), latestValue, - previousValue, latestSystemMetadata, - previousValue == null ? null : EntityUtils.parseSystemMetadata(survivingAspect.getSystemMetadata()), - survivingAspect == null ? ChangeType.DELETE : ChangeType.UPSERT, isKeyAspect, additionalRowsDeleted); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to emit the update for urn %s", urn)); - } catch (IllegalStateException e) { - log.warn("Unable to find aspect, rollback result will not be sent. Error: {}", e.getMessage()); - return null; - } - }, DEFAULT_MAX_TRANSACTION_RETRY); + // 6. Emit the Update + try { + final RecordTemplate latestValue = + latest == null + ? null + : EntityUtils.toAspectRecord( + Urn.createFromString(latest.getUrn()), + latest.getAspect(), + latestMetadata, + getEntityRegistry()); + + final RecordTemplate previousValue = + survivingAspect == null + ? 
null + : EntityUtils.toAspectRecord( + Urn.createFromString(survivingAspect.getUrn()), + survivingAspect.getAspect(), + previousMetadata, + getEntityRegistry()); + + final Urn urnObj = Urn.createFromString(urn); + // We are not deleting key aspect if hardDelete has not been set so do not return a + // rollback result + if (isKeyAspect && !hardDelete) { + return null; + } + return new RollbackResult( + urnObj, + urnObj.getEntityType(), + latest.getAspect(), + latestValue, + previousValue, + latestSystemMetadata, + previousValue == null + ? null + : EntityUtils.parseSystemMetadata(survivingAspect.getSystemMetadata()), + survivingAspect == null ? ChangeType.DELETE : ChangeType.UPSERT, + isKeyAspect, + additionalRowsDeleted); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to emit the update for urn %s", urn)); + } catch (IllegalStateException e) { + log.warn( + "Unable to find aspect, rollback result will not be sent. Error: {}", + e.getMessage()); + return null; + } + }, + DEFAULT_MAX_TRANSACTION_RETRY); return result; } @@ -1720,21 +2144,32 @@ protected AuditStamp createSystemAuditStamp() { } @Nonnull - private Map getLatestAspect(@Nonnull final Set urns, @Nonnull final Set aspectNames) { + private Map getLatestAspect( + @Nonnull final Set urns, @Nonnull final Set aspectNames) { log.debug("Invoked getLatestAspects with urns: {}, aspectNames: {}", urns, aspectNames); // Create DB keys - final Set dbKeys = urns.stream().map(urn -> { - final Set aspectsToFetch = aspectNames.isEmpty() ? getEntityAspectNames(urn) : aspectNames; - return aspectsToFetch.stream() - .map(aspectName -> new EntityAspectIdentifier(urn.toString(), aspectName, ASPECT_LATEST_VERSION)) - .collect(Collectors.toList()); - }).flatMap(List::stream).collect(Collectors.toSet()); + final Set dbKeys = + urns.stream() + .map( + urn -> { + final Set aspectsToFetch = + aspectNames.isEmpty() ? getEntityAspectNames(urn) : aspectNames; + return aspectsToFetch.stream() + .map( + aspectName -> + new EntityAspectIdentifier( + urn.toString(), aspectName, ASPECT_LATEST_VERSION)) + .collect(Collectors.toList()); + }) + .flatMap(List::stream) + .collect(Collectors.toSet()); Map batchGetResults = new HashMap<>(); Iterators.partition(dbKeys.iterator(), MAX_KEYS_PER_QUERY) - .forEachRemaining(batch -> batchGetResults.putAll(_aspectDao.batchGet(ImmutableSet.copyOf(batch)))); + .forEachRemaining( + batch -> batchGetResults.putAll(_aspectDao.batchGet(ImmutableSet.copyOf(batch)))); return batchGetResults; } @@ -1743,14 +2178,16 @@ private Map getLatestAspect(@Nonnull final * To do this, we want to fetch the maximum version and subtract the negative version from that. Since -1 represents * the maximum version, we need to add 1 to the final result. */ - private long calculateVersionNumber(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { + private long calculateVersionNumber( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version) { if (version < 0) { return _aspectDao.getMaxVersion(urn.toString(), aspectName) + version + 1; } return version; } - private Map getEnvelopedAspects(final Set dbKeys) { + private Map getEnvelopedAspects( + final Set dbKeys) { final Map result = new HashMap<>(); final Map dbEntries = _aspectDao.batchGet(dbKeys); @@ -1764,29 +2201,36 @@ private Map getEnvelopedAspects(final S } // Aspect found. 
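// --- Worked example (illustrative; the max version is hypothetical) for
// calculateVersionNumber above: negative versions address aspects from the
// newest end, via maxVersion + version + 1.
long maxVersion = 7;               // pretend _aspectDao.getMaxVersion(...) == 7
assert maxVersion + (-1) + 1 == 7; // version -1 resolves to the latest (7)
assert maxVersion + (-2) + 1 == 6; // version -2 resolves to the one before (6)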
Now turn it into an EnvelopedAspect - final com.linkedin.entity.Aspect aspect = RecordUtils.toRecordTemplate(com.linkedin.entity.Aspect.class, currAspectEntry - .getMetadata()); + final com.linkedin.entity.Aspect aspect = + RecordUtils.toRecordTemplate( + com.linkedin.entity.Aspect.class, currAspectEntry.getMetadata()); final EnvelopedAspect envelopedAspect = new EnvelopedAspect(); envelopedAspect.setName(currAspectEntry.getAspect()); envelopedAspect.setVersion(currAspectEntry.getVersion()); - // TODO: I think we can assume this here, adding as it's a required field so object mapping barfs when trying to access it, + // TODO: I think we can assume this here, adding as it's a required field so object mapping + // barfs when trying to access it, // since nowhere else is using it should be safe for now at least envelopedAspect.setType(AspectType.VERSIONED); envelopedAspect.setValue(aspect); try { if (currAspectEntry.getSystemMetadata() != null) { - final SystemMetadata systemMetadata = RecordUtils.toRecordTemplate(SystemMetadata.class, currAspectEntry.getSystemMetadata()); + final SystemMetadata systemMetadata = + RecordUtils.toRecordTemplate( + SystemMetadata.class, currAspectEntry.getSystemMetadata()); envelopedAspect.setSystemMetadata(systemMetadata); } } catch (Exception e) { - log.warn("Exception encountered when setting system metadata on enveloped aspect {}. Error: {}", envelopedAspect.getName(), e); + log.warn( + "Exception encountered when setting system metadata on enveloped aspect {}. Error: {}", + envelopedAspect.getName(), + e); } - envelopedAspect.setCreated(new AuditStamp() - .setActor(UrnUtils.getUrn(currAspectEntry.getCreatedBy())) - .setTime(currAspectEntry.getCreatedOn().getTime()) - ); + envelopedAspect.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(currAspectEntry.getCreatedBy())) + .setTime(currAspectEntry.getCreatedOn().getTime())); result.put(currKey, envelopedAspect); } return result; @@ -1802,40 +2246,50 @@ private EnvelopedAspect getKeyEnvelopedAspect(final Urn urn) { envelopedAspect.setName(keySpec.getName()); envelopedAspect.setVersion(ASPECT_LATEST_VERSION); envelopedAspect.setValue(aspect); - // TODO: I think we can assume this here, adding as it's a required field so object mapping barfs when trying to access it, + // TODO: I think we can assume this here, adding as it's a required field so object mapping + // barfs when trying to access it, // since nowhere else is using it should be safe for now at least envelopedAspect.setType(AspectType.VERSIONED); envelopedAspect.setCreated( - new AuditStamp().setActor(UrnUtils.getUrn(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + new AuditStamp() + .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); return envelopedAspect; } @Nonnull private UpdateAspectResult ingestAspectToLocalDB( - @Nullable Transaction tx, - @Nonnull final Urn urn, - @Nonnull final String aspectName, - @Nonnull final RecordTemplate newValue, - @Nonnull final AuditStamp auditStamp, - @Nonnull final SystemMetadata providedSystemMetadata, - @Nullable final EntityAspect latest, - @Nonnull final Long nextVersion) { - - // Set the "last run id" to be the run id provided with the new system metadata. 
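// --- Worked example (illustrative; both counts are hypothetical) for the
// Iterators.partition batching in getLatestAspect above.
int totalKeys = 2_500;     // size of dbKeys
int maxKeysPerQuery = 375; // stands in for MAX_KEYS_PER_QUERY
// ceil(2500 / 375) == 7 batches: six full batches of 375 keys plus one of
// 250, each fetched with a single _aspectDao.batchGet call.
int batches = (totalKeys + maxKeysPerQuery - 1) / maxKeysPerQuery;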
This will be stored in index + @Nullable Transaction tx, + @Nonnull final Urn urn, + @Nonnull final String aspectName, + @Nonnull final RecordTemplate newValue, + @Nonnull final AuditStamp auditStamp, + @Nonnull final SystemMetadata providedSystemMetadata, + @Nullable final EntityAspect latest, + @Nonnull final Long nextVersion) { + + // Set the "last run id" to be the run id provided with the new system metadata. This will be + // stored in index // for all aspects that have a run id, regardless of whether they change. - providedSystemMetadata.setLastRunId(providedSystemMetadata.getRunId(GetMode.NULL), SetMode.IGNORE_NULL); + providedSystemMetadata.setLastRunId( + providedSystemMetadata.getRunId(GetMode.NULL), SetMode.IGNORE_NULL); // 2. Compare the latest existing and new. final RecordTemplate oldValue = - latest == null ? null : EntityUtils.toAspectRecord(urn, aspectName, latest.getMetadata(), getEntityRegistry()); + latest == null + ? null + : EntityUtils.toAspectRecord( + urn, aspectName, latest.getMetadata(), getEntityRegistry()); // 3. If there is no difference between existing and new, we just update // the lastObserved in system metadata. RunId should stay as the original runId if (oldValue != null && DataTemplateUtil.areEqual(oldValue, newValue)) { - SystemMetadata latestSystemMetadata = EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); + SystemMetadata latestSystemMetadata = + EntityUtils.parseSystemMetadata(latest.getSystemMetadata()); latestSystemMetadata.setLastObserved(providedSystemMetadata.getLastObserved()); - latestSystemMetadata.setLastRunId(providedSystemMetadata.getLastRunId(GetMode.NULL), SetMode.IGNORE_NULL); + latestSystemMetadata.setLastRunId( + providedSystemMetadata.getLastRunId(GetMode.NULL), SetMode.IGNORE_NULL); latest.setSystemMetadata(RecordUtils.toJsonString(latestSystemMetadata)); @@ -1843,55 +2297,70 @@ private UpdateAspectResult ingestAspectToLocalDB( _aspectDao.saveAspect(tx, latest, false); // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); + _aspectDao.incrementWriteMetrics( + aspectName, 1, latest.getAspect().getBytes(StandardCharsets.UTF_8).length); return UpdateAspectResult.builder() - .urn(urn) - .oldValue(oldValue) - .newValue(oldValue) - .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) - .newSystemMetadata(latestSystemMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(auditStamp) - .maxVersion(0) - .build(); + .urn(urn) + .oldValue(oldValue) + .newValue(oldValue) + .oldSystemMetadata(EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) + .newSystemMetadata(latestSystemMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(0) + .build(); } // 4. Save the newValue as the latest version log.debug("Ingesting aspect with name {}, urn {}", aspectName, urn); String newValueStr = EntityUtils.toJsonAspect(newValue); - long versionOfOld = _aspectDao.saveLatestAspect(tx, urn.toString(), aspectName, latest == null ? null : EntityUtils.toJsonAspect(oldValue), - latest == null ? null : latest.getCreatedBy(), latest == null ? null : latest.getCreatedFor(), - latest == null ? null : latest.getCreatedOn(), latest == null ? null : latest.getSystemMetadata(), - newValueStr, auditStamp.getActor().toString(), - auditStamp.hasImpersonator() ? 
auditStamp.getImpersonator().toString() : null, - new Timestamp(auditStamp.getTime()), EntityUtils.toJsonAspect(providedSystemMetadata), nextVersion); + long versionOfOld = + _aspectDao.saveLatestAspect( + tx, + urn.toString(), + aspectName, + latest == null ? null : EntityUtils.toJsonAspect(oldValue), + latest == null ? null : latest.getCreatedBy(), + latest == null ? null : latest.getCreatedFor(), + latest == null ? null : latest.getCreatedOn(), + latest == null ? null : latest.getSystemMetadata(), + newValueStr, + auditStamp.getActor().toString(), + auditStamp.hasImpersonator() ? auditStamp.getImpersonator().toString() : null, + new Timestamp(auditStamp.getTime()), + EntityUtils.toJsonAspect(providedSystemMetadata), + nextVersion); // metrics - _aspectDao.incrementWriteMetrics(aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length); + _aspectDao.incrementWriteMetrics( + aspectName, 1, newValueStr.getBytes(StandardCharsets.UTF_8).length); return UpdateAspectResult.builder() - .urn(urn) - .oldValue(oldValue) - .newValue(newValue) - .oldSystemMetadata(latest == null ? null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) - .newSystemMetadata(providedSystemMetadata) - .operation(MetadataAuditOperation.UPDATE) - .auditStamp(auditStamp) - .maxVersion(versionOfOld) - .build(); + .urn(urn) + .oldValue(oldValue) + .newValue(newValue) + .oldSystemMetadata( + latest == null ? null : EntityUtils.parseSystemMetadata(latest.getSystemMetadata())) + .newSystemMetadata(providedSystemMetadata) + .operation(MetadataAuditOperation.UPDATE) + .auditStamp(auditStamp) + .maxVersion(versionOfOld) + .build(); } /** * Builds the default browse path aspects for a subset of well-supported entities. * - * This method currently supports datasets, charts, dashboards, data flows, data jobs, and glossary terms. + *
<p>
This method currently supports datasets, charts, dashboards, data flows, data jobs, and + * glossary terms. */ @Nonnull @Override public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISyntaxException { Character dataPlatformDelimiter = getDataPlatformDelimiter(urn); - String defaultBrowsePath = getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter); + String defaultBrowsePath = + getDefaultBrowsePath(urn, this.getEntityRegistry(), dataPlatformDelimiter); StringArray browsePaths = new StringArray(); browsePaths.add(defaultBrowsePath); BrowsePaths browsePathAspect = new BrowsePaths(); @@ -1902,19 +2371,19 @@ public BrowsePaths buildDefaultBrowsePath(final @Nonnull Urn urn) throws URISynt /** * Builds the default browse path V2 aspects for all entities. * - * This method currently supports datasets, charts, dashboards, and data jobs best. Everything else - * will have a basic "Default" folder added to their browsePathV2. + *
<p>
This method currently supports datasets, charts, dashboards, and data jobs best. Everything + * else will have a basic "Default" folder added to their browsePathV2. */ @Nonnull @Override - public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException { + public BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) + throws URISyntaxException { Character dataPlatformDelimiter = getDataPlatformDelimiter(urn); - return BrowsePathV2Utils.getDefaultBrowsePathV2(urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths); + return BrowsePathV2Utils.getDefaultBrowsePathV2( + urn, this.getEntityRegistry(), dataPlatformDelimiter, this, useContainerPaths); } - /** - * Returns a delimiter on which the name of an asset may be split. - */ + /** Returns a delimiter on which the name of an asset may be split. */ private Character getDataPlatformDelimiter(Urn urn) { // Attempt to construct the appropriate Data Platform URN Urn dataPlatformUrn = buildDataPlatformUrn(urn, this.getEntityRegistry()); @@ -1932,15 +2401,20 @@ private Character getDataPlatformDelimiter(Urn urn) { @Nullable private DataPlatformInfo getDataPlatformInfo(Urn urn) { try { - final EntityResponse entityResponse = getEntityV2( - Constants.DATA_PLATFORM_ENTITY_NAME, - urn, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) - ); - if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects() - .containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) { + final EntityResponse entityResponse = + getEntityV2( + Constants.DATA_PLATFORM_ENTITY_NAME, + urn, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)) { return new DataPlatformInfo( - entityResponse.getAspects().get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); + entityResponse + .getAspects() + .get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) + .getValue() + .data()); } } catch (Exception e) { log.warn(String.format("Failed to find Data Platform Info for urn %s", urn)); @@ -1949,7 +2423,8 @@ private DataPlatformInfo getDataPlatformInfo(Urn urn) { } private static boolean shouldAspectEmitChangeLog(@Nonnull final AspectSpec aspectSpec) { - final List relationshipFieldSpecs = aspectSpec.getRelationshipFieldSpecs(); + final List relationshipFieldSpecs = + aspectSpec.getRelationshipFieldSpecs(); return relationshipFieldSpecs.stream().anyMatch(RelationshipFieldSpec::isLineageRelationship); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java index ffd63479589bc..c2a0a211f9e76 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.datahub.util.RecordUtils; import com.google.common.base.Preconditions; import com.linkedin.common.AuditStamp; @@ -18,24 +21,17 @@ import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -import 
lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.net.URLEncoder; import java.util.List; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityUtils { - private EntityUtils() { - } + private EntityUtils() {} public static final int URN_NUM_BYTES_LIMIT = 512; public static final String URN_DELIMITER_SEPARATOR = "␟"; @@ -63,17 +59,19 @@ public static AuditStamp getAuditStamp(Urn actor) { } public static void ingestChangeProposals( - @Nonnull List changes, - @Nonnull EntityService entityService, - @Nonnull Urn actor, - @Nonnull Boolean async - ) { - entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(changes, entityService.getEntityRegistry()).build(), getAuditStamp(actor), async); + @Nonnull List changes, + @Nonnull EntityService entityService, + @Nonnull Urn actor, + @Nonnull Boolean async) { + entityService.ingestProposal( + AspectsBatchImpl.builder().mcps(changes, entityService.getEntityRegistry()).build(), + getAuditStamp(actor), + async); } /** * Get aspect from entity + * * @param entityUrn URN of the entity * @param aspectName aspect name string * @param entityService EntityService obj @@ -82,11 +80,10 @@ public static void ingestChangeProposals( */ @Nullable public static RecordTemplate getAspectFromEntity( - String entityUrn, - String aspectName, - EntityService entityService, - RecordTemplate defaultValue - ) { + String entityUrn, + String aspectName, + EntityService entityService, + RecordTemplate defaultValue) { Urn urn = getUrnFromString(entityUrn); if (urn == null) { return defaultValue; @@ -99,11 +96,10 @@ public static RecordTemplate getAspectFromEntity( return aspect; } catch (Exception e) { log.error( - "Error constructing aspect from entity. Entity: {} aspect: {}. Error: {}", - entityUrn, - aspectName, - e.toString() - ); + "Error constructing aspect from entity. Entity: {} aspect: {}. 
Error: {}", + entityUrn, + aspectName, + e.toString()); return null; } } @@ -114,7 +110,8 @@ public static RecordTemplate toAspectRecord( @Nonnull final String aspectName, @Nonnull final String jsonAspect, @Nonnull final EntityRegistry entityRegistry) { - return toAspectRecord(PegasusUtils.urnToEntityName(entityUrn), aspectName, jsonAspect, entityRegistry); + return toAspectRecord( + PegasusUtils.urnToEntityName(entityUrn), aspectName, jsonAspect, entityRegistry); } /** @@ -131,13 +128,17 @@ public static RecordTemplate toAspectRecord( @Nonnull final EntityRegistry entityRegistry) { final EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); final AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); - //TODO: aspectSpec can be null here - Preconditions.checkState(aspectSpec != null, String.format("Aspect %s could not be found", aspectName)); + // TODO: aspectSpec can be null here + Preconditions.checkState( + aspectSpec != null, String.format("Aspect %s could not be found", aspectName)); final RecordDataSchema aspectSchema = aspectSpec.getPegasusSchema(); - RecordTemplate aspectRecord = RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), jsonAspect); - RecordTemplateValidator.validate(aspectRecord, validationFailure -> { - log.warn(String.format("Failed to validate record %s against its schema.", aspectRecord)); - }); + RecordTemplate aspectRecord = + RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), jsonAspect); + RecordTemplateValidator.validate( + aspectRecord, + validationFailure -> { + log.warn(String.format("Failed to validate record %s against its schema.", aspectRecord)); + }); return aspectRecord; } @@ -151,16 +152,14 @@ public static SystemMetadata parseSystemMetadata(String jsonSystemMetadata) { return RecordUtils.toRecordTemplate(SystemMetadata.class, jsonSystemMetadata); } - /** - * Check if entity is removed (removed=true in Status aspect) and exists - */ + /** Check if entity is removed (removed=true in Status aspect) and exists */ public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) { try { - + if (!entityService.exists(entityUrn)) { return false; } - + EnvelopedAspect statusAspect = entityService.getLatestEnvelopedAspect(entityUrn.getEntityType(), entityUrn, "status"); if (statusAspect == null) { @@ -174,7 +173,8 @@ public static boolean checkIfRemoved(EntityService entityService, Urn entityUrn) } } - public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { + public static RecordTemplate buildKeyAspect( + @Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { final EntitySpec spec = entityRegistry.getEntitySpec(urnToEntityName(urn)); final AspectSpec keySpec = spec.getKeyAspectSpec(); return EntityKeyUtils.convertUrnToEntityKey(urn, keySpec); @@ -183,18 +183,27 @@ public static RecordTemplate buildKeyAspect(@Nonnull EntityRegistry entityRegist public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull final Urn urn) { EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); validator.setCurrentEntitySpec(entityRegistry.getEntitySpec(urn.getEntityType())); - RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), validationResult -> { - throw new IllegalArgumentException("Invalid urn: " + urn + "\n Cause: " - + validationResult.getMessages()); }, validator); + RecordTemplateValidator.validate( + EntityUtils.buildKeyAspect(entityRegistry, urn), + validationResult 
-> { + throw new IllegalArgumentException( + "Invalid urn: " + urn + "\n Cause: " + validationResult.getMessages()); + }, + validator); if (urn.toString().trim().length() != urn.toString().length()) { - throw new IllegalArgumentException("Error: cannot provide an URN with leading or trailing whitespace"); + throw new IllegalArgumentException( + "Error: cannot provide an URN with leading or trailing whitespace"); } if (URLEncoder.encode(urn.toString()).length() > URN_NUM_BYTES_LIMIT) { - throw new IllegalArgumentException("Error: cannot provide an URN longer than " + Integer.toString(URN_NUM_BYTES_LIMIT) + " bytes (when URL encoded)"); + throw new IllegalArgumentException( + "Error: cannot provide an URN longer than " + + Integer.toString(URN_NUM_BYTES_LIMIT) + + " bytes (when URL encoded)"); } if (urn.toString().contains(URN_DELIMITER_SEPARATOR)) { - throw new IllegalArgumentException("Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character"); + throw new IllegalArgumentException( + "Error: URN cannot contain " + URN_DELIMITER_SEPARATOR + " character"); } try { Urn.createFromString(urn.toString()); @@ -202,5 +211,4 @@ public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull throw new IllegalArgumentException(e); } } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java index 81eb5d4eb947c..c0ee01abe0a84 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/NewModelUtils.java @@ -20,12 +20,10 @@ import org.reflections.Reflections; import org.reflections.scanners.Scanner; - public class NewModelUtils { private static final ClassLoader CLASS_LOADER = DummySnapshot.class.getClassLoader(); - private NewModelUtils() { - } + private NewModelUtils() {} public static String getAspectName(@Nonnull Class aspectClass) { return aspectClass.getCanonicalName(); @@ -36,9 +34,9 @@ public static Class getAspectClass(@Nonnull String asp return getClassFromName(aspectName, RecordTemplate.class); } - @Nonnull - public static Class getClassFromName(@Nonnull String className, @Nonnull Class parentClass) { + public static Class getClassFromName( + @Nonnull String className, @Nonnull Class parentClass) { try { return CLASS_LOADER.loadClass(className).asSubclass(parentClass); } catch (ClassNotFoundException var3) { @@ -47,8 +45,8 @@ public static Class getClassFromName(@Nonnull String className, } @Nonnull - public static List> getAspectsFromSnapshot( - @Nonnull SNAPSHOT snapshot) { + public static + List> getAspectsFromSnapshot(@Nonnull SNAPSHOT snapshot) { SnapshotValidator.validateSnapshotSchema(snapshot.getClass()); return getAspects(snapshot); } @@ -57,28 +55,34 @@ public static List> getAspects(@Nonnull RecordTemplate snapshot) { Class clazz = getAspectsArrayClass(snapshot.getClass()); WrappingArrayTemplate aspectArray = - (WrappingArrayTemplate) RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); + (WrappingArrayTemplate) + RecordUtils.getRecordTemplateWrappedField(snapshot, "aspects", clazz); List> aspects = new ArrayList(); - aspectArray.forEach((item) -> { - try { - RecordTemplate aspect = RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item); - String name = PegasusUtils.getAspectNameFromSchema(aspect.schema()); - aspects.add(Pair.of(name, aspect)); - } catch (InvalidSchemaException e) { - // ignore fields that are not part of the 
union - } catch (TemplateOutputCastException e) { - // ignore fields that are not part of the union - } - }); + aspectArray.forEach( + (item) -> { + try { + RecordTemplate aspect = + RecordUtils.getSelectedRecordTemplateFromUnion((UnionTemplate) item); + String name = PegasusUtils.getAspectNameFromSchema(aspect.schema()); + aspects.add(Pair.of(name, aspect)); + } catch (InvalidSchemaException e) { + // ignore fields that are not part of the union + } catch (TemplateOutputCastException e) { + // ignore fields that are not part of the union + } + }); return aspects; } - @Nonnull - private static Class getAspectsArrayClass( - @Nonnull Class snapshotClass) { + private static + Class getAspectsArrayClass( + @Nonnull Class snapshotClass) { try { - return snapshotClass.getMethod("getAspects").getReturnType().asSubclass(WrappingArrayTemplate.class); + return snapshotClass + .getMethod("getAspects") + .getReturnType() + .asSubclass(WrappingArrayTemplate.class); } catch (ClassCastException | NoSuchMethodException var2) { throw new RuntimeException(var2); } @@ -86,10 +90,10 @@ private static Class> getAllEntities() { - return (Set) (new Reflections("com.linkedin.metadata.entity", new Scanner[0])).getSubTypesOf(RecordTemplate.class) - .stream() - .filter(EntityValidator::isValidEntitySchema) - .collect(Collectors.toSet()); + return (Set) + (new Reflections("com.linkedin.metadata.entity", new Scanner[0])) + .getSubTypesOf(RecordTemplate.class).stream() + .filter(EntityValidator::isValidEntitySchema) + .collect(Collectors.toSet()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java index 7804aa2067088..43df42713cc4d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/AspectStorageValidationUtil.java @@ -2,23 +2,24 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.ResultSet; - import javax.annotation.Nonnull; public class AspectStorageValidationUtil { - private AspectStorageValidationUtil() { - } + private AspectStorageValidationUtil() {} /** * Check if entity aspect table exists in the database. + * * @param session * @return {@code true} if table exists. 
*/ public static boolean checkTableExists(@Nonnull CqlSession session) { - String query = String.format("SELECT table_name \n " - + "FROM system_schema.tables where table_name = '%s' allow filtering;", - CassandraAspect.TABLE_NAME); + String query = + String.format( + "SELECT table_name \n " + + "FROM system_schema.tables where table_name = '%s' allow filtering;", + CassandraAspect.TABLE_NAME); ResultSet rs = session.execute(query); return rs.all().size() > 0; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java index 891a47130fe25..d68386291acb3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspect.java @@ -1,23 +1,22 @@ package com.linkedin.metadata.entity.cassandra; import com.datastax.oss.driver.api.core.cql.Row; -import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.EntityAspectIdentifier; +import java.sql.Timestamp; +import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -import javax.annotation.Nonnull; -import java.sql.Timestamp; - /** - * This class represents entity aspect records stored in Cassandra database. - * It's also aware of {@link EntityAspect} which is a shared in-memory representation of an aspect record and knows - * how to translate itself to it. + * This class represents entity aspect records stored in Cassandra database. It's also aware of + * {@link EntityAspect} which is a shared in-memory representation of an aspect record and knows how + * to translate itself to it. * - * TODO: Consider using datastax java driver `@Entity` - * (see: https://docs.datastax.com/en/developer/java-driver/4.13/manual/mapper/entities/) + *

TODO: Consider using datastax java driver `@Entity` (see: + * https://docs.datastax.com/en/developer/java-driver/4.13/manual/mapper/entities/) */ @Getter @Setter @@ -61,7 +60,9 @@ public static EntityAspect rowToEntityAspect(@Nonnull Row row) { row.getLong(CassandraAspect.VERSION_COLUMN), row.getString(CassandraAspect.METADATA_COLUMN), row.getString(CassandraAspect.SYSTEM_METADATA_COLUMN), - row.getInstant(CassandraAspect.CREATED_ON_COLUMN) == null ? null : Timestamp.from(row.getInstant(CassandraAspect.CREATED_ON_COLUMN)), + row.getInstant(CassandraAspect.CREATED_ON_COLUMN) == null + ? null + : Timestamp.from(row.getInstant(CassandraAspect.CREATED_ON_COLUMN)), row.getString(CassandraAspect.CREATED_BY_COLUMN), row.getString(CassandraAspect.CREATED_FOR_COLUMN)); } @@ -73,5 +74,4 @@ public static EntityAspectIdentifier rowToAspectIdentifier(@Nonnull Row row) { row.getString(CassandraAspect.ASPECT_COLUMN), row.getLong(CassandraAspect.VERSION_COLUMN)); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java index 9f4a36efb4501..3293bc6178e43 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraAspectDao.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity.cassandra; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.exception.ModelConversionException; import com.datahub.util.exception.RetryLimitReached; import com.datastax.oss.driver.api.core.CqlSession; @@ -31,6 +34,8 @@ import com.linkedin.metadata.query.ExtraInfo; import com.linkedin.metadata.query.ExtraInfoArray; import com.linkedin.metadata.query.ListResultMetadata; +import io.ebean.PagedList; +import io.ebean.Transaction; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; @@ -44,14 +49,8 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import io.ebean.PagedList; -import io.ebean.Transaction; import lombok.extern.slf4j.Slf4j; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.*; -import static com.linkedin.metadata.Constants.*; - @Slf4j public class CassandraAspectDao implements AspectDao, AspectMigrationsDao { @@ -88,16 +87,22 @@ public EntityAspect getLatestAspect(@Nonnull String urn, @Nonnull String aspectN } @Override - public Map> getLatestAspects(Map> urnAspects) { + public Map> getLatestAspects( + Map> urnAspects) { return urnAspects.entrySet().stream() - .map(entry -> Map.entry(entry.getKey(), entry.getValue().stream() - .map(aspectName -> { - EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName); - return aspect != null ? Map.entry(aspectName, aspect) : null; - }) - .filter(Objects::nonNull) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .map( + entry -> + Map.entry( + entry.getKey(), + entry.getValue().stream() + .map( + aspectName -> { + EntityAspect aspect = getLatestAspect(entry.getKey(), aspectName); + return aspect != null ? 
Map.entry(aspectName, aspect) : null;
+                                  })
+                              .filter(Objects::nonNull)
+                              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
   }
 
   @Override
@@ -110,48 +115,63 @@ public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspec
   @Override
   public long countEntities() {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .distinct()
-        .column(CassandraAspect.URN_COLUMN)
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .distinct()
+            .column(CassandraAspect.URN_COLUMN)
+            .build();
     ResultSet rs = _cqlSession.execute(ss);
     // TODO: make sure it doesn't blow up on a large database
-    // Getting a count of distinct values in a Cassandra query doesn't seem to be feasible, but counting them in the app is dangerous
-    // The saving grace here is that the only place where this method is used should only run once, what the database is still young
+    // Getting a count of distinct values in a Cassandra query doesn't seem to be feasible, but
+    // counting them in the app is dangerous
+    // The saving grace here is that the only place where this method is used should only run once,
+    // when the database is still young
     return rs.all().size();
   }
 
   @Override
   public boolean checkIfAspectExists(@Nonnull String aspectName) {
     validateConnection();
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .column(CassandraAspect.URN_COLUMN)
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName))
-        .limit(1)
-        .allowFiltering()
-        .build();
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .column(CassandraAspect.URN_COLUMN)
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .isEqualTo(literal(aspectName))
+            .limit(1)
+            .allowFiltering()
+            .build();
     ResultSet rs = _cqlSession.execute(ss);
     return rs.one() != null;
   }
 
-  private Map getMaxVersions(@Nonnull final String urn, @Nonnull final Set aspectNames) {
-    SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME)
-        .selectors(
-            Selector.column(CassandraAspect.URN_COLUMN),
-            Selector.column(CassandraAspect.ASPECT_COLUMN),
-            Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)).as(CassandraAspect.VERSION_COLUMN))
-        .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn))
-        .whereColumn(CassandraAspect.ASPECT_COLUMN).in(aspectNamesToLiterals(aspectNames))
-        .groupBy(ImmutableList.of(Selector.column(CassandraAspect.URN_COLUMN), Selector.column(CassandraAspect.ASPECT_COLUMN)))
-        .build();
+  private Map getMaxVersions(
+      @Nonnull final String urn, @Nonnull final Set aspectNames) {
+    SimpleStatement ss =
+        selectFrom(CassandraAspect.TABLE_NAME)
+            .selectors(
+                Selector.column(CassandraAspect.URN_COLUMN),
+                Selector.column(CassandraAspect.ASPECT_COLUMN),
+                Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN))
+                    .as(CassandraAspect.VERSION_COLUMN))
+            .whereColumn(CassandraAspect.URN_COLUMN)
+            .isEqualTo(literal(urn))
+            .whereColumn(CassandraAspect.ASPECT_COLUMN)
+            .in(aspectNamesToLiterals(aspectNames))
+            .groupBy(
+                ImmutableList.of(
+                    Selector.column(CassandraAspect.URN_COLUMN),
+                    Selector.column(CassandraAspect.ASPECT_COLUMN)))
+            .build();
     ResultSet rs = _cqlSession.execute(ss);
-    Map aspectVersions = rs.all().stream()
-        .collect(Collectors.toMap(
-            row -> row.getString(CassandraAspect.ASPECT_COLUMN),
-            row -> row.getLong(CassandraAspect.VERSION_COLUMN)));
+    Map aspectVersions =
+        rs.all().stream()
+            .collect(
+                Collectors.toMap(
+ row -> row.getString(CassandraAspect.ASPECT_COLUMN), + row -> row.getLong(CassandraAspect.VERSION_COLUMN))); // For each requested aspect that didn't come back from DB, add a version -1 for (String aspect : aspectNames) { @@ -164,7 +184,8 @@ private Map getMaxVersions(@Nonnull final String urn, @Nonnull fin } @Override - public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) { + public void saveAspect( + @Nullable Transaction tx, @Nonnull EntityAspect aspect, final boolean insert) { validateConnection(); SimpleStatement statement = generateSaveStatement(aspect, insert); _cqlSession.execute(statement); @@ -174,7 +195,8 @@ public void saveAspect(@Nullable Transaction tx, @Nonnull EntityAspect aspect, f // TODO: look into supporting pagination @Override @Nonnull - public Map batchGet(@Nonnull final Set keys) { + public Map batchGet( + @Nonnull final Set keys) { validateConnection(); return keys.stream() .map(this::getAspect) @@ -210,13 +232,17 @@ public ListResult listAspectMetadata( final int pageSize) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version)) - .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(version)) + .whereColumn(CassandraAspect.ENTITY_COLUMN) + .isEqualTo(literal(entityName)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); @@ -224,53 +250,58 @@ public ListResult listAspectMetadata( OffsetPager offsetPager = new OffsetPager(pageSize); Page page = offsetPager.getPage(rs, pageNumber); - final List aspects = page - .getElements() - .stream().map(CassandraAspect::rowToEntityAspect) - .collect(Collectors.toList()); + final List aspects = + page.getElements().stream() + .map(CassandraAspect::rowToEntityAspect) + .collect(Collectors.toList()); // TODO: address performance issue for getting total count // https://www.datastax.com/blog/running-count-expensive-cassandra - SimpleStatement ssCount = selectFrom(CassandraAspect.TABLE_NAME) - .countAll() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version)) - .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)) - .allowFiltering() - .build(); + SimpleStatement ssCount = + selectFrom(CassandraAspect.TABLE_NAME) + .countAll() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(version)) + .whereColumn(CassandraAspect.ENTITY_COLUMN) + .isEqualTo(literal(entityName)) + .allowFiltering() + .build(); long totalCount = _cqlSession.execute(ssCount).one().getLong(0); - final List aspectMetadatas = aspects - .stream() - .map(EntityAspect::getMetadata) - .collect(Collectors.toList()); + final List aspectMetadatas = + aspects.stream().map(EntityAspect::getMetadata).collect(Collectors.toList()); - final ListResultMetadata listResultMetadata = toListResultMetadata(aspects - .stream() - .map(CassandraAspectDao::toExtraInfo) - .collect(Collectors.toList())); + final ListResultMetadata listResultMetadata = + 
toListResultMetadata( + aspects.stream().map(CassandraAspectDao::toExtraInfo).collect(Collectors.toList())); - return toListResult(aspectMetadatas, listResultMetadata, start, pageNumber, pageSize, totalCount); + return toListResult( + aspectMetadatas, listResultMetadata, start, pageNumber, pageSize, totalCount); } @Override @Nonnull - public T runInTransactionWithRetry(@Nonnull final Function block, final int maxTransactionRetry) { + public T runInTransactionWithRetry( + @Nonnull final Function block, final int maxTransactionRetry) { validateConnection(); int retryCount = 0; Exception lastException; do { try { - // TODO: Try to bend this code to make use of Cassandra batches. This method is called from single-urn operations, so perf should not suffer much + // TODO: Try to bend this code to make use of Cassandra batches. This method is called from + // single-urn operations, so perf should not suffer much return block.apply(null); } catch (DriverException exception) { lastException = exception; } } while (++retryCount <= maxTransactionRetry); - throw new RetryLimitReached("Failed to add after " + maxTransactionRetry + " retries", lastException); + throw new RetryLimitReached( + "Failed to add after " + maxTransactionRetry + " retries", lastException); } private ListResult toListResult( @@ -283,17 +314,18 @@ private ListResult toListResult( final int numPages = (int) (totalCount / pageSize + (totalCount % pageSize == 0 ? 0 : 1)); final boolean hasNext = pageNumber < numPages; - final int nextStart = (start != null && hasNext) ? (pageNumber * pageSize) : ListResult.INVALID_NEXT_START; + final int nextStart = + (start != null && hasNext) ? (pageNumber * pageSize) : ListResult.INVALID_NEXT_START; return ListResult.builder() - .values(values) - .metadata(listResultMetadata) - .nextStart(nextStart) - .hasNext(hasNext) - .totalCount((int) totalCount) - .totalPageCount(numPages) - .pageSize(pageSize) - .build(); + .values(values) + .metadata(listResultMetadata) + .nextStart(nextStart) + .hasNext(hasNext) + .totalCount((int) totalCount) + .totalPageCount(numPages) + .pageSize(pageSize) + .build(); } @Nonnull @@ -336,12 +368,16 @@ private static AuditStamp toAuditStamp(@Nonnull final EntityAspect aspect) { @Override public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect) { validateConnection(); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspect.getAspect())) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(aspect.getVersion())) - .ifExists() - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(aspect.getUrn())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspect.getAspect())) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(aspect.getVersion())) + .ifExists() + .build(); _cqlSession.execute(ss); } @@ -349,9 +385,11 @@ public void deleteAspect(@Nullable Transaction tx, @Nonnull final EntityAspect a @Override public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { validateConnection(); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + 
.isEqualTo(literal(urn)) + .build(); ResultSet rs = _cqlSession.execute(ss); // TODO: look into how to get around this for counts in Cassandra // https://stackoverflow.com/questions/28611459/how-to-know-affected-rows-in-cassandracql @@ -359,11 +397,14 @@ public int deleteUrn(@Nullable Transaction tx, @Nonnull final String urn) { } public List getAllAspects(String urn, String aspectName) { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn)) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .build(); ResultSet rs = _cqlSession.execute(ss); return rs.all().stream().map(CassandraAspect::rowToEntityAspect).collect(Collectors.toList()); @@ -373,13 +414,17 @@ public List getAllAspects(String urn, String aspectName) { @Nullable public EntityAspect getAspect(@Nonnull String urn, @Nonnull String aspectName, long version) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn)) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(version)) - .limit(1) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn)) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(version)) + .limit(1) + .build(); ResultSet rs = _cqlSession.execute(ss); Row row = rs.one(); @@ -395,17 +440,20 @@ public ListResult listUrns( final int pageSize) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .columns( - CassandraAspect.URN_COLUMN, - CassandraAspect.ASPECT_COLUMN, - CassandraAspect.VERSION_COLUMN - ) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(ASPECT_LATEST_VERSION)) - .whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .columns( + CassandraAspect.URN_COLUMN, + CassandraAspect.ASPECT_COLUMN, + CassandraAspect.VERSION_COLUMN) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.ENTITY_COLUMN) + .isEqualTo(literal(entityName)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); @@ -414,17 +462,20 @@ public ListResult listUrns( Page page = offsetPager.getPage(rs, pageNumber); - final List urns = page - .getElements() - .stream().map(row -> CassandraAspect.rowToAspectIdentifier(row).getUrn()) - .collect(Collectors.toList()); + final List urns = + page.getElements().stream() + .map(row -> CassandraAspect.rowToAspectIdentifier(row).getUrn()) + .collect(Collectors.toList()); // TODO: address performance issue for getting total count // https://www.datastax.com/blog/running-count-expensive-cassandra - SimpleStatement ssCount = selectFrom(CassandraAspect.TABLE_NAME) + SimpleStatement ssCount 
= + selectFrom(CassandraAspect.TABLE_NAME) .countAll() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(ASPECT_LATEST_VERSION)) .allowFiltering() .build(); @@ -457,9 +508,8 @@ public Stream streamAspects(String entityName, String aspectName) @Nonnull public Iterable listAllUrns(int start, int pageSize) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .column(CassandraAspect.URN_COLUMN) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME).column(CassandraAspect.URN_COLUMN).build(); ResultSet rs = _cqlSession.execute(ss); @@ -467,9 +517,8 @@ public Iterable listAllUrns(int start, int pageSize) { OffsetPager offsetPager = new OffsetPager(pageSize); Page page = offsetPager.getPage(rs, pageNumber); - return page - .getElements() - .stream().map(row -> row.getString(CassandraAspect.URN_COLUMN)) + return page.getElements().stream() + .map(row -> row.getString(CassandraAspect.URN_COLUMN)) .collect(Collectors.toList()); } @@ -496,21 +545,20 @@ public Map> getNextVersions(Map> u @Override public long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion - ) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion) { validateConnection(); if (!_canWrite) { @@ -521,7 +569,8 @@ public long saveLatestAspect( BatchStatement batch = BatchStatement.newInstance(BatchType.UNLOGGED); if (oldAspectMetadata != null && oldTime != null) { largestVersion = nextVersion; - final EntityAspect aspect = new EntityAspect( + final EntityAspect aspect = + new EntityAspect( urn, aspectName, largestVersion, @@ -529,13 +578,13 @@ public long saveLatestAspect( oldSystemMetadata, oldTime, oldActor, - oldImpersonator - ); + oldImpersonator); batch = batch.add(generateSaveStatement(aspect, true)); } // Save newValue as the latest version (v0) - final EntityAspect aspect = new EntityAspect( + final EntityAspect aspect = + new EntityAspect( urn, aspectName, ASPECT_LATEST_VERSION, @@ -543,8 +592,7 @@ public long saveLatestAspect( newSystemMetadata, newTime, newActor, - newImpersonator - ); + newImpersonator); batch = batch.add(generateSaveStatement(aspect, oldAspectMetadata == null)); _cqlSession.execute(batch); return largestVersion; @@ -558,7 +606,8 @@ private SimpleStatement generateSaveStatement(EntityAspect aspect, boolean inser throw new RuntimeException(e); } if (insert) { - 
Insert ri = insertInto(CassandraAspect.TABLE_NAME) + Insert ri = + insertInto(CassandraAspect.TABLE_NAME) .value(CassandraAspect.URN_COLUMN, literal(aspect.getUrn())) .value(CassandraAspect.ASPECT_COLUMN, literal(aspect.getAspect())) .value(CassandraAspect.VERSION_COLUMN, literal(aspect.getVersion())) @@ -572,16 +621,23 @@ private SimpleStatement generateSaveStatement(EntityAspect aspect, boolean inser return ri.build(); } else { - UpdateWithAssignments uwa = update(CassandraAspect.TABLE_NAME) + UpdateWithAssignments uwa = + update(CassandraAspect.TABLE_NAME) .setColumn(CassandraAspect.METADATA_COLUMN, literal(aspect.getMetadata())) - .setColumn(CassandraAspect.SYSTEM_METADATA_COLUMN, literal(aspect.getSystemMetadata())) - .setColumn(CassandraAspect.CREATED_ON_COLUMN, literal(aspect.getCreatedOn().getTime())) + .setColumn( + CassandraAspect.SYSTEM_METADATA_COLUMN, literal(aspect.getSystemMetadata())) + .setColumn( + CassandraAspect.CREATED_ON_COLUMN, literal(aspect.getCreatedOn().getTime())) .setColumn(CassandraAspect.CREATED_BY_COLUMN, literal(aspect.getCreatedBy())) .setColumn(CassandraAspect.CREATED_FOR_COLUMN, literal(aspect.getCreatedFor())); - Update u = uwa.whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(aspect.getUrn())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspect.getAspect())) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(aspect.getVersion())) + Update u = + uwa.whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(aspect.getUrn())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspect.getAspect())) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(aspect.getVersion())) .ifExists(); return u.build(); @@ -595,28 +651,28 @@ public void setWritable(boolean canWrite) { @Override public void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); - final EntityAspect aspect = new EntityAspect( - urn, - aspectName, - version, - aspectMetadata, - systemMetadata, - timestamp, - actor, - impersonator - ); + final EntityAspect aspect = + new EntityAspect( + urn, + aspectName, + version, + aspectMetadata, + systemMetadata, + timestamp, + actor, + impersonator); saveAspect(tx, aspect, insert); @@ -626,16 +682,22 @@ public void saveAspect( @Override @Nonnull - public List getAspectsInRange(@Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis) { + public List getAspectsInRange( + @Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis) { validateConnection(); - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).in(aspectNamesToLiterals(aspectNames)) - .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isLessThanOrEqualTo(literal(startTimeMillis)) - 
.whereColumn(CassandraAspect.CREATED_ON_COLUMN).isGreaterThan(literal(endTimeMillis)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .in(aspectNamesToLiterals(aspectNames)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isLessThanOrEqualTo(literal(startTimeMillis)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isGreaterThan(literal(endTimeMillis)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java index 9ebb6b26fc43d..6a1ba72c37676 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/cassandra/CassandraRetentionService.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.entity.cassandra; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; +import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; + import com.datahub.util.RecordUtils; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.cql.ResultSet; @@ -10,10 +14,11 @@ import com.datastax.oss.driver.api.querybuilder.select.Selector; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityAspect; +import com.linkedin.metadata.entity.EntityAspectIdentifier; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; -import com.linkedin.metadata.entity.EntityAspectIdentifier; -import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.retention.BulkApplyRetentionArgs; import com.linkedin.metadata.entity.retention.BulkApplyRetentionResult; @@ -23,13 +28,7 @@ import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import com.linkedin.retention.VersionBasedRetention; -import com.linkedin.metadata.Constants; import io.opentelemetry.extension.annotations.WithSpan; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.sql.Timestamp; import java.time.Clock; import java.util.List; @@ -37,10 +36,10 @@ import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; - -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.deleteFrom; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal; -import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -58,31 +57,38 @@ public EntityService getEntityService() { @Override protected AspectsBatch buildAspectsBatch(List mcps) { - return AspectsBatchImpl.builder() - .mcps(mcps, _entityService.getEntityRegistry()) - .build(); + return 
AspectsBatchImpl.builder().mcps(mcps, _entityService.getEntityRegistry()).build(); } @Override @WithSpan protected void applyRetention(List retentionContexts) { - List nonEmptyContexts = retentionContexts.stream() - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()) + List nonEmptyContexts = + retentionContexts.stream() + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) .collect(Collectors.toList()); - nonEmptyContexts.forEach(context -> { - if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { - Retention retentionPolicy = context.getRetentionPolicy().get(); - applyVersionBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getVersion(), context.getMaxVersion()); - } + nonEmptyContexts.forEach( + context -> { + if (context.getRetentionPolicy().map(Retention::hasVersion).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyVersionBasedRetention( + context.getUrn(), + context.getAspectName(), + retentionPolicy.getVersion(), + context.getMaxVersion()); + } - if (context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { - Retention retentionPolicy = context.getRetentionPolicy().get(); - applyTimeBasedRetention(context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); - } - }); + if (context.getRetentionPolicy().map(Retention::hasTime).orElse(false)) { + Retention retentionPolicy = context.getRetentionPolicy().get(); + applyTimeBasedRetention( + context.getUrn(), context.getAspectName(), retentionPolicy.getTime()); + } + }); } @Override @@ -111,18 +117,22 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } final String aspectNameFromRecord = id.getAspect(); // Get the retention policies to apply from the local retention policy map - Optional retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - retentionPolicy.ifPresent(retention -> - applyRetention(List.of(RetentionContext.builder() - .urn(urn) - .aspectName(aspectNameFromRecord) - .retentionPolicy(retentionPolicy) - .maxVersion(Optional.of(id.getVersion())) - .build()))); + Optional retentionPolicy = + getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + retentionPolicy.ifPresent( + retention -> + applyRetention( + List.of( + RetentionContext.builder() + .urn(urn) + .aspectName(aspectNameFromRecord) + .retentionPolicy(retentionPolicy) + .maxVersion(Optional.of(id.getVersion())) + .build()))); i += 1; if (i % _batchSize == 0) { @@ -134,7 +144,8 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } @Override - public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args) { + public BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args) { log.error("batchApplyRetentionEntities not implemented for cassandra"); return null; } @@ -147,23 +158,31 @@ private void applyVersionBasedRetention( long largestVersion = maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn, aspectName)); - SimpleStatement ss = 
deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isLessThanOrEqualTo(literal(largestVersion - retention.getMaxVersions() + 1L)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isLessThanOrEqualTo(literal(largestVersion - retention.getMaxVersions() + 1L)) + .build(); _cqlSession.execute(ss); } private long getMaxVersion(@Nonnull final Urn urn, @Nonnull final String aspectName) { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .orderBy(CassandraAspect.VERSION_COLUMN, ClusteringOrder.DESC) - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .orderBy(CassandraAspect.VERSION_COLUMN, ClusteringOrder.DESC) + .build(); ResultSet rs = _cqlSession.execute(ss); Row row = rs.one(); return row.getLong(CassandraAspect.VERSION_COLUMN); @@ -174,47 +193,69 @@ private void applyTimeBasedRetention( @Nonnull final String aspectName, @Nonnull final TimeBasedRetention retention) { Timestamp threshold = new Timestamp(_clock.millis() - retention.getMaxAgeInSeconds() * 1000); - SimpleStatement ss = deleteFrom(CassandraAspect.TABLE_NAME) - .whereColumn(CassandraAspect.URN_COLUMN).isEqualTo(literal(urn.toString())) - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)) - .whereColumn(CassandraAspect.CREATED_ON_COLUMN).isLessThanOrEqualTo(literal(threshold)) - .build(); + SimpleStatement ss = + deleteFrom(CassandraAspect.TABLE_NAME) + .whereColumn(CassandraAspect.URN_COLUMN) + .isEqualTo(literal(urn.toString())) + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(aspectName)) + .whereColumn(CassandraAspect.CREATED_ON_COLUMN) + .isLessThanOrEqualTo(literal(threshold)) + .build(); _cqlSession.execute(ss); } - private List queryCandidates(@Nullable String entityName, @Nullable String aspectName) { - Select select = selectFrom(CassandraAspect.TABLE_NAME) - .selectors( - Selector.column(CassandraAspect.URN_COLUMN), - Selector.column(CassandraAspect.ASPECT_COLUMN), - Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)).as(CassandraAspect.VERSION_COLUMN)) - .allowFiltering(); + private List queryCandidates( + @Nullable String entityName, @Nullable String aspectName) { + Select select = + selectFrom(CassandraAspect.TABLE_NAME) + .selectors( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN), + Selector.function("max", Selector.column(CassandraAspect.VERSION_COLUMN)) + .as(CassandraAspect.VERSION_COLUMN)) + 
.allowFiltering(); if (aspectName != null) { select = select.whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(aspectName)); } - select = select.whereColumn(CassandraAspect.VERSION_COLUMN).isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)); + select = + select + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isGreaterThan(literal(Constants.ASPECT_LATEST_VERSION)); if (entityName != null) { select = select.whereColumn(CassandraAspect.ENTITY_COLUMN).isEqualTo(literal(entityName)); } - select = select.groupBy(ImmutableList.of(Selector.column(CassandraAspect.URN_COLUMN), Selector.column(CassandraAspect.ASPECT_COLUMN))); + select = + select.groupBy( + ImmutableList.of( + Selector.column(CassandraAspect.URN_COLUMN), + Selector.column(CassandraAspect.ASPECT_COLUMN))); SimpleStatement ss = select.build(); ResultSet rs = _cqlSession.execute(ss); - return rs.all().stream().map(CassandraAspect::rowToAspectIdentifier).collect(Collectors.toList()); + return rs.all().stream() + .map(CassandraAspect::rowToAspectIdentifier) + .collect(Collectors.toList()); } private Map getAllRetentionPolicies() { - SimpleStatement ss = selectFrom(CassandraAspect.TABLE_NAME) - .all() - .whereColumn(CassandraAspect.ASPECT_COLUMN).isEqualTo(literal(Constants.DATAHUB_RETENTION_ASPECT)) - .whereColumn(CassandraAspect.VERSION_COLUMN).isEqualTo(literal(Constants.ASPECT_LATEST_VERSION)) - .allowFiltering() - .build(); + SimpleStatement ss = + selectFrom(CassandraAspect.TABLE_NAME) + .all() + .whereColumn(CassandraAspect.ASPECT_COLUMN) + .isEqualTo(literal(Constants.DATAHUB_RETENTION_ASPECT)) + .whereColumn(CassandraAspect.VERSION_COLUMN) + .isEqualTo(literal(Constants.ASPECT_LATEST_VERSION)) + .allowFiltering() + .build(); ResultSet rs = _cqlSession.execute(ss); return rs.all().stream() .map(CassandraAspect::rowToEntityAspect) - .collect(Collectors.toMap( - EntityAspect::getUrn, - aspect -> RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, aspect.getMetadata()))); + .collect( + Collectors.toMap( + EntityAspect::getUrn, + aspect -> + RecordUtils.toRecordTemplate( + DataHubRetentionConfig.class, aspect.getMetadata()))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java index c0aef268e14c9..b02ee0170354e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/AspectStorageValidationUtil.java @@ -1,30 +1,30 @@ package com.linkedin.metadata.entity.ebean; +import static io.ebean.Expr.ne; + import com.linkedin.metadata.Constants; import io.ebean.Database; import io.ebean.SqlQuery; import io.ebean.SqlRow; - import java.util.List; -import static io.ebean.Expr.ne; - - public class AspectStorageValidationUtil { - private AspectStorageValidationUtil() { - - } + private AspectStorageValidationUtil() {} public static long getV1RowCount(Database server) { return server.find(EbeanAspectV1.class).findCount(); } /** - * Get the number of rows created not by the DataHub system actor (urn:li:corpuser:__datahub_system) + * Get the number of rows created not by the DataHub system actor + * (urn:li:corpuser:__datahub_system) */ public static long getV2NonSystemRowCount(Database server) { - return server.find(EbeanAspectV2.class).where(ne("createdby", Constants.SYSTEM_ACTOR)).findCount(); + return server + .find(EbeanAspectV2.class) + .where(ne("createdby", 
Constants.SYSTEM_ACTOR))
+        .findCount();
   }
 
   public static boolean checkV2TableExists(Database server) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
index c16c98b34f3eb..b2b47c1d5ba32 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectDao.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.entity.ebean;
 
+import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION;
+
 import com.codahale.metrics.MetricRegistry;
 import com.datahub.util.exception.ModelConversionException;
 import com.datahub.util.exception.RetryLimitReached;
@@ -19,8 +21,8 @@
 import com.linkedin.metadata.query.ListResultMetadata;
 import com.linkedin.metadata.search.utils.QueryUtils;
 import com.linkedin.metadata.utils.metrics.MetricUtils;
-import io.ebean.DuplicateKeyException;
 import io.ebean.Database;
+import io.ebean.DuplicateKeyException;
 import io.ebean.ExpressionList;
 import io.ebean.Junction;
 import io.ebean.PagedList;
@@ -45,14 +47,10 @@
 import java.util.stream.Stream;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-
 import javax.persistence.PersistenceException;
 import javax.persistence.Table;
-
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION;
-
 @Slf4j
 public class EbeanAspectDao implements AspectDao, AspectMigrationsDao {
 
@@ -64,8 +62,10 @@ public class EbeanAspectDao implements AspectDao, AspectMigrationsDao {
   // while its storage is being migrated
   private boolean _canWrite = true;
 
-  // Why 375? From tuning, this seems to be about the largest size we can get without having ebean batch issues.
-  // This may be able to be moved up, 375 is a bit conservative. However, we should be careful to tweak this without
+  // Why 375? From tuning, this seems to be about the largest size we can get without having ebean
+  // batch issues.
+  // This may be able to be moved up, 375 is a bit conservative. However, we should be careful
+  // about tweaking this without
   // more testing.
   private int _queryKeysCount = 375; // 0 means no pagination on keys
@@ -79,8 +79,7 @@ public void setWritable(boolean canWrite) {
   }
 
   /**
-   * Return the {@link Database} server instance used for customized queries.
-   * Only used in tests.
+   * Return the {@link Database} server instance used for customized queries. Only used in tests.
    */
   public Database getServer() {
     return _server;
@@ -96,8 +95,9 @@ private boolean validateConnection() {
       return true;
     }
     if (!AspectStorageValidationUtil.checkV2TableExists(_server)) {
-      log.error("GMS is on a newer version than your storage layer. Please refer to "
-          + "https://datahubproject.io/docs/advanced/no-code-upgrade to view the upgrade guide.");
+      log.error(
+          "GMS is on a newer version than your storage layer. 
Please refer to " + + "https://datahubproject.io/docs/advanced/no-code-upgrade to view the upgrade guide."); _canWrite = false; return false; } else { @@ -106,24 +106,22 @@ private boolean validateConnection() { } } - @Override public long saveLatestAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nullable final String oldAspectMetadata, - @Nullable final String oldActor, - @Nullable final String oldImpersonator, - @Nullable final Timestamp oldTime, - @Nullable final String oldSystemMetadata, - @Nonnull final String newAspectMetadata, - @Nonnull final String newActor, - @Nullable final String newImpersonator, - @Nonnull final Timestamp newTime, - @Nullable final String newSystemMetadata, - final Long nextVersion - ) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nullable final String oldAspectMetadata, + @Nullable final String oldActor, + @Nullable final String oldImpersonator, + @Nullable final Timestamp oldTime, + @Nullable final String oldSystemMetadata, + @Nonnull final String newAspectMetadata, + @Nonnull final String newActor, + @Nullable final String newImpersonator, + @Nonnull final Timestamp newTime, + @Nullable final String newSystemMetadata, + final Long nextVersion) { validateConnection(); if (!_canWrite) { @@ -133,27 +131,47 @@ public long saveLatestAspect( long largestVersion = ASPECT_LATEST_VERSION; if (oldAspectMetadata != null && oldTime != null) { largestVersion = nextVersion; - saveAspect(tx, urn, aspectName, oldAspectMetadata, oldActor, oldImpersonator, oldTime, oldSystemMetadata, largestVersion, true); + saveAspect( + tx, + urn, + aspectName, + oldAspectMetadata, + oldActor, + oldImpersonator, + oldTime, + oldSystemMetadata, + largestVersion, + true); } // Save newValue as the latest version (v0) - saveAspect(tx, urn, aspectName, newAspectMetadata, newActor, newImpersonator, newTime, newSystemMetadata, ASPECT_LATEST_VERSION, oldAspectMetadata == null); + saveAspect( + tx, + urn, + aspectName, + newAspectMetadata, + newActor, + newImpersonator, + newTime, + newSystemMetadata, + ASPECT_LATEST_VERSION, + oldAspectMetadata == null); return largestVersion; } @Override public void saveAspect( - @Nullable Transaction tx, - @Nonnull final String urn, - @Nonnull final String aspectName, - @Nonnull final String aspectMetadata, - @Nonnull final String actor, - @Nullable final String impersonator, - @Nonnull final Timestamp timestamp, - @Nonnull final String systemMetadata, - final long version, - final boolean insert) { + @Nullable Transaction tx, + @Nonnull final String urn, + @Nonnull final String aspectName, + @Nonnull final String aspectMetadata, + @Nonnull final String actor, + @Nullable final String impersonator, + @Nonnull final Timestamp timestamp, + @Nonnull final String systemMetadata, + final long version, + final boolean insert) { validateConnection(); @@ -171,12 +189,14 @@ public void saveAspect( } @Override - public void saveAspect(@Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { + public void saveAspect( + @Nullable Transaction tx, @Nonnull final EntityAspect aspect, final boolean insert) { EbeanAspectV2 ebeanAspect = EbeanAspectV2.fromEntityAspect(aspect); saveEbeanAspect(tx, ebeanAspect, insert); } - private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, final boolean insert) { + private void saveEbeanAspect( + @Nullable Transaction tx, @Nonnull final EbeanAspectV2 ebeanAspect, 
final boolean insert) { validateConnection(); if (insert) { _server.insert(ebeanAspect, tx); @@ -186,17 +206,22 @@ private void saveEbeanAspect(@Nullable Transaction tx, @Nonnull final EbeanAspec } @Override - public Map> getLatestAspects(@Nonnull Map> urnAspects) { + public Map> getLatestAspects( + @Nonnull Map> urnAspects) { validateConnection(); - List keys = urnAspects.entrySet().stream() - .flatMap(entry -> entry.getValue().stream() - .map(aspect -> new EbeanAspectV2.PrimaryKey(entry.getKey(), aspect, ASPECT_LATEST_VERSION)) - ).collect(Collectors.toList()); + List keys = + urnAspects.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream() + .map( + aspect -> + new EbeanAspectV2.PrimaryKey( + entry.getKey(), aspect, ASPECT_LATEST_VERSION))) + .collect(Collectors.toList()); - List results = _server.find(EbeanAspectV2.class) - .where().idIn(keys) - .findList(); + List results = _server.find(EbeanAspectV2.class).where().idIn(keys).findList(); return toUrnAspectMap(results); } @@ -204,7 +229,8 @@ public Map> getLatestAspects(@Nonnull Map batchGet(@Nonnull final Set keys) { + public Map batchGet( + @Nonnull final Set keys) { validateConnection(); if (keys.isEmpty()) { return Collections.emptyMap(); } - final Set ebeanKeys = keys.stream().map(EbeanAspectV2.PrimaryKey::fromAspectIdentifier).collect(Collectors.toSet()); + final Set ebeanKeys = + keys.stream() + .map(EbeanAspectV2.PrimaryKey::fromAspectIdentifier) + .collect(Collectors.toSet()); final List records; if (_queryKeysCount == 0) { records = batchGet(ebeanKeys, ebeanKeys.size()); } else { records = batchGet(ebeanKeys, _queryKeysCount); } - return records.stream().collect(Collectors.toMap(record -> record.getKey().toAspectIdentifier(), EbeanAspectV2::toEntityAspect)); + return records.stream() + .collect( + Collectors.toMap( + record -> record.getKey().toAspectIdentifier(), EbeanAspectV2::toEntityAspect)); } /** - * BatchGet that allows pagination on keys to avoid large queries. - * TODO: can further improve by running the sub queries in parallel + * BatchGet that allows pagination on keys to avoid large queries. TODO: can further improve by + * running the sub queries in parallel * * @param keys a set of keys with urn, aspect and version * @param keysCount the max number of keys for each sub query */ @Nonnull - private List batchGet(@Nonnull final Set keys, final int keysCount) { + private List batchGet( + @Nonnull final Set keys, final int keysCount) { validateConnection(); int position = 0; final int totalPageCount = QueryUtils.getTotalPageCount(keys.size(), keysCount); - final List finalResult = batchGetUnion(new ArrayList<>(keys), keysCount, position); + final List finalResult = + batchGetUnion(new ArrayList<>(keys), keysCount, position); while (QueryUtils.hasMore(position, keysCount, totalPageCount)) { position += keysCount; - final List oneStatementResult = batchGetUnion(new ArrayList<>(keys), keysCount, position); + final List oneStatementResult = + batchGetUnion(new ArrayList<>(keys), keysCount, position); finalResult.addAll(oneStatementResult); } @@ -291,8 +334,8 @@ private List batchGet(@Nonnull final Set batchGetUnion( - @Nonnull final List keys, - final int keysCount, - final int position) { + @Nonnull final List keys, final int keysCount, final int position) { validateConnection(); - // Build one SELECT per key and then UNION ALL the results. This can be much more performant than OR'ing the + // Build one SELECT per key and then UNION ALL the results. 
This can be much more performant + // than OR'ing the // conditions together. Our query will look like: // SELECT * FROM metadata_aspect WHERE urn = 'urn0' AND aspect = 'aspect0' AND version = 0 // UNION ALL // SELECT * FROM metadata_aspect WHERE urn = 'urn0' AND aspect = 'aspect1' AND version = 0 // ... - // Note: UNION ALL should be safe and more performant than UNION. We're selecting the entire entity key (as well + // Note: UNION ALL should be safe and more performant than UNION. We're selecting the entire + // entity key (as well // as data), so each result should be unique. No need to deduplicate. - // Another note: ebean doesn't support UNION ALL, so we need to manually build the SQL statement ourselves. + // Another note: ebean doesn't support UNION ALL, so we need to manually build the SQL statement + // ourselves. final StringBuilder sb = new StringBuilder(); final int end = Math.min(keys.size(), position + keysCount); final Map params = new HashMap<>(); for (int index = position; index < end; index++) { - sb.append(batchGetSelect( - index - position, - keys.get(index).getUrn(), - keys.get(index).getAspect(), - keys.get(index).getVersion(), - params)); + sb.append( + batchGetSelect( + index - position, + keys.get(index).getUrn(), + keys.get(index).getAspect(), + keys.get(index).getVersion(), + params)); if (index != end - 1) { sb.append(" UNION ALL "); } } - final RawSql rawSql = RawSqlBuilder.parse(sb.toString()) - .columnMapping(EbeanAspectV2.URN_COLUMN, "key.urn") - .columnMapping(EbeanAspectV2.ASPECT_COLUMN, "key.aspect") - .columnMapping(EbeanAspectV2.VERSION_COLUMN, "key.version") - .create(); + final RawSql rawSql = + RawSqlBuilder.parse(sb.toString()) + .columnMapping(EbeanAspectV2.URN_COLUMN, "key.urn") + .columnMapping(EbeanAspectV2.ASPECT_COLUMN, "key.aspect") + .columnMapping(EbeanAspectV2.VERSION_COLUMN, "key.version") + .create(); final Query query = _server.find(EbeanAspectV2.class).setRawSql(rawSql); @@ -373,23 +420,24 @@ public ListResult listUrns( validateConnection(); final String urnPrefixMatcher = "urn:li:" + entityName + ":%"; - final PagedList pagedList = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.KEY_ID) - .where() - .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) - .setFirstRow(start) - .setMaxRows(pageSize) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .findPagedList(); + final PagedList pagedList = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.KEY_ID) + .where() + .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) + .setFirstRow(start) + .setMaxRows(pageSize) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .findPagedList(); - final List urns = pagedList - .getList() - .stream() - .map(entry -> entry.getKey().getUrn()) - .collect(Collectors.toList()); + final List urns = + pagedList.getList().stream() + .map(entry -> entry.getKey().getUrn()) + .collect(Collectors.toList()); return toListResult(urns, null, pagedList, start); } @@ -397,7 +445,9 @@ public ListResult listUrns( @Nonnull @Override public Integer countAspect(@Nonnull String aspectName, @Nullable String urnLike) { - ExpressionList exp = _server.find(EbeanAspectV2.class) + ExpressionList exp = + _server + .find(EbeanAspectV2.class) .select(EbeanAspectV2.KEY_ID) .where() .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) @@ -412,7 +462,9 @@ public 
Integer countAspect(@Nonnull String aspectName, @Nullable String urnLike) @Nonnull @Override public PagedList getPagedAspects(final RestoreIndicesArgs args) { - ExpressionList exp = _server.find(EbeanAspectV2.class) + ExpressionList exp = + _server + .find(EbeanAspectV2.class) .select(EbeanAspectV2.ALL_COLUMNS) .where() .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); @@ -425,24 +477,26 @@ public PagedList getPagedAspects(final RestoreIndicesArgs args) { if (args.urnLike != null) { exp = exp.like(EbeanAspectV2.URN_COLUMN, args.urnLike); } - return exp.orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .orderBy() - .asc(EbeanAspectV2.ASPECT_COLUMN) - .setFirstRow(args.start) - .setMaxRows(args.batchSize) - .findPagedList(); + return exp.orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .orderBy() + .asc(EbeanAspectV2.ASPECT_COLUMN) + .setFirstRow(args.start) + .setMaxRows(args.batchSize) + .findPagedList(); } @Override @Nonnull public Stream streamAspects(String entityName, String aspectName) { - ExpressionList exp = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%"); + ExpressionList exp = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .like(EbeanAspectV2.URN_COLUMN, "urn:li:" + entityName + ":%"); return exp.query().findStream().map(EbeanAspectV2::toEntityAspect); } @@ -450,14 +504,16 @@ public Stream streamAspects(String entityName, String aspectName) @Nonnull public Iterable listAllUrns(int start, int pageSize) { validateConnection(); - PagedList ebeanAspects = _server.find(EbeanAspectV2.class) - .setDistinct(true) - .select(EbeanAspectV2.URN_COLUMN) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .setFirstRow(start) - .setMaxRows(pageSize) - .findPagedList(); + PagedList ebeanAspects = + _server + .find(EbeanAspectV2.class) + .setDistinct(true) + .select(EbeanAspectV2.URN_COLUMN) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .setFirstRow(start) + .setMaxRows(pageSize) + .findPagedList(); return ebeanAspects.getList().stream().map(EbeanAspectV2::getUrn).collect(Collectors.toList()); } @@ -473,21 +529,27 @@ public ListResult listAspectMetadata( validateConnection(); final String urnPrefixMatcher = "urn:li:" + entityName + ":%"; - final PagedList pagedList = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) - .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) - .eq(EbeanAspectV2.VERSION_COLUMN, version) - .setFirstRow(start) - .setMaxRows(pageSize) - .orderBy() - .asc(EbeanAspectV2.URN_COLUMN) - .findPagedList(); + final PagedList pagedList = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .like(EbeanAspectV2.URN_COLUMN, urnPrefixMatcher) + .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) + .eq(EbeanAspectV2.VERSION_COLUMN, version) + .setFirstRow(start) + .setMaxRows(pageSize) + .orderBy() + .asc(EbeanAspectV2.URN_COLUMN) + .findPagedList(); - final List aspects = pagedList.getList().stream().map(EbeanAspectV2::getMetadata).collect(Collectors.toList()); - final ListResultMetadata listResultMetadata = toListResultMetadata(pagedList.getList().stream().map( - EbeanAspectDao::toExtraInfo).collect(Collectors.toList())); + final List 
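A hypothetical caller of the new streamAspects method above; the entity and aspect names are invented, and dao stands for a wired-up EbeanAspectDao.

final class StreamSketch {
  // Streams every latest-version "status" aspect of dataset entities and counts them.
  static long countDatasetStatusAspects(EbeanAspectDao dao) {
    return dao.streamAspects("dataset", "status").count();
  }
}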
aspects = + pagedList.getList().stream().map(EbeanAspectV2::getMetadata).collect(Collectors.toList()); + final ListResultMetadata listResultMetadata = + toListResultMetadata( + pagedList.getList().stream() + .map(EbeanAspectDao::toExtraInfo) + .collect(Collectors.toList())); return toListResult(aspects, listResultMetadata, pagedList, start); } @@ -504,21 +566,26 @@ public ListResult listLatestAspectMetadata( @Override @Nonnull - public T runInTransactionWithRetry(@Nonnull final Function block, final int maxTransactionRetry) { + public T runInTransactionWithRetry( + @Nonnull final Function block, final int maxTransactionRetry) { return runInTransactionWithRetry(block, null, maxTransactionRetry); } @Override @Nonnull - public T runInTransactionWithRetry(@Nonnull final Function block, @Nullable AspectsBatch batch, - final int maxTransactionRetry) { + public T runInTransactionWithRetry( + @Nonnull final Function block, + @Nullable AspectsBatch batch, + final int maxTransactionRetry) { validateConnection(); int retryCount = 0; Exception lastException = null; T result = null; do { - try (Transaction transaction = _server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction = + _server.beginTransaction( + TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); result = block.apply(transaction); transaction.commit(); @@ -526,8 +593,15 @@ public T runInTransactionWithRetry(@Nonnull final Function b break; } catch (PersistenceException exception) { if (exception instanceof DuplicateKeyException) { - if (batch != null && batch.getItems().stream().allMatch(a -> a.getAspectName().equals(a.getEntitySpec().getKeyAspectSpec().getName()))) { - log.warn("Skipping DuplicateKeyException retry since aspect is the key aspect. {}", batch.getUrnAspectsMap().keySet()); + if (batch != null + && batch.getItems().stream() + .allMatch( + a -> + a.getAspectName() + .equals(a.getEntitySpec().getKeyAspectSpec().getName()))) { + log.warn( + "Skipping DuplicateKeyException retry since aspect is the key aspect. {}", + batch.getUrnAspectsMap().keySet()); continue; } } @@ -540,7 +614,8 @@ public T runInTransactionWithRetry(@Nonnull final Function b if (lastException != null) { MetricUtils.counter(MetricRegistry.name(this.getClass(), "txFailedAfterRetries")).inc(); - throw new RetryLimitReached("Failed to add after " + maxTransactionRetry + " retries", lastException); + throw new RetryLimitReached( + "Failed to add after " + maxTransactionRetry + " retries", lastException); } return result; @@ -549,7 +624,9 @@ public T runInTransactionWithRetry(@Nonnull final Function b @Override public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { validateConnection(); - final List result = _server.find(EbeanAspectV2.class) + final List result = + _server + .find(EbeanAspectV2.class) .where() .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) .eq(EbeanAspectV2.ASPECT_COLUMN, aspectName) @@ -561,37 +638,35 @@ public long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspec return result.isEmpty() ? 
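A hedged usage sketch of runInTransactionWithRetry above: each attempt runs in a fresh REPEATABLE_READ transaction, and PersistenceExceptions are retried up to the given limit. The read inside the block is only illustrative.

final class RetrySketch {
  static long latestVersionWithRetry(EbeanAspectDao dao, String urn, String aspectName) {
    // Three attempts; note that a DuplicateKeyException on a key aspect is
    // skipped rather than retried, as the catch block above shows.
    return dao.runInTransactionWithRetry(tx -> dao.getMaxVersion(urn, aspectName), 3);
  }
}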
-1 : result.get(0).getVersion(); } - public Map> getNextVersions(@Nonnull Map> urnAspects) { + public Map> getNextVersions( + @Nonnull Map> urnAspects) { validateConnection(); - Junction queryJunction = _server.find(EbeanAspectV2.class) + Junction queryJunction = + _server + .find(EbeanAspectV2.class) .select("urn, aspect, max(version)") .where() .in("urn", urnAspects.keySet()) .or(); ExpressionList exp = null; - for (Map.Entry> entry: urnAspects.entrySet()) { + for (Map.Entry> entry : urnAspects.entrySet()) { if (exp == null) { - exp = queryJunction.and() - .eq("urn", entry.getKey()) - .in("aspect", entry.getValue()) - .endAnd(); + exp = queryJunction.and().eq("urn", entry.getKey()).in("aspect", entry.getValue()).endAnd(); } else { - exp = exp.and() - .eq("urn", entry.getKey()) - .in("aspect", entry.getValue()) - .endAnd(); + exp = exp.and().eq("urn", entry.getKey()).in("aspect", entry.getValue()).endAnd(); } } Map> result = new HashMap<>(); // Default next version 0 - urnAspects.forEach((key, value) -> { - Map defaultNextVersion = new HashMap<>(); - value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); - result.put(key, defaultNextVersion); - }); + urnAspects.forEach( + (key, value) -> { + Map defaultNextVersion = new HashMap<>(); + value.forEach(aspectName -> defaultNextVersion.put(aspectName, 0L)); + result.put(key, defaultNextVersion); + }); if (exp == null) { return result; @@ -599,7 +674,7 @@ public Map> getNextVersions(@Nonnull Map dbResults = exp.endOr().findIds(); - for (EbeanAspectV2.PrimaryKey key: dbResults) { + for (EbeanAspectV2.PrimaryKey key : dbResults) { if (result.get(key.getUrn()).get(key.getAspect()) <= key.getVersion()) { result.get(key.getUrn()).put(key.getAspect(), key.getVersion() + 1L); } @@ -615,7 +690,9 @@ private ListResult toListResult( @Nonnull final PagedList pagedList, @Nullable final Integer start) { final int nextStart = - (start != null && pagedList.hasNext()) ? start + pagedList.getList().size() : ListResult.INVALID_NEXT_START; + (start != null && pagedList.hasNext()) + ? 
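A worked example of the default-then-bump logic in getNextVersions above, with assumed values:

import java.util.HashMap;
import java.util.Map;

final class NextVersionSketch {
  static Map<String, Long> example() {
    Map<String, Long> next = new HashMap<>();
    next.put("ownership", 0L);      // seeded default when no versioned row exists
    long maxStored = 2L;            // assumed max(version) row returned by the query
    if (next.get("ownership") <= maxStored) {
      next.put("ownership", maxStored + 1L); // the next insert gets version 3
    }
    return next;
  }
}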
start + pagedList.getList().size() + : ListResult.INVALID_NEXT_START; return ListResult.builder() // Format .values(values) @@ -667,32 +744,44 @@ private ListResultMetadata toListResultMetadata(@Nonnull final List e @Override @Nonnull - public List getAspectsInRange(@Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis) { + public List getAspectsInRange( + @Nonnull Urn urn, Set aspectNames, long startTimeMillis, long endTimeMillis) { validateConnection(); - List ebeanAspects = _server.find(EbeanAspectV2.class) - .select(EbeanAspectV2.ALL_COLUMNS) - .where() - .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) - .in(EbeanAspectV2.ASPECT_COLUMN, aspectNames) - .inRange(EbeanAspectV2.CREATED_ON_COLUMN, new Timestamp(startTimeMillis), new Timestamp(endTimeMillis)) - .findList(); + List ebeanAspects = + _server + .find(EbeanAspectV2.class) + .select(EbeanAspectV2.ALL_COLUMNS) + .where() + .eq(EbeanAspectV2.URN_COLUMN, urn.toString()) + .in(EbeanAspectV2.ASPECT_COLUMN, aspectNames) + .inRange( + EbeanAspectV2.CREATED_ON_COLUMN, + new Timestamp(startTimeMillis), + new Timestamp(endTimeMillis)) + .findList(); return ebeanAspects.stream().map(EbeanAspectV2::toEntityAspect).collect(Collectors.toList()); } private static Map toAspectMap(Set beans) { - return beans.stream().map(bean -> Map.entry(bean.getAspect(), bean)) - .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); + return beans.stream() + .map(bean -> Map.entry(bean.getAspect(), bean)) + .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toEntityAspect())); } - private static Map> toUrnAspectMap(Collection beans) { + private static Map> toUrnAspectMap( + Collection beans) { return beans.stream() - .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) - .entrySet().stream() - .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.groupingBy(EbeanAspectV2::getUrn, Collectors.toSet())) + .entrySet() + .stream() + .map(e -> Map.entry(e.getKey(), toAspectMap(e.getValue()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } - private static String buildMetricName(EntitySpec entitySpec, AspectSpec aspectSpec, String status) { - return String.join(MetricUtils.DELIMITER, List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); + private static String buildMetricName( + EntitySpec entitySpec, AspectSpec aspectSpec, String status) { + return String.join( + MetricUtils.DELIMITER, + List.of(entitySpec.getName(), aspectSpec.getName(), status.toLowerCase())); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java index 3d2a4a5ae051c..648b7cd6a65b0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV1.java @@ -16,10 +16,7 @@ import lombok.NonNull; import lombok.Setter; - -/** - * Schema definition for the legacy aspect table. - */ +/** Schema definition for the legacy aspect table. */ @Getter @Setter @Entity @@ -38,9 +35,7 @@ public class EbeanAspectV1 extends Model { public static final String CREATED_BY_COLUMN = "createdBy"; public static final String CREATED_FOR_COLUMN = "createdFor"; - /** - * Key for an aspect in the table. - */ + /** Key for an aspect in the table. 
*/ @Embeddable @Getter @AllArgsConstructor @@ -65,10 +60,7 @@ public static class PrimaryKey { private long version; } - @NonNull - @EmbeddedId - @Index - protected PrimaryKey key; + @NonNull @EmbeddedId @Index protected PrimaryKey key; @NonNull @Lob diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java index 3215542ffd347..71e52ed403b9b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanAspectV2.java @@ -19,10 +19,7 @@ import lombok.NonNull; import lombok.Setter; - -/** - * Schema definition for the new aspect table. - */ +/** Schema definition for the new aspect table. */ @Getter @Setter @NoArgsConstructor @@ -45,9 +42,7 @@ public class EbeanAspectV2 extends Model { public static final String SYSTEM_METADATA_COLUMN = "systemmetadata"; - /** - * Key for an aspect in the table. - */ + /** Key for an aspect in the table. */ @Embeddable @Getter @AllArgsConstructor @@ -80,10 +75,7 @@ public EntityAspectIdentifier toAspectIdentifier() { } } - @NonNull - @EmbeddedId - @Index - protected PrimaryKey key; + @NonNull @EmbeddedId @Index protected PrimaryKey key; @NonNull @Column(name = URN_COLUMN, length = 500, nullable = false) @@ -115,9 +107,24 @@ public EntityAspectIdentifier toAspectIdentifier() { @Column(name = SYSTEM_METADATA_COLUMN, nullable = true) protected String systemMetadata; - public EbeanAspectV2(String urn, String aspect, long version, String metadata, Timestamp createdOn, String createdBy, - String createdFor, String systemMetadata) { - this(new PrimaryKey(urn, aspect, version), urn, aspect, version, metadata, createdOn, createdBy, createdFor, + public EbeanAspectV2( + String urn, + String aspect, + long version, + String metadata, + Timestamp createdOn, + String createdBy, + String createdFor, + String systemMetadata) { + this( + new PrimaryKey(urn, aspect, version), + urn, + aspect, + version, + metadata, + createdOn, + createdBy, + createdFor, systemMetadata); } @@ -131,8 +138,7 @@ public EntityAspect toEntityAspect() { getSystemMetadata(), getCreatedOn(), getCreatedBy(), - getCreatedFor() - ); + getCreatedFor()); } public static EbeanAspectV2 fromEntityAspect(EntityAspect aspect) { @@ -144,7 +150,6 @@ public static EbeanAspectV2 fromEntityAspect(EntityAspect aspect) { aspect.getCreatedOn(), aspect.getCreatedBy(), aspect.getCreatedFor(), - aspect.getSystemMetadata() - ); + aspect.getSystemMetadata()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java index d94ec1fa7ae2b..e12f0f8f1b5d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/EbeanRetentionService.java @@ -1,7 +1,8 @@ package com.linkedin.metadata.entity.ebean; -import com.linkedin.common.urn.Urn; import com.datahub.util.RecordUtils; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; @@ -13,7 +14,6 @@ import com.linkedin.retention.Retention; import com.linkedin.retention.TimeBasedRetention; import 
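A hypothetical construction of a latest-version row via the telescoping EbeanAspectV2 constructor above; the field values are invented, and version 0 is the "latest" slot per the ASPECT_LATEST_VERSION convention used throughout this patch.

import java.sql.Timestamp;

final class RowSketch {
  static EbeanAspectV2 latestRow() {
    return new EbeanAspectV2(
        "urn:li:corpuser:datahub",                 // urn (assumed)
        "corpUserInfo",                            // aspect name (assumed)
        0L,                                        // ASPECT_LATEST_VERSION
        "{}",                                      // serialized metadata (assumed empty)
        new Timestamp(System.currentTimeMillis()), // createdOn
        "urn:li:corpuser:__datahub_system",        // createdBy (assumed)
        null,                                      // createdFor
        null);                                     // systemMetadata
  }
}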
com.linkedin.retention.VersionBasedRetention; -import com.linkedin.metadata.Constants; import io.ebean.Database; import io.ebean.Expression; import io.ebean.ExpressionList; @@ -36,7 +36,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class EbeanRetentionService extends RetentionService { @@ -53,22 +52,26 @@ public EntityService getEntityService() { @Override protected AspectsBatch buildAspectsBatch(List mcps) { - return AspectsBatchImpl.builder() - .mcps(mcps, _entityService.getEntityRegistry()) - .build(); + return AspectsBatchImpl.builder().mcps(mcps, _entityService.getEntityRegistry()).build(); } @Override @WithSpan protected void applyRetention(List retentionContexts) { - List nonEmptyContexts = retentionContexts.stream() - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()).collect(Collectors.toList()); + List nonEmptyContexts = + retentionContexts.stream() + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) + .collect(Collectors.toList()); // Only run delete if at least one of the retention policies are applicable if (!nonEmptyContexts.isEmpty()) { - ExpressionList deleteQuery = _server.find(EbeanAspectV2.class) + ExpressionList deleteQuery = + _server + .find(EbeanAspectV2.class) .where() .ne(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) .or(); @@ -78,25 +81,32 @@ protected void applyRetention(List retentionContexts) { Retention retentionPolicy = context.getRetentionPolicy().get(); if (retentionPolicy.hasVersion()) { - boolean appliedVersion = getVersionBasedRetentionQuery(context.getUrn(), context.getAspectName(), - retentionPolicy.getVersion(), context.getMaxVersion()) - .map(expr -> - deleteQuery.and() - .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) - .add(expr) - .endAnd() - ).isPresent(); + boolean appliedVersion = + getVersionBasedRetentionQuery( + context.getUrn(), + context.getAspectName(), + retentionPolicy.getVersion(), + context.getMaxVersion()) + .map( + expr -> + deleteQuery + .and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(expr) + .endAnd()) + .isPresent(); applied = appliedVersion || applied; } if (retentionPolicy.hasTime()) { - deleteQuery.and() - .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) - .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) - .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) - .endAnd(); + deleteQuery + .and() + .eq(EbeanAspectV2.URN_COLUMN, context.getUrn().toString()) + .eq(EbeanAspectV2.ASPECT_COLUMN, context.getAspectName()) + .add(getTimeBasedRetentionQuery(retentionPolicy.getTime())) + .endAnd(); applied = true; } } @@ -108,13 +118,15 @@ protected void applyRetention(List retentionContexts) { } private long getMaxVersion(@Nonnull final String urn, @Nonnull final String aspectName) { - List result = _server.find(EbeanAspectV2.class) - .where() - .eq("urn", urn) - .eq("aspect", aspectName) - .orderBy() - .desc("version") - .findList(); + List result = + _server + .find(EbeanAspectV2.class) + .where() + .eq("urn", urn) + .eq("aspect", aspectName) + .orderBy() + .desc("version") + .findList(); if (result.size() == 0) { return -1; } @@ -126,57 +138,63 @@ private Optional getVersionBasedRetentionQuery( @Nonnull String aspectName, 
@Nonnull final VersionBasedRetention retention, @Nonnull final Optional maxVersionFromUpdate) { - long largestVersion = maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn.toString(), aspectName)); + long largestVersion = + maxVersionFromUpdate.orElseGet(() -> getMaxVersion(urn.toString(), aspectName)); if (largestVersion < retention.getMaxVersions()) { return Optional.empty(); } return Optional.of( - new SimpleExpression(EbeanAspectV2.VERSION_COLUMN, Op.LT, largestVersion - retention.getMaxVersions() + 1)); + new SimpleExpression( + EbeanAspectV2.VERSION_COLUMN, Op.LT, largestVersion - retention.getMaxVersions() + 1)); } private Expression getTimeBasedRetentionQuery(@Nonnull final TimeBasedRetention retention) { - return new SimpleExpression(EbeanAspectV2.CREATED_ON_COLUMN, Op.LT, + return new SimpleExpression( + EbeanAspectV2.CREATED_ON_COLUMN, + Op.LT, new Timestamp(_clock.millis() - retention.getMaxAgeInSeconds() * 1000)); } private void applyRetention( - PagedList rows, - Map retentionPolicyMap, - BulkApplyRetentionResult applyRetentionResult - ) { + PagedList rows, + Map retentionPolicyMap, + BulkApplyRetentionResult applyRetentionResult) { try (Transaction transaction = _server.beginTransaction(TxScope.required())) { transaction.setBatchMode(true); transaction.setBatchSize(_batchSize); - List retentionContexts = rows.getList().stream() + List retentionContexts = + rows.getList().stream() .filter(row -> row.getVersion() != 0) - .map(row -> { - // 1. Extract an Entity type from the entity Urn - Urn urn; - try { - urn = Urn.createFromString(row.getUrn()); - } catch (Exception e) { - log.error("Failed to serialize urn {}", row.getUrn(), e); - return null; - } - - final String aspectNameFromRecord = row.getAspect(); - log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); - // Get the retention policies to apply from the local retention policy map - Optional retentionPolicy = getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() - .map(key -> retentionPolicyMap.get(key.toString())) - .filter(Objects::nonNull) - .findFirst() - .map(DataHubRetentionConfig::getRetention); - - return RetentionService.RetentionContext.builder() + .map( + row -> { + // 1. 
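To make the version cutoff above concrete, a worked example with assumed numbers:

final class RetentionCutoffSketch {
  // With maxVersions = 3 and largestVersion = 10, rows with version < 10 - 3 + 1 = 8
  // are deleted: versions 1..7 go, versions 8, 9, 10 stay, and version 0 (the
  // latest) is untouched because the delete query above already excludes
  // ASPECT_LATEST_VERSION via the .ne(...) filter.
  static long cutoff(long largestVersion, long maxVersions) {
    return largestVersion - maxVersions + 1;
  }
}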
Extract an Entity type from the entity Urn + Urn urn; + try { + urn = Urn.createFromString(row.getUrn()); + } catch (Exception e) { + log.error("Failed to serialize urn {}", row.getUrn(), e); + return null; + } + + final String aspectNameFromRecord = row.getAspect(); + log.debug("Handling urn {} aspect {}", row.getUrn(), row.getAspect()); + // Get the retention policies to apply from the local retention policy map + Optional retentionPolicy = + getRetentionKeys(urn.getEntityType(), aspectNameFromRecord).stream() + .map(key -> retentionPolicyMap.get(key.toString())) + .filter(Objects::nonNull) + .findFirst() + .map(DataHubRetentionConfig::getRetention); + + return RetentionService.RetentionContext.builder() .urn(urn) .aspectName(aspectNameFromRecord) .retentionPolicy(retentionPolicy) .maxVersion(Optional.of(row.getVersion())) .build(); - }) + }) .filter(Objects::nonNull) .collect(Collectors.toList()); @@ -209,7 +227,8 @@ public void batchApplyRetention(@Nullable String entityName, @Nullable String as } @Override - public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args) { + public BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args) { long startTime = System.currentTimeMillis(); BulkApplyRetentionResult result = new BulkApplyRetentionResult(); @@ -223,13 +242,18 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe result.timeRetentionPolicyMapMs = System.currentTimeMillis() - startTime; startTime = System.currentTimeMillis(); - //only supports version based retention for batch apply - //find urn, aspect pair where distinct versions > 20 to apply retention policy - Query query = _server.find(EbeanAspectV2.class) + // only supports version based retention for batch apply + // find urn, aspect pair where distinct versions > 20 to apply retention policy + Query query = + _server + .find(EbeanAspectV2.class) .setDistinct(true) - .select(String.format( - "%s, %s, count(%s)", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, EbeanAspectV2.VERSION_COLUMN) - ); + .select( + String.format( + "%s, %s, count(%s)", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.VERSION_COLUMN)); ExpressionList exp = null; if (args.urn != null || args.aspectName != null) { exp = query.where(); @@ -246,8 +270,8 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe exp = exp.having(); } - PagedList rows = exp - .gt(String.format("count(%s)", EbeanAspectV2.VERSION_COLUMN), args.attemptWithVersion) + PagedList rows = + exp.gt(String.format("count(%s)", EbeanAspectV2.VERSION_COLUMN), args.attemptWithVersion) .setFirstRow(args.start) .setMaxRows(args.count) .findPagedList(); @@ -262,7 +286,8 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe log.error("Failed to serialize urn {}", row.getUrn(), e); continue; } - PagedList rowsToChange = queryCandidates(row.getUrn(), null, row.getAspect()) + PagedList rowsToChange = + queryCandidates(row.getUrn(), null, row.getAspect()) .setFirstRow(args.start) .setMaxRows(args.count) .findPagedList(); @@ -275,25 +300,39 @@ public BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRe } private Map getAllRetentionPolicies() { - return _server.find(EbeanAspectV2.class) - .select(String.format("%s, %s, %s", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, - EbeanAspectV2.METADATA_COLUMN)) + return _server + .find(EbeanAspectV2.class) + .select( + String.format( + 
"%s, %s, %s", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.METADATA_COLUMN)) .where() .eq(EbeanAspectV2.ASPECT_COLUMN, Constants.DATAHUB_RETENTION_ASPECT) .eq(EbeanAspectV2.VERSION_COLUMN, Constants.ASPECT_LATEST_VERSION) .findList() .stream() - .collect(Collectors.toMap(EbeanAspectV2::getUrn, - row -> RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, row.getMetadata()))); + .collect( + Collectors.toMap( + EbeanAspectV2::getUrn, + row -> + RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, row.getMetadata()))); } - private ExpressionList queryCandidates(@Nullable String urn, - @Nullable String entityName, @Nullable String aspectName) { - ExpressionList query = _server.find(EbeanAspectV2.class) - .setDistinct(true) - .select(String.format("%s, %s, max(%s)", EbeanAspectV2.URN_COLUMN, EbeanAspectV2.ASPECT_COLUMN, - EbeanAspectV2.VERSION_COLUMN)) - .where(); + private ExpressionList queryCandidates( + @Nullable String urn, @Nullable String entityName, @Nullable String aspectName) { + ExpressionList query = + _server + .find(EbeanAspectV2.class) + .setDistinct(true) + .select( + String.format( + "%s, %s, max(%s)", + EbeanAspectV2.URN_COLUMN, + EbeanAspectV2.ASPECT_COLUMN, + EbeanAspectV2.VERSION_COLUMN)) + .where(); if (urn != null) { query.eq(EbeanAspectV2.URN_COLUMN, urn); } @@ -306,10 +345,13 @@ private ExpressionList queryCandidates(@Nullable String urn, return query; } - private PagedList getPagedAspects(@Nullable String entityName, @Nullable String aspectName, - final int start, final int pageSize) { - return queryCandidates(null, entityName, aspectName).orderBy( - EbeanAspectV2.URN_COLUMN + ", " + EbeanAspectV2.ASPECT_COLUMN) + private PagedList getPagedAspects( + @Nullable String entityName, + @Nullable String aspectName, + final int start, + final int pageSize) { + return queryCandidates(null, entityName, aspectName) + .orderBy(EbeanAspectV2.URN_COLUMN + ", " + EbeanAspectV2.ASPECT_COLUMN) .setFirstRow(start) .setMaxRows(pageSize) .findPagedList(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java index ca5e070bc5ca7..11261afdaa0b2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/AspectsBatchImpl.java @@ -5,63 +5,67 @@ import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Getter @Builder(toBuilder = true) public class AspectsBatchImpl implements AspectsBatch { - private final List items; - - public static class AspectsBatchImplBuilder { - /** - * Just one aspect record template - * @param data aspect data - * @return builder - */ - public AspectsBatchImplBuilder one(AbstractBatchItem data) { - this.items = List.of(data); - return this; - } + private final List items; - public AspectsBatchImplBuilder mcps(List mcps, EntityRegistry entityRegistry) { - this.items = mcps.stream().map(mcp -> { - if (mcp.getChangeType().equals(ChangeType.PATCH)) { - return 
PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); - } else { - return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); - } - }).collect(Collectors.toList()); - return this; - } + public static class AspectsBatchImplBuilder { + /** + * Just one aspect record template + * + * @param data aspect data + * @return builder + */ + public AspectsBatchImplBuilder one(AbstractBatchItem data) { + this.items = List.of(data); + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - AspectsBatchImpl that = (AspectsBatchImpl) o; - return Objects.equals(items, that.items); + public AspectsBatchImplBuilder mcps( + List mcps, EntityRegistry entityRegistry) { + this.items = + mcps.stream() + .map( + mcp -> { + if (mcp.getChangeType().equals(ChangeType.PATCH)) { + return PatchBatchItem.PatchBatchItemBuilder.build(mcp, entityRegistry); + } else { + return UpsertBatchItem.UpsertBatchItemBuilder.build(mcp, entityRegistry); + } + }) + .collect(Collectors.toList()); + return this; } + } - @Override - public int hashCode() { - return Objects.hash(items); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "AspectsBatchImpl{" + "items=" + items + '}'; + if (o == null || getClass() != o.getClass()) { + return false; } + AspectsBatchImpl that = (AspectsBatchImpl) o; + return Objects.equals(items, that.items); + } + + @Override + public int hashCode() { + return Objects.hash(items); + } + + @Override + public String toString() { + return "AspectsBatchImpl{" + "items=" + items + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java index cc0b3d915b407..f9b1e340d5541 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/PatchBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean.transactions; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -20,169 +22,195 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Objects; - -import static com.linkedin.metadata.Constants.*; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Getter @Builder(toBuilder = true) public class PatchBatchItem extends AbstractBatchItem { - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, 
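A hypothetical caller of the builder above, mirroring buildAspectsBatch in EbeanRetentionService: PATCH proposals become PatchBatchItems and everything else becomes UpsertBatchItems.

import com.linkedin.metadata.entity.transactions.AspectsBatch;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.mxe.MetadataChangeProposal;
import java.util.List;

final class BatchSketch {
  static AspectsBatch toBatch(List<MetadataChangeProposal> mcps, EntityRegistry registry) {
    return AspectsBatchImpl.builder().mcps(mcps, registry).build();
  }
}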
MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final Patch patch; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.PATCH; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { + UpsertBatchItem.UpsertBatchItemBuilder builder = + UpsertBatchItem.builder() + .urn(getUrn()) + .aspectName(getAspectName()) + .metadataChangeProposal(getMetadataChangeProposal()) + .systemMetadata(getSystemMetadata()); + + AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + + RecordTemplate currentValue = + recordTemplate != null + ? recordTemplate + : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + + if (currentValue == null) { + // Attempting to patch a value to an aspect which has no default value and no existing value. + throw new UnsupportedOperationException( + String.format( + "Patch not supported for aspect with name %s. " + + "Default aspect is required because no aspect currently exists for urn %s.", + getAspectName(), getUrn())); } - // urn an urn associated with the new aspect - private final Urn urn; - // aspectName name of the aspect being inserted - private final String aspectName; - private final SystemMetadata systemMetadata; - - private final Patch patch; - - private final MetadataChangeProposal metadataChangeProposal; - - // derived - private final EntitySpec entitySpec; - private final AspectSpec aspectSpec; - - @Override - public ChangeType getChangeType() { - return ChangeType.PATCH; + try { + builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); + } catch (JsonProcessingException | JsonPatchException e) { + throw new RuntimeException(e); } - @Override - public void validateUrn(EntityRegistry entityRegistry, Urn urn) { - EntityUtils.validateUrn(entityRegistry, urn); - } + return builder.build(entityRegistry); + } + + public static class PatchBatchItemBuilder { - public UpsertBatchItem applyPatch(EntityRegistry entityRegistry, RecordTemplate recordTemplate) { - UpsertBatchItem.UpsertBatchItemBuilder builder = UpsertBatchItem.builder() - .urn(getUrn()) - .aspectName(getAspectName()) - .metadataChangeProposal(getMetadataChangeProposal()) - .systemMetadata(getSystemMetadata()); + public PatchBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); - AspectTemplateEngine aspectTemplateEngine = entityRegistry.getAspectTemplateEngine(); + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); - RecordTemplate currentValue = recordTemplate != null ? 
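The static block above raises Jackson's default string-length limit so large serialized aspects can be read. A self-contained sketch of the same pattern, with a hard-coded limit standing in for the environment-variable lookup (assuming a Jackson version that ships StreamReadConstraints, as the class's imports imply):

import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;

final class MapperLimitSketch {
  static ObjectMapper newMapper(int maxStringLength) {
    ObjectMapper mapper = new ObjectMapper();
    mapper
        .getFactory()
        .setStreamReadConstraints(
            StreamReadConstraints.builder().maxStringLength(maxStringLength).build());
    return mapper;
  }
}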
recordTemplate - : aspectTemplateEngine.getDefaultTemplate(getAspectName()); + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); - if (currentValue == null) { - // Attempting to patch a value to an aspect which has no default value and no existing value. - throw new UnsupportedOperationException(String.format("Patch not supported for aspect with name %s. " - + "Default aspect is required because no aspect currently exists for urn %s.", getAspectName(), getUrn())); - } + if (this.patch == null) { + throw new IllegalArgumentException( + String.format("Missing patch to apply. Aspect: %s", this.aspectSpec.getName())); + } - try { - builder.aspect(aspectTemplateEngine.applyPatch(currentValue, getPatch(), getAspectSpec())); - } catch (JsonProcessingException | JsonPatchException e) { - throw new RuntimeException(e); - } + return new PatchBatchItem( + this.urn, + this.aspectName, + generateSystemMetadataIfEmpty(this.systemMetadata), + this.patch, + this.metadataChangeProposal, + this.entitySpec, + this.aspectSpec); + } - return builder.build(entityRegistry); + public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { + throw new UnsupportedOperationException( + "ChangeType not supported: " + + mcp.getChangeType() + + " for aspect " + + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + PatchBatchItemBuilder builder = + PatchBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .patch(convertToJsonPatch(mcp)); + + return builder.build(entityRegistry); } - public static class PatchBatchItemBuilder { - - public PatchBatchItem build(EntityRegistry entityRegistry) { - EntityUtils.validateUrn(entityRegistry, this.urn); - log.debug("entity type = {}", this.urn.getEntityType()); - - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); - log.debug("entity spec = {}", this.entitySpec); - - aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); - log.debug("aspect spec = {}", this.aspectSpec); - - if (this.patch == null) { - throw new IllegalArgumentException(String.format("Missing patch to apply. 
Aspect: %s", - this.aspectSpec.getName())); - } - - return new PatchBatchItem(this.urn, this.aspectName, generateSystemMetadataIfEmpty(this.systemMetadata), - this.patch, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); - } - - public static PatchBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { - log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); - AspectSpec aspectSpec = validateAspect(mcp, entitySpec); - - if (!isValidChangeType(ChangeType.PATCH, aspectSpec)) { - throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() - + " for aspect " + mcp.getAspectName()); - } - - Urn urn = mcp.getEntityUrn(); - if (urn == null) { - urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); - } - - PatchBatchItemBuilder builder = PatchBatchItem.builder() - .urn(urn) - .aspectName(mcp.getAspectName()) - .systemMetadata(mcp.getSystemMetadata()) - .metadataChangeProposal(mcp) - .patch(convertToJsonPatch(mcp)); - - return builder.build(entityRegistry); - } - - private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { - this.entitySpec = entitySpec; - return this; - } - - private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { - this.aspectSpec = aspectSpec; - return this; - } - - private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { - JsonNode json; - try { - json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); - return JsonPatch.fromJson(json); - } catch (IOException e) { - throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); - } - } + private PatchBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - PatchBatchItem that = (PatchBatchItem) o; - return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && patch.equals(that.patch); + private PatchBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; } - @Override - public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, patch); + private static Patch convertToJsonPatch(MetadataChangeProposal mcp) { + JsonNode json; + try { + json = OBJECT_MAPPER.readTree(mcp.getAspect().getValue().asString(StandardCharsets.UTF_8)); + return JsonPatch.fromJson(json); + } catch (IOException e) { + throw new IllegalArgumentException("Invalid JSON Patch: " + mcp.getAspect().getValue(), e); + } } + } - @Override - public String toString() { - return "PatchBatchItem{" - + "urn=" + urn - + ", aspectName='" + aspectName - + '\'' - + ", systemMetadata=" + systemMetadata - + ", patch=" + patch - + '}'; + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; } + PatchBatchItem that = (PatchBatchItem) o; + return urn.equals(that.urn) + && aspectName.equals(that.aspectName) + && Objects.equals(systemMetadata, that.systemMetadata) + && patch.equals(that.patch); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, patch); + } + + @Override + public String toString() { + return "PatchBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + 
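convertToJsonPatch above parses the MCP's aspect bytes as an RFC 6902 JSON Patch document. A minimal assumed payload and the same parse call, assuming the fge json-patch types this class appears to import (the "description" path and value are invented):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.fge.jsonpatch.JsonPatch;
import java.io.IOException;

final class PatchPayloadSketch {
  static JsonPatch parseExample() throws IOException {
    JsonNode json =
        new ObjectMapper()
            .readTree("[{\"op\": \"add\", \"path\": \"/description\", \"value\": \"hello\"}]");
    return JsonPatch.fromJson(json);
  }
}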
aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", patch=" + + patch + + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java index bd58d267a8308..c232e4846f7d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/transactions/UpsertBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.ebean.transactions; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.datahub.util.exception.ModelConversionException; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -16,158 +18,172 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.sql.Timestamp; +import java.util.Objects; import lombok.Builder; import lombok.Getter; import lombok.extern.slf4j.Slf4j; -import java.sql.Timestamp; -import java.util.Objects; - -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - @Slf4j @Getter @Builder(toBuilder = true) public class UpsertBatchItem extends AbstractBatchItem { - // urn an urn associated with the new aspect - private final Urn urn; - // aspectName name of the aspect being inserted - private final String aspectName; - private final SystemMetadata systemMetadata; - - private final RecordTemplate aspect; - - private final MetadataChangeProposal metadataChangeProposal; - - // derived - private final EntitySpec entitySpec; - private final AspectSpec aspectSpec; - - @Override - public ChangeType getChangeType() { - return ChangeType.UPSERT; + // urn an urn associated with the new aspect + private final Urn urn; + // aspectName name of the aspect being inserted + private final String aspectName; + private final SystemMetadata systemMetadata; + + private final RecordTemplate aspect; + + private final MetadataChangeProposal metadataChangeProposal; + + // derived + private final EntitySpec entitySpec; + private final AspectSpec aspectSpec; + + @Override + public ChangeType getChangeType() { + return ChangeType.UPSERT; + } + + @Override + public void validateUrn(EntityRegistry entityRegistry, Urn urn) { + EntityUtils.validateUrn(entityRegistry, urn); + } + + public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { + EntityAspect latest = new EntityAspect(); + latest.setAspect(getAspectName()); + latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); + latest.setUrn(getUrn().toString()); + latest.setVersion(ASPECT_LATEST_VERSION); + latest.setCreatedOn(new Timestamp(auditStamp.getTime())); + latest.setCreatedBy(auditStamp.getActor().toString()); + return latest; + } + + public static class UpsertBatchItemBuilder { + + public UpsertBatchItem build(EntityRegistry entityRegistry) { + EntityUtils.validateUrn(entityRegistry, this.urn); + log.debug("entity type = {}", this.urn.getEntityType()); + + entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); + log.debug("entity spec = {}", this.entitySpec); + + aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); + log.debug("aspect spec = {}", this.aspectSpec); + + ValidationUtils.validateRecordTemplate( + entityRegistry, this.entitySpec, this.urn, this.aspect); + + return new UpsertBatchItem( + this.urn, + this.aspectName, + 
AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), + this.aspect, + this.metadataChangeProposal, + this.entitySpec, + this.aspectSpec); } - @Override - public void validateUrn(EntityRegistry entityRegistry, Urn urn) { - EntityUtils.validateUrn(entityRegistry, urn); + public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { + if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { + throw new IllegalArgumentException( + "Invalid MCP, this class only supports change type of UPSERT."); + } + + log.debug("entity type = {}", mcp.getEntityType()); + EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); + AspectSpec aspectSpec = validateAspect(mcp, entitySpec); + + if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { + throw new UnsupportedOperationException( + "ChangeType not supported: " + + mcp.getChangeType() + + " for aspect " + + mcp.getAspectName()); + } + + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + + UpsertBatchItemBuilder builder = + UpsertBatchItem.builder() + .urn(urn) + .aspectName(mcp.getAspectName()) + .systemMetadata(mcp.getSystemMetadata()) + .metadataChangeProposal(mcp) + .aspect(convertToRecordTemplate(mcp, aspectSpec)); + + return builder.build(entityRegistry); } - public EntityAspect toLatestEntityAspect(AuditStamp auditStamp) { - EntityAspect latest = new EntityAspect(); - latest.setAspect(getAspectName()); - latest.setMetadata(EntityUtils.toJsonAspect(getAspect())); - latest.setUrn(getUrn().toString()); - latest.setVersion(ASPECT_LATEST_VERSION); - latest.setCreatedOn(new Timestamp(auditStamp.getTime())); - latest.setCreatedBy(auditStamp.getActor().toString()); - return latest; + private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { + this.entitySpec = entitySpec; + return this; } - public static class UpsertBatchItemBuilder { - - public UpsertBatchItem build(EntityRegistry entityRegistry) { - EntityUtils.validateUrn(entityRegistry, this.urn); - log.debug("entity type = {}", this.urn.getEntityType()); - - entitySpec(entityRegistry.getEntitySpec(this.urn.getEntityType())); - log.debug("entity spec = {}", this.entitySpec); - - aspectSpec(ValidationUtils.validate(this.entitySpec, this.aspectName)); - log.debug("aspect spec = {}", this.aspectSpec); - - ValidationUtils.validateRecordTemplate(entityRegistry, this.entitySpec, this.urn, this.aspect); - - return new UpsertBatchItem(this.urn, this.aspectName, AbstractBatchItem.generateSystemMetadataIfEmpty(this.systemMetadata), - this.aspect, this.metadataChangeProposal, this.entitySpec, this.aspectSpec); - } - - public static UpsertBatchItem build(MetadataChangeProposal mcp, EntityRegistry entityRegistry) { - if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { - throw new IllegalArgumentException("Invalid MCP, this class only supports change type of UPSERT."); - } - - log.debug("entity type = {}", mcp.getEntityType()); - EntitySpec entitySpec = entityRegistry.getEntitySpec(mcp.getEntityType()); - AspectSpec aspectSpec = validateAspect(mcp, entitySpec); - - if (!isValidChangeType(ChangeType.UPSERT, aspectSpec)) { - throw new UnsupportedOperationException("ChangeType not supported: " + mcp.getChangeType() - + " for aspect " + mcp.getAspectName()); - } - - Urn urn = mcp.getEntityUrn(); - if (urn == null) { - urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); - } - - UpsertBatchItemBuilder builder = UpsertBatchItem.builder() 
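A hypothetical use of toLatestEntityAspect above: stamp an upsert item with an audit stamp (the actor urn and time are invented) to obtain the version-0 row to persist. The import paths are assumptions based on the surrounding code.

import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.metadata.entity.EntityAspect;

final class LatestRowSketch {
  static EntityAspect toLatest(UpsertBatchItem item) throws Exception {
    AuditStamp stamp =
        new AuditStamp()
            .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system"))
            .setTime(System.currentTimeMillis());
    return item.toLatestEntityAspect(stamp);
  }
}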
- .urn(urn) - .aspectName(mcp.getAspectName()) - .systemMetadata(mcp.getSystemMetadata()) - .metadataChangeProposal(mcp) - .aspect(convertToRecordTemplate(mcp, aspectSpec)); - - return builder.build(entityRegistry); - } - - private UpsertBatchItemBuilder entitySpec(EntitySpec entitySpec) { - this.entitySpec = entitySpec; - return this; - } - - private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { - this.aspectSpec = aspectSpec; - return this; - } - - private static RecordTemplate convertToRecordTemplate(MetadataChangeProposal mcp, AspectSpec aspectSpec) { - RecordTemplate aspect; - try { - aspect = GenericRecordUtils.deserializeAspect(mcp.getAspect().getValue(), - mcp.getAspect().getContentType(), aspectSpec); - ValidationUtils.validateOrThrow(aspect); - } catch (ModelConversionException e) { - throw new RuntimeException( - String.format("Could not deserialize %s for aspect %s", mcp.getAspect().getValue(), - mcp.getAspectName())); - } - log.debug("aspect = {}", aspect); - return aspect; - } + private UpsertBatchItemBuilder aspectSpec(AspectSpec aspectSpec) { + this.aspectSpec = aspectSpec; + return this; } - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - UpsertBatchItem that = (UpsertBatchItem) o; - return urn.equals(that.urn) && aspectName.equals(that.aspectName) && Objects.equals(systemMetadata, that.systemMetadata) && aspect.equals(that.aspect); + private static RecordTemplate convertToRecordTemplate( + MetadataChangeProposal mcp, AspectSpec aspectSpec) { + RecordTemplate aspect; + try { + aspect = + GenericRecordUtils.deserializeAspect( + mcp.getAspect().getValue(), mcp.getAspect().getContentType(), aspectSpec); + ValidationUtils.validateOrThrow(aspect); + } catch (ModelConversionException e) { + throw new RuntimeException( + String.format( + "Could not deserialize %s for aspect %s", + mcp.getAspect().getValue(), mcp.getAspectName())); + } + log.debug("aspect = {}", aspect); + return aspect; } + } - @Override - public int hashCode() { - return Objects.hash(urn, aspectName, systemMetadata, aspect); + @Override + public boolean equals(Object o) { + if (this == o) { + return true; } - - @Override - public String toString() { - return "UpsertBatchItem{" - + "urn=" - + urn - + ", aspectName='" - + aspectName - + '\'' - + ", systemMetadata=" - + systemMetadata - + ", aspect=" - + aspect - + '}'; + if (o == null || getClass() != o.getClass()) { + return false; } + UpsertBatchItem that = (UpsertBatchItem) o; + return urn.equals(that.urn) + && aspectName.equals(that.aspectName) + && Objects.equals(systemMetadata, that.systemMetadata) + && aspect.equals(that.aspect); + } + + @Override + public int hashCode() { + return Objects.hash(urn, aspectName, systemMetadata, aspect); + } + + @Override + public String toString() { + return "UpsertBatchItem{" + + "urn=" + + urn + + ", aspectName='" + + aspectName + + '\'' + + ", systemMetadata=" + + systemMetadata + + ", aspect=" + + aspect + + '}'; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java index 4c4bfb41867ef..ad8fbfdf2eddd 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/EntityRegistryUrnValidator.java @@ -24,12 +24,10 @@ import 
java.util.stream.Collectors; import lombok.Setter; - public class EntityRegistryUrnValidator implements Validator { private final EntityRegistry _entityRegistry; - @Setter - private EntitySpec currentEntitySpec = null; + @Setter private EntitySpec currentEntitySpec = null; public EntityRegistryUrnValidator(EntityRegistry entityRegistry) { _entityRegistry = entityRegistry; @@ -43,45 +41,61 @@ public void validate(ValidatorContext context) { } protected void validateUrnField(ValidatorContext context) { - if (Type.TYPEREF.equals(context.dataElement().getSchema().getType()) && ((NamedDataSchema) context.dataElement() - .getSchema()).getName().endsWith("Urn")) { + if (Type.TYPEREF.equals(context.dataElement().getSchema().getType()) + && ((NamedDataSchema) context.dataElement().getSchema()).getName().endsWith("Urn")) { try { // Validate Urn matches field type and that it generates a valid key String urnStr = (String) context.dataElement().getValue(); Urn urn = Urn.createFromString(urnStr); EntitySpec entitySpec = _entityRegistry.getEntitySpec(urn.getEntityType()); - RecordTemplate entityKey = EntityKeyUtils.convertUrnToEntityKey(urn, - entitySpec.getKeyAspectSpec()); + RecordTemplate entityKey = + EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); NamedDataSchema namedDataSchema = ((NamedDataSchema) context.dataElement().getSchema()); Class urnClass; try { - String schemaName = ((Map) namedDataSchema.getProperties().get("java")).get("class"); + String schemaName = + ((Map) namedDataSchema.getProperties().get("java")).get("class"); urnClass = (Class) Class.forName(schemaName); urnClass.getDeclaredMethod("createFromString", String.class).invoke(null, urnStr); } catch (ClassNotFoundException | ClassCastException | NoSuchMethodException e) { - throw new IllegalArgumentException("Unrecognized Urn class: " + namedDataSchema.getName(), e); + throw new IllegalArgumentException( + "Unrecognized Urn class: " + namedDataSchema.getName(), e); } catch (InvocationTargetException | IllegalAccessException e) { - throw new IllegalArgumentException("Unable to instantiate urn type: " + namedDataSchema.getName() + " with urn: " + urnStr, e); + throw new IllegalArgumentException( + "Unable to instantiate urn type: " + + namedDataSchema.getName() + + " with urn: " + + urnStr, + e); } // Validate generic Urn is valid entity type for relationship destination PathSpec fieldPath = context.dataElement().getSchemaPathSpec(); - List relationshipSpecs = currentEntitySpec.getRelationshipFieldSpecs().stream().filter(relationshipFieldSpec -> - relationshipFieldSpec.getPath().equals(fieldPath)) - .collect(Collectors.toList()); + List relationshipSpecs = + currentEntitySpec.getRelationshipFieldSpecs().stream() + .filter(relationshipFieldSpec -> relationshipFieldSpec.getPath().equals(fieldPath)) + .collect(Collectors.toList()); if (!relationshipSpecs.isEmpty()) { for (RelationshipFieldSpec relationshipFieldSpec : relationshipSpecs) { - boolean isValidDestination = relationshipFieldSpec.getValidDestinationTypes().stream() - .anyMatch(destinationType -> destinationType.equals(urn.getEntityType())); + boolean isValidDestination = + relationshipFieldSpec.getValidDestinationTypes().stream() + .anyMatch(destinationType -> destinationType.equals(urn.getEntityType())); if (!isValidDestination) { throw new IllegalArgumentException( - "Entity type for urn: " + urn + " is not a valid destination for field path: " + fieldPath); + "Entity type for urn: " + + urn + + " is not a valid destination for field path: " + + 
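The reflective step in validateUrnField above, reduced to a self-contained sketch with an assumed Urn subclass name and urn string:

final class UrnReflectionSketch {
  // Resolves the Urn subclass named in the schema's "java" -> "class" property
  // and invokes its static createFromString factory, as validateUrnField does,
  // to confirm the string actually parses as that urn type.
  static void checkParses(String urnClassName, String urnStr) throws Exception {
    Class<?> urnClass = Class.forName(urnClassName);
    urnClass.getDeclaredMethod("createFromString", String.class).invoke(null, urnStr);
  }
  // e.g. checkParses("com.linkedin.common.urn.CorpuserUrn", "urn:li:corpuser:datahub");
}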
fieldPath); } } } } catch (URISyntaxException | IllegalArgumentException e) { - context.addResult(new Message(context.dataElement().path(), "\"Provided urn %s\" is invalid: %s", - context.dataElement().getValue(), e.getMessage())); + context.addResult( + new Message( + context.dataElement().path(), + "\"Provided urn %s\" is invalid: %s", + context.dataElement().getValue(), + e.getMessage())); context.setHasFix(false); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java index e7934bc47be3f..12e39f0349143 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/RecordTemplateValidator.java @@ -15,45 +15,41 @@ @Slf4j public class RecordTemplateValidator { - private static final ValidationOptions DEFAULT_VALIDATION_OPTIONS = new ValidationOptions( - RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT, - CoercionMode.NORMAL, - UnrecognizedFieldMode.DISALLOW - ); - - private static final UrnValidator URN_VALIDATOR = new UrnValidator(); - - /** - * Validates a {@link RecordTemplate} and applies a function if validation fails - * - * @param record record to be validated.ailure. - */ - public static void validate(RecordTemplate record, Consumer onValidationFailure) { - final ValidationResult result = ValidateDataAgainstSchema.validate( - record, - DEFAULT_VALIDATION_OPTIONS, - URN_VALIDATOR); - if (!result.isValid()) { - onValidationFailure.accept(result); - } + private static final ValidationOptions DEFAULT_VALIDATION_OPTIONS = + new ValidationOptions( + RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT, + CoercionMode.NORMAL, + UnrecognizedFieldMode.DISALLOW); + + private static final UrnValidator URN_VALIDATOR = new UrnValidator(); + + /** + * Validates a {@link RecordTemplate} and applies a function if validation fails + * + * @param record record to be validated.ailure. + */ + public static void validate( + RecordTemplate record, Consumer onValidationFailure) { + final ValidationResult result = + ValidateDataAgainstSchema.validate(record, DEFAULT_VALIDATION_OPTIONS, URN_VALIDATOR); + if (!result.isValid()) { + onValidationFailure.accept(result); } - - /** - * Validates a {@link RecordTemplate} and applies a function if validation fails - * - * @param record record to be validated.ailure. - */ - public static void validate(RecordTemplate record, Consumer onValidationFailure, Validator validator) { - final ValidationResult result = ValidateDataAgainstSchema.validate( - record, - DEFAULT_VALIDATION_OPTIONS, - validator); - if (!result.isValid()) { - onValidationFailure.accept(result); - } + } + + /** + * Validates a {@link RecordTemplate} and applies a function if validation fails + * + * @param record record to be validated.ailure. 
+ */ + public static void validate( + RecordTemplate record, Consumer onValidationFailure, Validator validator) { + final ValidationResult result = + ValidateDataAgainstSchema.validate(record, DEFAULT_VALIDATION_OPTIONS, validator); + if (!result.isValid()) { + onValidationFailure.accept(result); } + } - private RecordTemplateValidator() { - - } + private RecordTemplateValidator() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java index e0b026fa84d18..6a86a02a94449 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationException.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.entity.validation; -/** - * Exception thrown when a metadata record cannot be validated against its schema. - */ +/** Exception thrown when a metadata record cannot be validated against its schema. */ public class ValidationException extends RuntimeException { public ValidationException(final String message) { super(message); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java index 6182b27333cbb..7f23bacdc4758 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java @@ -7,27 +7,27 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Consumer; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class ValidationUtils { /** - * Validates a {@link RecordTemplate} and throws {@link com.linkedin.restli.server.RestLiServiceException} - * if validation fails. + * Validates a {@link RecordTemplate} and throws {@link + * com.linkedin.restli.server.RestLiServiceException} if validation fails. * * @param record record to be validated. */ public static void validateOrThrow(RecordTemplate record) { - RecordTemplateValidator.validate(record, validationResult -> { - throw new ValidationException( - String.format("Failed to validate record with class %s: %s", - record.getClass().getName(), - validationResult.getMessages().toString())); - }); + RecordTemplateValidator.validate( + record, + validationResult -> { + throw new ValidationException( + String.format( + "Failed to validate record with class %s: %s", + record.getClass().getName(), validationResult.getMessages().toString())); + }); } /** @@ -36,41 +36,51 @@ public static void validateOrThrow(RecordTemplate record) { * @param record record to be validated.ailure. 
*/ public static void validateOrWarn(RecordTemplate record) { - RecordTemplateValidator.validate(record, validationResult -> { - log.warn(String.format("Failed to validate record %s against its schema.", record)); - }); + RecordTemplateValidator.validate( + record, + validationResult -> { + log.warn(String.format("Failed to validate record %s against its schema.", record)); + }); } public static AspectSpec validate(EntitySpec entitySpec, String aspectName) { if (aspectName == null || aspectName.isEmpty()) { - throw new UnsupportedOperationException("Aspect name is required for create and update operations"); + throw new UnsupportedOperationException( + "Aspect name is required for create and update operations"); } AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { throw new RuntimeException( - String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); + String.format("Unknown aspect %s for entity %s", aspectName, entitySpec.getName())); } return aspectSpec; } - public static void validateRecordTemplate(EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { + public static void validateRecordTemplate( + EntityRegistry entityRegistry, EntitySpec entitySpec, Urn urn, RecordTemplate aspect) { EntityRegistryUrnValidator validator = new EntityRegistryUrnValidator(entityRegistry); validator.setCurrentEntitySpec(entitySpec); - Consumer resultFunction = validationResult -> { - throw new IllegalArgumentException("Invalid format for aspect: " + entitySpec.getName() + "\n Cause: " - + validationResult.getMessages()); }; - RecordTemplateValidator.validate(EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); + Consumer resultFunction = + validationResult -> { + throw new IllegalArgumentException( + "Invalid format for aspect: " + + entitySpec.getName() + + "\n Cause: " + + validationResult.getMessages()); + }; + RecordTemplateValidator.validate( + EntityUtils.buildKeyAspect(entityRegistry, urn), resultFunction, validator); RecordTemplateValidator.validate(aspect, resultFunction, validator); } - public static void validateRecordTemplate(EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { + public static void validateRecordTemplate( + EntityRegistry entityRegistry, Urn urn, RecordTemplate aspect) { EntitySpec entitySpec = entityRegistry.getEntitySpec(urn.getEntityType()); validateRecordTemplate(entityRegistry, entitySpec, urn, aspect); } - private ValidationUtils() { - } -} \ No newline at end of file + private ValidationUtils() {} +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java index 90e171d3c357e..becf86cdbe92f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EntityEventProducer.java @@ -4,22 +4,20 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.snapshot.Snapshot; -import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataAuditOperation; +import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. 
- */ +/** Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. */ public interface EntityEventProducer { /** - * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a - * new & previous Entity {@link Snapshot}. - * @param urn the urn associated with the entity changed + * Produces a {@link com.linkedin.mxe.MetadataAuditEvent} from a new & previous Entity {@link + * Snapshot}. + * + * @param urn the urn associated with the entity changed * @param oldSnapshot a {@link RecordTemplate} corresponding to the old snapshot. * @param newSnapshot a {@link RecordTemplate} corresponding to the new snapshot. * @param oldSystemMetadata @@ -31,12 +29,10 @@ void produceMetadataAuditEvent( @Nonnull final Snapshot newSnapshot, @Nullable SystemMetadata oldSystemMetadata, @Nullable SystemMetadata newSystemMetadata, - MetadataAuditOperation operation - ); + MetadataAuditOperation operation); /** - * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a - * new & previous aspect. + * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a new & previous aspect. * * @param urn the urn associated with the entity changed * @param aspectSpec aspect spec of the aspect being updated @@ -45,6 +41,5 @@ void produceMetadataAuditEvent( void produceMetadataChangeLog( @Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog - ); + @Nonnull final MetadataChangeLog metadataChangeLog); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java index ffadc07124727..a809c7f9a3e31 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/event/EventProducer.java @@ -12,61 +12,52 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. - */ +/** Interface implemented by producers of {@link com.linkedin.mxe.MetadataAuditEvent}s. */ public interface EventProducer { /** - * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a - * new & previous aspect. + * Produces a {@link com.linkedin.mxe.MetadataChangeLog} from a new & previous aspect. * * @param urn the urn associated with the entity changed * @param aspectSpec aspect spec of the aspect being updated * @param metadataChangeLog metadata change log to push into MCL kafka topic - * * @return A {@link Future} object that reports when the message has been produced. */ Future produceMetadataChangeLog( @Nonnull final Urn urn, @Nonnull AspectSpec aspectSpec, - @Nonnull final MetadataChangeLog metadataChangeLog - ); + @Nonnull final MetadataChangeLog metadataChangeLog); /** * Produces a {@link com.linkedin.mxe.MetadataChangeProposal} as an async update to an entity * * @param urn the urn associated with the change proposal. * @param metadataChangeProposal metadata change proposal to push into MCP kafka topic. - * * @return A {@link Future} object that reports when the message has been produced. */ @WithSpan - Future produceMetadataChangeProposal(@Nonnull final Urn urn, - @Nonnull MetadataChangeProposal metadataChangeProposal); + Future produceMetadataChangeProposal( + @Nonnull final Urn urn, @Nonnull MetadataChangeProposal metadataChangeProposal); /** * Produces a generic platform "event". 
* - * @param name the name, or type, of the event to produce, as defined in the {@link EntityRegistry}. - * @param key an optional partitioning key for the event. If not provided, the name of the event will be used. - * @param payload the event payload itself. This will be serialized to JSON and produced as a system event. - * + * @param name the name, or type, of the event to produce, as defined in the {@link + * EntityRegistry}. + * @param key an optional partitioning key for the event. If not provided, the name of the event + * will be used. + * @param payload the event payload itself. This will be serialized to JSON and produced as a + * system event. * @return A {@link Future} object that reports when the message has been produced. */ Future producePlatformEvent( - @Nonnull String name, - @Nullable String key, - @Nonnull PlatformEvent payload - ); + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent payload); /** - * Creates an entry on the history log of when the indices were last rebuilt with the latest configuration. + * Creates an entry on the history log of when the indices were last rebuilt with the latest + * configuration. * * @param event the history event to send to the DataHub Upgrade history topic */ - void produceDataHubUpgradeHistoryEvent( - @Nonnull DataHubUpgradeHistoryEvent event - ); + void produceDataHubUpgradeHistoryEvent(@Nonnull DataHubUpgradeHistoryEvent event); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java index 891844045b016..c54ba4a222b73 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/JavaGraphClient.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; + import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; import com.linkedin.common.EntityRelationships; @@ -14,54 +16,60 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; - - @Slf4j public class JavaGraphClient implements GraphClient { GraphService _graphService; + public JavaGraphClient(@Nonnull GraphService graphService) { this._graphService = graphService; } /** - * Returns a list of related entities for a given entity, set of edge types, and direction relative to the - * source node + * Returns a list of related entities for a given entity, set of edge types, and direction + * relative to the source node */ @Nonnull @Override - public EntityRelationships getRelatedEntities(String rawUrn, List relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count, String actor) { + public EntityRelationships getRelatedEntities( + String rawUrn, + List relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count, + String actor) { start = start == null ? 0 : start; count = count == null ? 
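// Usage sketch for the defaulting above: callers may pass null for start/count and get 0 and
// DEFAULT_PAGE_SIZE respectively. The urn, relationship type, actor value, and the graphClient
// instance (a JavaGraphClient) are illustrative assumptions, not taken from this patch.
EntityRelationships rels =
    graphClient.getRelatedEntities(
        "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users,PROD)",
        java.util.Collections.singletonList("DownstreamOf"),
        RelationshipDirection.INCOMING,
        null, // start -> defaults to 0
        null, // count -> defaults to DEFAULT_PAGE_SIZE
        "urn:li:corpuser:datahub");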
DEFAULT_PAGE_SIZE : count; RelatedEntitiesResult relatedEntitiesResult = - _graphService.findRelatedEntities(null, + _graphService.findRelatedEntities( + null, QueryUtils.newFilter("urn", rawUrn), null, EMPTY_FILTER, relationshipTypes, QueryUtils.newRelationshipFilter(EMPTY_FILTER, direction), start, - count - ); + count); - final EntityRelationshipArray entityArray = new EntityRelationshipArray( - relatedEntitiesResult.getEntities().stream().map( - entity -> { - try { - return new EntityRelationship() - .setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType()); - } catch (URISyntaxException e) { - throw new RuntimeException( - String.format("Failed to convert urnStr %s found in the Graph to an Urn object", entity.getUrn())); - } - } - ).collect(Collectors.toList()) - ); + final EntityRelationshipArray entityArray = + new EntityRelationshipArray( + relatedEntitiesResult.getEntities().stream() + .map( + entity -> { + try { + return new EntityRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to convert urnStr %s found in the Graph to an Urn object", + entity.getUrn())); + } + }) + .collect(Collectors.toList())); return new EntityRelationships() .setStart(relatedEntitiesResult.getStart()) @@ -71,14 +79,23 @@ public EntityRelationships getRelatedEntities(String rawUrn, List relati } /** - * Returns lineage relationships for given entity in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * Returns lineage relationships for given entity in the DataHub graph. Lineage relationship + * denotes whether an entity is directly upstream or downstream of another entity */ @Nonnull @Override - public EntityLineageResult getLineageEntities(String rawUrn, LineageDirection direction, @Nullable Integer start, - @Nullable Integer count, int maxHops, String actor) { - return _graphService.getLineage(UrnUtils.getUrn(rawUrn), direction, start != null ? start : 0, - count != null ? count : 100, maxHops); + public EntityLineageResult getLineageEntities( + String rawUrn, + LineageDirection direction, + @Nullable Integer start, + @Nullable Integer count, + int maxHops, + String actor) { + return _graphService.getLineage( + UrnUtils.getUrn(rawUrn), + direction, + start != null ? start : 0, + count != null ? 
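// Usage sketch for the lineage call above (values illustrative; graphClient as in the previous
// sketch): null start/count fall back to 0 and 100 here, and maxHops caps traversal depth.
EntityLineageResult upstreams =
    graphClient.getLineageEntities(
        "urn:li:dataset:(urn:li:dataPlatform:hive,fct_users,PROD)",
        LineageDirection.UPSTREAM,
        null, // start -> 0
        null, // count -> 100
        1,    // maxHops
        "urn:li:corpuser:datahub");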
count : 100, + maxHops); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java index 7a2f0825b31cc..bdf405fe36c07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.Siblings; @@ -20,9 +22,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class SiblingGraphService { @@ -31,58 +30,55 @@ public class SiblingGraphService { private final GraphService _graphService; @Nonnull - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops) { - return ValidationUtils.validateEntityLineageResult(getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - false, - new HashSet<>(), - null, - null), - _entityService); + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops) { + return ValidationUtils.validateEntityLineageResult( + getLineage( + entityUrn, direction, offset, count, maxHops, false, new HashSet<>(), null, null), + _entityService); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + *
<p>
Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - int offset, int count, int maxHops, boolean separateSiblings, @Nonnull Set visitedUrns, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops, + boolean separateSiblings, + @Nonnull Set visitedUrns, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { if (separateSiblings) { - return ValidationUtils.validateEntityLineageResult(_graphService.getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - startTimeMillis, - endTimeMillis), _entityService); + return ValidationUtils.validateEntityLineageResult( + _graphService.getLineage( + entityUrn, direction, offset, count, maxHops, startTimeMillis, endTimeMillis), + _entityService); } if (maxHops > 1) { throw new UnsupportedOperationException( - String.format("More than 1 hop is not supported for %s", this.getClass().getSimpleName())); + String.format( + "More than 1 hop is not supported for %s", this.getClass().getSimpleName())); } EntityLineageResult entityLineage = _graphService.getLineage( - entityUrn, - direction, - offset, - count, - maxHops, - startTimeMillis, - endTimeMillis); + entityUrn, direction, offset, count, maxHops, startTimeMillis, endTimeMillis); - Siblings siblingAspectOfEntity = (Siblings) _entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + Siblings siblingAspectOfEntity = + (Siblings) _entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); // if you have siblings, we want to fetch their lineage too and merge it in if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { @@ -104,19 +100,23 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi if (visitedUrns.contains(siblingUrn)) { continue; } - // need to call siblingGraphService to get sibling results for this sibling entity in case there is more than one sibling - EntityLineageResult nextEntityLineage = filterLineageResultFromSiblings(siblingUrn, allSiblingsInGroup, - getLineage( + // need to call siblingGraphService to get sibling results for this sibling entity in case + // there is more than one sibling + EntityLineageResult nextEntityLineage = + filterLineageResultFromSiblings( siblingUrn, - direction, - offset, - count, - maxHops, - false, - visitedUrns, - startTimeMillis, - endTimeMillis), - entityLineage); + allSiblingsInGroup, + getLineage( + siblingUrn, + direction, + offset, + count, + maxHops, + false, + visitedUrns, + startTimeMillis, + endTimeMillis), + entityLineage); // Update offset and count to fetch the correct number of edges from the next sibling node offset = Math.max(0, offset - nextEntityLineage.getTotal()); @@ -124,86 +124,116 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi entityLineage.setFiltered(getFiltered(entityLineage) + getFiltered(nextEntityLineage)); entityLineage = nextEntityLineage; - }; + } + ; } return ValidationUtils.validateEntityLineageResult(entityLineage, _entityService); } private int getFiltered(@Nullable EntityLineageResult entityLineageResult) { - return (entityLineageResult != null && entityLineageResult.getFiltered() != null ? 
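// Worked example for the offset adjustment above (numbers illustrative): a caller asking for
// offset=10 while the first sibling already reported total=15 edges continues with
// offset = Math.max(0, 10 - 15) = 0 on the next sibling, so page positions consumed by one
// sibling's lineage are not skipped a second time.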
entityLineageResult.getFiltered() : 0); + return (entityLineageResult != null && entityLineageResult.getFiltered() != null + ? entityLineageResult.getFiltered() + : 0); } - // takes a lineage result and removes any nodes that are siblings of some other node already in the result + // takes a lineage result and removes any nodes that are siblings of some other node already in + // the result private EntityLineageResult filterLineageResultFromSiblings( @Nonnull final Urn urn, @Nonnull final Set allSiblingsInGroup, @Nonnull final EntityLineageResult entityLineageResult, - @Nullable final EntityLineageResult existingResult - ) { + @Nullable final EntityLineageResult existingResult) { int numFiltered = 0; // 1) remove the source entities siblings from this entity's downstreams - final Map> partitionedFilteredRelationships = entityLineageResult.getRelationships() - .stream().collect(Collectors.partitioningBy( - lineageRelationship -> !allSiblingsInGroup.contains(lineageRelationship.getEntity()) - || lineageRelationship.getEntity().equals(urn))); + final Map> partitionedFilteredRelationships = + entityLineageResult.getRelationships().stream() + .collect( + Collectors.partitioningBy( + lineageRelationship -> + !allSiblingsInGroup.contains(lineageRelationship.getEntity()) + || lineageRelationship.getEntity().equals(urn))); numFiltered += partitionedFilteredRelationships.get(Boolean.FALSE).size(); - final List filteredRelationships = partitionedFilteredRelationships.get(Boolean.TRUE); + final List filteredRelationships = + partitionedFilteredRelationships.get(Boolean.TRUE); // 2) filter out existing lineage to avoid duplicates in our combined result - final Set existingUrns = existingResult != null - ? existingResult.getRelationships().stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()) - : new HashSet<>(); - - Map> partitionedUniqueFilteredRelationships = filteredRelationships.stream().collect( - Collectors.partitioningBy(lineageRelationship -> !existingUrns.contains(lineageRelationship.getEntity()))); + final Set existingUrns = + existingResult != null + ? existingResult.getRelationships().stream() + .map(LineageRelationship::getEntity) + .collect(Collectors.toSet()) + : new HashSet<>(); + + Map> partitionedUniqueFilteredRelationships = + filteredRelationships.stream() + .collect( + Collectors.partitioningBy( + lineageRelationship -> + !existingUrns.contains(lineageRelationship.getEntity()))); numFiltered += partitionedUniqueFilteredRelationships.get(Boolean.FALSE).size(); - List uniqueFilteredRelationships = partitionedUniqueFilteredRelationships.get(Boolean.TRUE); + List uniqueFilteredRelationships = + partitionedUniqueFilteredRelationships.get(Boolean.TRUE); // 3) combine this entity's lineage with the lineage we've already seen - final List combinedResults = Stream.concat( - uniqueFilteredRelationships.stream(), - existingResult != null ? existingResult.getRelationships().stream() : ImmutableList.of().stream()) - .collect(Collectors.toList()); + final List combinedResults = + Stream.concat( + uniqueFilteredRelationships.stream(), + existingResult != null + ? 
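// The filtering steps above lean on Collectors.partitioningBy, which splits a stream into a
// Map with exactly two keys (TRUE = kept, FALSE = filtered out, feeding numFiltered). A
// minimal self-contained illustration of that collector:
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class PartitioningSketch {
  public static void main(String[] args) {
    Map<Boolean, List<Integer>> parts =
        Stream.of(1, 2, 3, 4, 5).collect(Collectors.partitioningBy(n -> n % 2 == 0));
    System.out.println(parts.get(Boolean.TRUE));  // [2, 4]    -> the "kept" partition
    System.out.println(parts.get(Boolean.FALSE)); // [1, 3, 5] -> counted, like numFiltered
  }
}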
existingResult.getRelationships().stream() + : ImmutableList.of().stream()) + .collect(Collectors.toList()); // 4) fetch the siblings of each lineage result - final Set combinedResultUrns = combinedResults.stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()); + final Set combinedResultUrns = + combinedResults.stream().map(LineageRelationship::getEntity).collect(Collectors.toSet()); final Map> siblingAspects = _entityService.getLatestAspects(combinedResultUrns, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); - // 5) if you are not primary & your sibling is in the results, filter yourself out of the return set - Map> partitionedFilteredSiblings = combinedResults.stream().collect(Collectors.partitioningBy(result -> { - Optional optionalSiblingsAspect = siblingAspects.get(result.getEntity()).stream().filter( - aspect -> aspect instanceof Siblings - ).findAny(); - - if (optionalSiblingsAspect.isEmpty()) { - return true; - } - - final Siblings siblingsAspect = (Siblings) optionalSiblingsAspect.get(); - - if (siblingsAspect.isPrimary()) { - return true; - } - - // if you are not primary and your sibling exists in the result set, filter yourself out - return siblingsAspect.getSiblings().stream().noneMatch(combinedResultUrns::contains); - })); + // 5) if you are not primary & your sibling is in the results, filter yourself out of the return + // set + Map> partitionedFilteredSiblings = + combinedResults.stream() + .collect( + Collectors.partitioningBy( + result -> { + Optional optionalSiblingsAspect = + siblingAspects.get(result.getEntity()).stream() + .filter(aspect -> aspect instanceof Siblings) + .findAny(); + + if (optionalSiblingsAspect.isEmpty()) { + return true; + } + + final Siblings siblingsAspect = (Siblings) optionalSiblingsAspect.get(); + + if (siblingsAspect.isPrimary()) { + return true; + } + + // if you are not primary and your sibling exists in the result set, filter + // yourself out + return siblingsAspect.getSiblings().stream() + .noneMatch(combinedResultUrns::contains); + })); numFiltered += partitionedFilteredSiblings.get(Boolean.FALSE).size(); uniqueFilteredRelationships = partitionedFilteredSiblings.get(Boolean.TRUE); EntityLineageResult combinedLineageResult = new EntityLineageResult(); combinedLineageResult.setStart(entityLineageResult.getStart()); - combinedLineageResult.setRelationships(new LineageRelationshipArray(uniqueFilteredRelationships)); - combinedLineageResult.setTotal(entityLineageResult.getTotal() + (existingResult != null ? existingResult.getTotal() : 0)); + combinedLineageResult.setRelationships( + new LineageRelationshipArray(uniqueFilteredRelationships)); + combinedLineageResult.setTotal( + entityLineageResult.getTotal() + (existingResult != null ? 
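// Decision summary for the partitioning above (step 5): a relationship is kept when it has no
// Siblings aspect, or its Siblings aspect is marked primary, or none of its siblings appear in
// combinedResultUrns; only non-primary entities whose sibling is already present in the
// combined results are dropped and counted as filtered.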
existingResult.getTotal() : 0)); combinedLineageResult.setCount(uniqueFilteredRelationships.size()); - combinedLineageResult.setFiltered(numFiltered + getFiltered(existingResult) + getFiltered(entityLineageResult)); + combinedLineageResult.setFiltered( + numFiltered + getFiltered(existingResult) + getFiltered(entityLineageResult)); return ValidationUtils.validateEntityLineageResult(combinedLineageResult, _entityService); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java index dcef0f9f192ed..393297b64e0d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphExecutor.java @@ -6,93 +6,96 @@ import io.github.resilience4j.retry.Retry; import io.github.resilience4j.retry.RetryConfig; import io.grpc.StatusRuntimeException; -import lombok.extern.slf4j.Slf4j; - import java.time.Duration; import java.util.concurrent.ExecutionException; import java.util.function.Consumer; import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DgraphExecutor { - // requests are retried with an exponential randomized backoff - // wait 0.01s, 0.02s, 0.04s, 0.08s, ..., 10s, all ±50% - private static final Duration INITIAL_DURATION = Duration.ofMillis(10); - private static final Duration MAX_DURATION = Duration.ofSeconds(10); - private static final double BACKOFF_MULTIPLIER = 2.0; - private static final double RANDOMIZATION_FACTOR = 0.5; + // requests are retried with an exponential randomized backoff + // wait 0.01s, 0.02s, 0.04s, 0.08s, ..., 10s, all ±50% + private static final Duration INITIAL_DURATION = Duration.ofMillis(10); + private static final Duration MAX_DURATION = Duration.ofSeconds(10); + private static final double BACKOFF_MULTIPLIER = 2.0; + private static final double RANDOMIZATION_FACTOR = 0.5; - private final DgraphClient _client; - private final Retry _retry; + private final DgraphClient _client; + private final Retry _retry; - public DgraphExecutor(DgraphClient client, int maxAttempts) { - this._client = client; + public DgraphExecutor(DgraphClient client, int maxAttempts) { + this._client = client; - RetryConfig config = RetryConfig.custom() - .intervalFunction(IntervalFunction.ofExponentialRandomBackoff(INITIAL_DURATION, BACKOFF_MULTIPLIER, RANDOMIZATION_FACTOR, MAX_DURATION)) - .retryOnException(DgraphExecutor::isRetryableException) - .failAfterMaxAttempts(true) - .maxAttempts(maxAttempts) - .build(); - this._retry = Retry.of("DgraphExecutor", config); - } + RetryConfig config = + RetryConfig.custom() + .intervalFunction( + IntervalFunction.ofExponentialRandomBackoff( + INITIAL_DURATION, BACKOFF_MULTIPLIER, RANDOMIZATION_FACTOR, MAX_DURATION)) + .retryOnException(DgraphExecutor::isRetryableException) + .failAfterMaxAttempts(true) + .maxAttempts(maxAttempts) + .build(); + this._retry = Retry.of("DgraphExecutor", config); + } - /** - * Executes the given DgraphClient call and retries retry-able exceptions. - * Subsequent executions will experience an exponential randomized backoff. 
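// Spelled out, the schedule configured above is: attempt n waits
// min(INITIAL_DURATION * BACKOFF_MULTIPLIER^(n-1), MAX_DURATION), i.e. 10ms, 20ms, 40ms, ...,
// ~5.12s, then a flat 10s for every further retry, each value jittered by +/-50% per
// RANDOMIZATION_FACTOR.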
- * - * @param func call on the provided DgraphClient - * @param return type of the function - * @return return value of the function - * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded - */ - public T executeFunction(Function func) { - return Retry.decorateFunction(this._retry, func).apply(_client); - } + /** + * Executes the given DgraphClient call and retries retry-able exceptions. Subsequent executions + * will experience an exponential randomized backoff. + * + * @param func call on the provided DgraphClient + * @param return type of the function + * @return return value of the function + * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded + */ + public T executeFunction(Function func) { + return Retry.decorateFunction(this._retry, func).apply(_client); + } - /** - * Executes the given DgraphClient call and retries retry-able exceptions. - * Subsequent executions will experience an exponential randomized backoff. - * - * @param func call on the provided DgraphClient - * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded - */ - public void executeConsumer(Consumer func) { - this._retry.executeSupplier(() -> { - func.accept(_client); - return null; + /** + * Executes the given DgraphClient call and retries retry-able exceptions. Subsequent executions + * will experience an exponential randomized backoff. + * + * @param func call on the provided DgraphClient + * @throws io.github.resilience4j.retry.MaxRetriesExceeded if max attempts exceeded + */ + public void executeConsumer(Consumer func) { + this._retry.executeSupplier( + () -> { + func.accept(_client); + return null; }); - } + } - /** - * Defines which DgraphClient exceptions are being retried. - * - * @param t exception from DgraphClient - * @return true if this exception can be retried - */ - private static boolean isRetryableException(Throwable t) { - // unwrap RuntimeException and ExecutionException - while (true) { - if ((t instanceof RuntimeException || t instanceof ExecutionException) && t.getCause() != null) { - t = t.getCause(); - continue; - } - break; - } + /** + * Defines which DgraphClient exceptions are being retried. 
+ * + * @param t exception from DgraphClient + * @return true if this exception can be retried + */ + private static boolean isRetryableException(Throwable t) { + // unwrap RuntimeException and ExecutionException + while (true) { + if ((t instanceof RuntimeException || t instanceof ExecutionException) + && t.getCause() != null) { + t = t.getCause(); + continue; + } + break; + } - // retry-able exceptions - if (t instanceof TxnConflictException - || t instanceof StatusRuntimeException && ( - t.getMessage().contains("operation opIndexing is already running") - || t.getMessage().contains("Please retry") - || t.getMessage().contains("DEADLINE_EXCEEDED:") - || t.getMessage().contains("context deadline exceeded") - || t.getMessage().contains("Only leader can decide to commit or abort") - )) { - log.debug("retrying request due to {}", t.getMessage()); - return true; - } - return false; + // retry-able exceptions + if (t instanceof TxnConflictException + || t instanceof StatusRuntimeException + && (t.getMessage().contains("operation opIndexing is already running") + || t.getMessage().contains("Please retry") + || t.getMessage().contains("DEADLINE_EXCEEDED:") + || t.getMessage().contains("context deadline exceeded") + || t.getMessage().contains("Only leader can decide to commit or abort"))) { + log.debug("retrying request due to {}", t.getMessage()); + return true; } + return false; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java index 14a9a17401702..0d8b7655fddeb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphGraphService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.graph.dgraph; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; @@ -41,665 +43,740 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DgraphGraphService implements GraphService { - // calls to Dgraph cluster will be retried if they throw retry-able exceptions - // with a max number of attempts of 160 a call will finally fail after around 15 minutes - private static final int MAX_ATTEMPTS = 160; - - private final @Nonnull DgraphExecutor _dgraph; - private final @Nonnull LineageRegistry _lineageRegistry; - - private static final String URN_RELATIONSHIP_TYPE = "urn"; - private static final String TYPE_RELATIONSHIP_TYPE = "type"; - private static final String KEY_RELATIONSHIP_TYPE = "key"; - - - @Getter(lazy = true) - // we want to defer initialization of schema (accessing Dgraph server) to the first time accessing _schema - private final DgraphSchema _schema = getSchema(); - - public DgraphGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull DgraphClient client) { - _lineageRegistry = lineageRegistry; - this._dgraph = new DgraphExecutor(client, MAX_ATTEMPTS); - } - - protected @Nonnull DgraphSchema getSchema() { - Response response = _dgraph.executeFunction(dgraphClient -> - dgraphClient.newReadOnlyTransaction().doRequest( - Request.newBuilder().setQuery("schema { predicate }").build() - ) - ); - DgraphSchema schema = getSchema(response.getJson().toStringUtf8()).withDgraph(_dgraph); - - if 
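// Usage sketch for the retry wrapper above (client wiring illustrative; DgraphClient, Request
// and Response are the dgraph4j types already imported in these files):
DgraphExecutor executor = new DgraphExecutor(dgraphClient, 160 /* like MAX_ATTEMPTS */);
Response schemaResponse =
    executor.executeFunction(
        client ->
            client
                .newReadOnlyTransaction()
                .doRequest(Request.newBuilder().setQuery("schema { predicate }").build()));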
(schema.isEmpty()) { - Operation setSchema = Operation.newBuilder() - .setSchema("" - + ": string @index(hash) @upsert .\n" - + ": string @index(hash) .\n" - + ": string @index(hash) .\n" - ) - .build(); - _dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); - } - - return schema; + // calls to Dgraph cluster will be retried if they throw retry-able exceptions + // with a max number of attempts of 160 a call will finally fail after around 15 minutes + private static final int MAX_ATTEMPTS = 160; + + private final @Nonnull DgraphExecutor _dgraph; + private final @Nonnull LineageRegistry _lineageRegistry; + + private static final String URN_RELATIONSHIP_TYPE = "urn"; + private static final String TYPE_RELATIONSHIP_TYPE = "type"; + private static final String KEY_RELATIONSHIP_TYPE = "key"; + + @Getter(lazy = true) + // we want to defer initialization of schema (accessing Dgraph server) to the first time accessing + // _schema + private final DgraphSchema _schema = getSchema(); + + public DgraphGraphService( + @Nonnull LineageRegistry lineageRegistry, @Nonnull DgraphClient client) { + _lineageRegistry = lineageRegistry; + this._dgraph = new DgraphExecutor(client, MAX_ATTEMPTS); + } + + protected @Nonnull DgraphSchema getSchema() { + Response response = + _dgraph.executeFunction( + dgraphClient -> + dgraphClient + .newReadOnlyTransaction() + .doRequest(Request.newBuilder().setQuery("schema { predicate }").build())); + DgraphSchema schema = getSchema(response.getJson().toStringUtf8()).withDgraph(_dgraph); + + if (schema.isEmpty()) { + Operation setSchema = + Operation.newBuilder() + .setSchema( + "" + + ": string @index(hash) @upsert .\n" + + ": string @index(hash) .\n" + + ": string @index(hash) .\n") + .build(); + _dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); } - protected static @Nonnull DgraphSchema getSchema(@Nonnull String json) { - Map data = getDataFromResponseJson(json); - - Object schemaObj = data.get("schema"); - if (!(schemaObj instanceof List)) { - log.info("The result from Dgraph did not contain a 'schema' field, or that field is not a List"); - return DgraphSchema.empty(); - } - - List schemaList = (List) schemaObj; - Set fieldNames = schemaList.stream().flatMap(fieldObj -> { - if (!(fieldObj instanceof Map)) { - return Stream.empty(); - } - - Map fieldMap = (Map) fieldObj; - if (!(fieldMap.containsKey("predicate") && fieldMap.get("predicate") instanceof String)) { - return Stream.empty(); - } - - String fieldName = (String) fieldMap.get("predicate"); - return Stream.of(fieldName); - }).filter(f -> !f.startsWith("dgraph.")).collect(Collectors.toSet()); + return schema; + } - Object typesObj = data.get("types"); - if (!(typesObj instanceof List)) { - log.info("The result from Dgraph did not contain a 'types' field, or that field is not a List"); - return DgraphSchema.empty(); - } + protected static @Nonnull DgraphSchema getSchema(@Nonnull String json) { + Map data = getDataFromResponseJson(json); - List types = (List) typesObj; - Map> typeFields = types.stream().flatMap(typeObj -> { - if (!(typeObj instanceof Map)) { - return Stream.empty(); - } + Object schemaObj = data.get("schema"); + if (!(schemaObj instanceof List)) { + log.info( + "The result from Dgraph did not contain a 'schema' field, or that field is not a List"); + return DgraphSchema.empty(); + } - Map typeMap = (Map) typeObj; - if (!(typeMap.containsKey("fields") - && typeMap.containsKey("name") - && typeMap.get("fields") instanceof List - && typeMap.get("name") instanceof 
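// Shape of the Dgraph response JSON this parser expects (predicate names follow the
// URN/TYPE/KEY_RELATIONSHIP_TYPE constants declared above; other values illustrative):
// {
//   "schema": [ { "predicate": "urn" }, { "predicate": "type" }, { "predicate": "key" } ],
//   "types":  [ { "name": "ns:dataset", "fields": [ { "name": "urn" } ] } ]
// }
// Entries prefixed with "dgraph." are dropped at both the field and the type level.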
String)) { - return Stream.empty(); - } + List schemaList = (List) schemaObj; + Set fieldNames = + schemaList.stream() + .flatMap( + fieldObj -> { + if (!(fieldObj instanceof Map)) { + return Stream.empty(); + } - String typeName = (String) typeMap.get("name"); - List fieldsList = (List) typeMap.get("fields"); + Map fieldMap = (Map) fieldObj; + if (!(fieldMap.containsKey("predicate") + && fieldMap.get("predicate") instanceof String)) { + return Stream.empty(); + } + + String fieldName = (String) fieldMap.get("predicate"); + return Stream.of(fieldName); + }) + .filter(f -> !f.startsWith("dgraph.")) + .collect(Collectors.toSet()); + + Object typesObj = data.get("types"); + if (!(typesObj instanceof List)) { + log.info( + "The result from Dgraph did not contain a 'types' field, or that field is not a List"); + return DgraphSchema.empty(); + } - Set fields = fieldsList.stream().flatMap(fieldObj -> { - if (!(fieldObj instanceof Map)) { + List types = (List) typesObj; + Map> typeFields = + types.stream() + .flatMap( + typeObj -> { + if (!(typeObj instanceof Map)) { return Stream.empty(); - } + } - Map fieldMap = (Map) fieldObj; - if (!(fieldMap.containsKey("name") && fieldMap.get("name") instanceof String)) { + Map typeMap = (Map) typeObj; + if (!(typeMap.containsKey("fields") + && typeMap.containsKey("name") + && typeMap.get("fields") instanceof List + && typeMap.get("name") instanceof String)) { return Stream.empty(); - } + } + + String typeName = (String) typeMap.get("name"); + List fieldsList = (List) typeMap.get("fields"); + + Set fields = + fieldsList.stream() + .flatMap( + fieldObj -> { + if (!(fieldObj instanceof Map)) { + return Stream.empty(); + } + + Map fieldMap = (Map) fieldObj; + if (!(fieldMap.containsKey("name") + && fieldMap.get("name") instanceof String)) { + return Stream.empty(); + } + + String fieldName = (String) fieldMap.get("name"); + return Stream.of(fieldName); + }) + .filter(f -> !f.startsWith("dgraph.")) + .collect(Collectors.toSet()); + return Stream.of(Pair.of(typeName, fields)); + }) + .filter(t -> !t.getKey().startsWith("dgraph.")) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + + return new DgraphSchema(fieldNames, typeFields); + } + + @Override + public LineageRegistry getLineageRegistry() { + return _lineageRegistry; + } + + @Override + public void addEdge(Edge edge) { + log.debug( + String.format( + "Adding Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); + + // add the relationship type to the schema + // TODO: translate edge name to allowed dgraph uris + String sourceEntityType = getDgraphType(edge.getSource()); + String relationshipType = edge.getRelationshipType(); + get_schema() + .ensureField( + sourceEntityType, + relationshipType, + URN_RELATIONSHIP_TYPE, + TYPE_RELATIONSHIP_TYPE, + KEY_RELATIONSHIP_TYPE); + + // lookup the source and destination nodes + // TODO: add escape for string values + String query = + String.format( + "query {\n" + + " src as var(func: eq(urn, \"%s\"))\n" + + " dst as var(func: eq(urn, \"%s\"))\n" + + "}", + edge.getSource(), edge.getDestination()); + String srcVar = "uid(src)"; + String dstVar = "uid(dst)"; + + // edge case: source and destination are same node + if (edge.getSource().equals(edge.getDestination())) { + query = + String.format( + "query {\n" + " node as var(func: eq(urn, \"%s\"))\n" + "}", edge.getSource()); + srcVar = "uid(node)"; + dstVar = "uid(node)"; + } + + // create source and destination nodes if they do not exist + // 
and create the new edge between them + // TODO: add escape for string values + // TODO: translate edge name to allowed dgraph uris + StringJoiner mutations = new StringJoiner("\n"); + mutations.add( + String.format("%s \"%s\" .", srcVar, getDgraphType(edge.getSource()))); + mutations.add(String.format("%s \"%s\" .", srcVar, edge.getSource())); + mutations.add(String.format("%s \"%s\" .", srcVar, edge.getSource().getEntityType())); + mutations.add(String.format("%s \"%s\" .", srcVar, edge.getSource().getEntityKey())); + if (!edge.getSource().equals(edge.getDestination())) { + mutations.add( + String.format("%s \"%s\" .", dstVar, getDgraphType(edge.getDestination()))); + mutations.add(String.format("%s \"%s\" .", dstVar, edge.getDestination())); + mutations.add( + String.format("%s \"%s\" .", dstVar, edge.getDestination().getEntityType())); + mutations.add( + String.format("%s \"%s\" .", dstVar, edge.getDestination().getEntityKey())); + } + mutations.add(String.format("%s <%s> %s .", srcVar, edge.getRelationshipType(), dstVar)); + + log.debug("Query: " + query); + log.debug("Mutations: " + mutations); + + // construct the upsert + Mutation mutation = + Mutation.newBuilder().setSetNquads(ByteString.copyFromUtf8(mutations.toString())).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + // run the request + _dgraph.executeFunction(client -> client.newTransaction().doRequest(request)); + } + + private static @Nonnull String getDgraphType(@Nonnull Urn urn) { + return urn.getNamespace() + ":" + urn.getEntityType(); + } + + // Returns reversed and directed relationship types: + // returns <~rel> on outgoing and on incoming and both on undirected + private static List getDirectedRelationshipTypes( + List relationships, RelationshipDirection direction) { + + if (direction == RelationshipDirection.OUTGOING + || direction == RelationshipDirection.UNDIRECTED) { + List outgoingRelationships = + relationships.stream().map(type -> "~" + type).collect(Collectors.toList()); + + if (direction == RelationshipDirection.OUTGOING) { + return outgoingRelationships; + } else { + relationships = new ArrayList<>(relationships); + relationships.addAll(outgoingRelationships); + } + } - String fieldName = (String) fieldMap.get("name"); - return Stream.of(fieldName); - }).filter(f -> !f.startsWith("dgraph.")).collect(Collectors.toSet()); - return Stream.of(Pair.of(typeName, fields)); - }).filter(t -> !t.getKey().startsWith("dgraph.")).collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + // we need to remove duplicates in order to not cause invalid queries in dgraph + return new ArrayList<>(new LinkedHashSet(relationships)); + } + + protected static String getQueryForRelatedEntities( + @Nullable List sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + int offset, + int count) { + if (relationshipTypes.isEmpty()) { + // we would have to construct a query that never returns any results + // just do not call this method in the first place + throw new IllegalArgumentException("The relationship types must not be empty"); + } - return new DgraphSchema(fieldNames, typeFields); + if (sourceEntityFilter.hasCriteria() || destinationEntityFilter.hasCriteria()) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support criteria in source or destination entity filter"); } - 
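// Worked example for getDirectedRelationshipTypes (relationship name illustrative):
//   (["DownstreamOf"], OUTGOING)   -> ["~DownstreamOf"]
//   (["DownstreamOf"], INCOMING)   -> ["DownstreamOf"]
//   (["DownstreamOf"], UNDIRECTED) -> ["DownstreamOf", "~DownstreamOf"]
// The method's final LinkedHashSet pass removes duplicates while preserving insertion order,
// since repeating a predicate would produce an invalid Dgraph query.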
@Override - public LineageRegistry getLineageRegistry() { - return _lineageRegistry; + //noinspection ConstantConditions + if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() > 1 + || destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() > 1) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support multiple OR criteria in source or destination entity filter"); } - @Override - public void addEdge(Edge edge) { - log.debug(String.format("Adding Edge source: %s, destination: %s, type: %s", - edge.getSource(), - edge.getDestination(), - edge.getRelationshipType())); - - // add the relationship type to the schema - // TODO: translate edge name to allowed dgraph uris - String sourceEntityType = getDgraphType(edge.getSource()); - String relationshipType = edge.getRelationshipType(); - get_schema().ensureField(sourceEntityType, relationshipType, URN_RELATIONSHIP_TYPE, TYPE_RELATIONSHIP_TYPE, KEY_RELATIONSHIP_TYPE); - - // lookup the source and destination nodes - // TODO: add escape for string values - String query = String.format("query {\n" - + " src as var(func: eq(urn, \"%s\"))\n" - + " dst as var(func: eq(urn, \"%s\"))\n" - + "}", edge.getSource(), edge.getDestination()); - String srcVar = "uid(src)"; - String dstVar = "uid(dst)"; - - // edge case: source and destination are same node - if (edge.getSource().equals(edge.getDestination())) { - query = String.format("query {\n" - + " node as var(func: eq(urn, \"%s\"))\n" - + "}", edge.getSource()); - srcVar = "uid(node)"; - dstVar = "uid(node)"; - } - - // create source and destination nodes if they do not exist - // and create the new edge between them - // TODO: add escape for string values - // TODO: translate edge name to allowed dgraph uris - StringJoiner mutations = new StringJoiner("\n"); - mutations.add(String.format("%s \"%s\" .", srcVar, getDgraphType(edge.getSource()))); - mutations.add(String.format("%s \"%s\" .", srcVar, edge.getSource())); - mutations.add(String.format("%s \"%s\" .", srcVar, edge.getSource().getEntityType())); - mutations.add(String.format("%s \"%s\" .", srcVar, edge.getSource().getEntityKey())); - if (!edge.getSource().equals(edge.getDestination())) { - mutations.add(String.format("%s \"%s\" .", dstVar, getDgraphType(edge.getDestination()))); - mutations.add(String.format("%s \"%s\" .", dstVar, edge.getDestination())); - mutations.add(String.format("%s \"%s\" .", dstVar, edge.getDestination().getEntityType())); - mutations.add(String.format("%s \"%s\" .", dstVar, edge.getDestination().getEntityKey())); - } - mutations.add(String.format("%s <%s> %s .", srcVar, edge.getRelationshipType(), dstVar)); - - log.debug("Query: " + query); - log.debug("Mutations: " + mutations); - - // construct the upsert - Mutation mutation = Mutation.newBuilder() - .setSetNquads(ByteString.copyFromUtf8(mutations.toString())) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - // run the request - _dgraph.executeFunction(client -> client.newTransaction().doRequest(request)); + //noinspection ConstantConditions + if (relationshipFilter.hasCriteria() + || relationshipFilter.hasOr() && relationshipFilter.getOr().size() > 0) { + throw new IllegalArgumentException( + "The DgraphGraphService does not support any criteria for the relationship filter"); } - private static @Nonnull String getDgraphType(@Nonnull Urn urn) { - return urn.getNamespace() + ":" + urn.getEntityType(); + // We are not querying 
for and return + // but we reverse the relationship and query for <~relationship> + // this guarantees there are no duplicates among the returned s + final List directedRelationshipTypes = + getDirectedRelationshipTypes(relationshipTypes, relationshipFilter.getDirection()); + + List filters = new ArrayList<>(); + + Set destinationNodeFilterNames = new HashSet<>(); + String sourceTypeFilterName = null; + String destinationTypeFilterName = null; + List sourceFilterNames = new ArrayList<>(); + List destinationFilterNames = new ArrayList<>(); + List relationshipTypeFilterNames = new ArrayList<>(); + + if (sourceTypes != null && sourceTypes.size() > 0) { + sourceTypeFilterName = "sourceType"; + // TODO: escape string value + final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); + sourceTypes.forEach(type -> joiner.add(type)); + filters.add( + String.format( + "%s as var(func: eq(, %s))", sourceTypeFilterName, joiner.toString())); } - // Returns reversed and directed relationship types: - // returns <~rel> on outgoing and on incoming and both on undirected - private static List getDirectedRelationshipTypes(List relationships, - RelationshipDirection direction) { - - if (direction == RelationshipDirection.OUTGOING || direction == RelationshipDirection.UNDIRECTED) { - List outgoingRelationships = relationships.stream() - .map(type -> "~" + type).collect(Collectors.toList()); - - if (direction == RelationshipDirection.OUTGOING) { - return outgoingRelationships; - } else { - relationships = new ArrayList<>(relationships); - relationships.addAll(outgoingRelationships); - } - } - - // we need to remove duplicates in order to not cause invalid queries in dgraph - return new ArrayList<>(new LinkedHashSet(relationships)); + if (destinationTypes != null && destinationTypes.size() > 0) { + destinationTypeFilterName = "destinationType"; + final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); + destinationTypes.forEach(type -> joiner.add(type)); + // TODO: escape string value + filters.add( + String.format( + "%s as var(func: eq(, %s))", destinationTypeFilterName, joiner.toString())); } - protected static String getQueryForRelatedEntities(@Nullable List sourceTypes, - @Nonnull Filter sourceEntityFilter, - @Nullable List destinationTypes, - @Nonnull Filter destinationEntityFilter, - @Nonnull List relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter, - int offset, - int count) { - if (relationshipTypes.isEmpty()) { - // we would have to construct a query that never returns any results - // just do not call this method in the first place - throw new IllegalArgumentException("The relationship types must not be empty"); - } - - - if (sourceEntityFilter.hasCriteria() || destinationEntityFilter.hasCriteria()) { - throw new IllegalArgumentException("The DgraphGraphService does not support criteria in source or destination entity filter"); - } - - //noinspection ConstantConditions - if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() > 1 - || destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() > 1) { - throw new IllegalArgumentException("The DgraphGraphService does not support multiple OR criteria in source or destination entity filter"); - } - - //noinspection ConstantConditions - if (relationshipFilter.hasCriteria() || relationshipFilter.hasOr() && relationshipFilter.getOr().size() > 0) { - throw new IllegalArgumentException("The DgraphGraphService does not support any criteria for the relationship filter"); - } - - // We are not 
querying for and return - // but we reverse the relationship and query for <~relationship> - // this guarantees there are no duplicates among the returned s - final List directedRelationshipTypes = getDirectedRelationshipTypes( - relationshipTypes, relationshipFilter.getDirection() - ); - - List filters = new ArrayList<>(); - - Set destinationNodeFilterNames = new HashSet<>(); - String sourceTypeFilterName = null; - String destinationTypeFilterName = null; - List sourceFilterNames = new ArrayList<>(); - List destinationFilterNames = new ArrayList<>(); - List relationshipTypeFilterNames = new ArrayList<>(); - - if (sourceTypes != null && sourceTypes.size() > 0) { - sourceTypeFilterName = "sourceType"; - // TODO: escape string value - final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); - sourceTypes.forEach(type -> joiner.add(type)); - filters.add(String.format("%s as var(func: eq(, %s))", sourceTypeFilterName, joiner.toString())); - } - - if (destinationTypes != null && destinationTypes.size() > 0) { - destinationTypeFilterName = "destinationType"; - final StringJoiner joiner = new StringJoiner("\",\"", "[\"", "\"]"); - destinationTypes.forEach(type -> joiner.add(type)); - // TODO: escape string value - filters.add(String.format("%s as var(func: eq(, %s))", destinationTypeFilterName, joiner.toString())); - } - - //noinspection ConstantConditions - if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() == 1) { - CriterionArray sourceCriteria = sourceEntityFilter.getOr().get(0).getAnd(); - IntStream.range(0, sourceCriteria.size()) - .forEach(idx -> { - String sourceFilterName = "sourceFilter" + (idx + 1); - sourceFilterNames.add(sourceFilterName); - Criterion criterion = sourceCriteria.get(idx); - // TODO: escape field name and string value - filters.add(String.format("%s as var(func: eq(<%s>, \"%s\"))", sourceFilterName, criterion.getField(), criterion.getValue())); - }); - } - - //noinspection ConstantConditions - if (destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() == 1) { - CriterionArray destinationCriteria = destinationEntityFilter.getOr().get(0).getAnd(); - IntStream.range(0, destinationCriteria.size()) - .forEach(idx -> { - String sourceFilterName = "destinationFilter" + (idx + 1); - destinationFilterNames.add(sourceFilterName); - Criterion criterion = destinationCriteria.get(idx); - // TODO: escape field name and string value - filters.add(String.format("%s as var(func: eq(<%s>, \"%s\"))", sourceFilterName, criterion.getField(), criterion.getValue())); - }); - } - - IntStream.range(0, directedRelationshipTypes.size()) - .forEach(idx -> { - String relationshipTypeFilterName = "relationshipType" + (idx + 1); - relationshipTypeFilterNames.add(relationshipTypeFilterName); - // TODO: escape string value - filters.add(String.format("%s as var(func: has(<%s>))", relationshipTypeFilterName, directedRelationshipTypes.get(idx))); - }); - - // the destination node filter is the first filter that is being applied on the destination node - // we can add multiple filters, they will combine as OR - if (destinationTypeFilterName != null) { - destinationNodeFilterNames.add(destinationTypeFilterName); - } - destinationNodeFilterNames.addAll(destinationFilterNames); - destinationNodeFilterNames.addAll(relationshipTypeFilterNames); - - StringJoiner destinationNodeFilterJoiner = new StringJoiner(", "); - destinationNodeFilterNames.stream().sorted().forEach(destinationNodeFilterJoiner::add); - String destinationNodeFilter = 
destinationNodeFilterJoiner.toString(); - - String filterConditions = getFilterConditions( - sourceTypeFilterName, destinationTypeFilterName, - sourceFilterNames, destinationFilterNames, - relationshipTypeFilterNames, directedRelationshipTypes - ); - - StringJoiner relationshipsJoiner = new StringJoiner("\n "); - getRelationships(sourceTypeFilterName, sourceFilterNames, directedRelationshipTypes) - .forEach(relationshipsJoiner::add); - String relationships = relationshipsJoiner.toString(); - - StringJoiner filterJoiner = new StringJoiner("\n "); - filters.forEach(filterJoiner::add); - String filterExpressions = filterJoiner.toString(); - - return String.format("query {\n" - + " %s\n" - + "\n" - + " result (func: uid(%s), first: %d, offset: %d) %s {\n" - + " \n" - + " %s\n" - + " }\n" - + "}", - filterExpressions, - destinationNodeFilter, - count, offset, - filterConditions, - relationships); + //noinspection ConstantConditions + if (sourceEntityFilter.hasOr() && sourceEntityFilter.getOr().size() == 1) { + CriterionArray sourceCriteria = sourceEntityFilter.getOr().get(0).getAnd(); + IntStream.range(0, sourceCriteria.size()) + .forEach( + idx -> { + String sourceFilterName = "sourceFilter" + (idx + 1); + sourceFilterNames.add(sourceFilterName); + Criterion criterion = sourceCriteria.get(idx); + // TODO: escape field name and string value + filters.add( + String.format( + "%s as var(func: eq(<%s>, \"%s\"))", + sourceFilterName, criterion.getField(), criterion.getValue())); + }); } - @Override - public void upsertEdge(final Edge edge) { - throw new UnsupportedOperationException("Upsert edge not supported by Neo4JGraphService at this time."); + //noinspection ConstantConditions + if (destinationEntityFilter.hasOr() && destinationEntityFilter.getOr().size() == 1) { + CriterionArray destinationCriteria = destinationEntityFilter.getOr().get(0).getAnd(); + IntStream.range(0, destinationCriteria.size()) + .forEach( + idx -> { + String sourceFilterName = "destinationFilter" + (idx + 1); + destinationFilterNames.add(sourceFilterName); + Criterion criterion = destinationCriteria.get(idx); + // TODO: escape field name and string value + filters.add( + String.format( + "%s as var(func: eq(<%s>, \"%s\"))", + sourceFilterName, criterion.getField(), criterion.getValue())); + }); } - @Override - public void removeEdge(final Edge edge) { - throw new UnsupportedOperationException("Remove edge not supported by DgraphGraphService at this time."); + IntStream.range(0, directedRelationshipTypes.size()) + .forEach( + idx -> { + String relationshipTypeFilterName = "relationshipType" + (idx + 1); + relationshipTypeFilterNames.add(relationshipTypeFilterName); + // TODO: escape string value + filters.add( + String.format( + "%s as var(func: has(<%s>))", + relationshipTypeFilterName, directedRelationshipTypes.get(idx))); + }); + + // the destination node filter is the first filter that is being applied on the destination node + // we can add multiple filters, they will combine as OR + if (destinationTypeFilterName != null) { + destinationNodeFilterNames.add(destinationTypeFilterName); + } + destinationNodeFilterNames.addAll(destinationFilterNames); + destinationNodeFilterNames.addAll(relationshipTypeFilterNames); + + StringJoiner destinationNodeFilterJoiner = new StringJoiner(", "); + destinationNodeFilterNames.stream().sorted().forEach(destinationNodeFilterJoiner::add); + String destinationNodeFilter = destinationNodeFilterJoiner.toString(); + + String filterConditions = + getFilterConditions( + sourceTypeFilterName, 
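
Each criterion in the source or destination filter becomes one named Dgraph variable block, so the translation is worth spelling out once. A hypothetical example follows; the names and values are illustrative, and per the TODOs above, field names and values are not escaped.

    // How one filter criterion is turned into a named var block:
    static String criterionToVarBlock(String filterName, String field, String value) {
      // TODO from the patch applies here as well: escape field name and value
      return String.format("%s as var(func: eq(<%s>, \"%s\"))", filterName, field, value);
    }

    // criterionToVarBlock("sourceFilter1", "urn", "urn:li:corpuser:jdoe") yields:
    //   sourceFilter1 as var(func: eq(<urn>, "urn:li:corpuser:jdoe"))
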
destinationTypeFilterName, + sourceFilterNames, destinationFilterNames, + relationshipTypeFilterNames, directedRelationshipTypes); + + StringJoiner relationshipsJoiner = new StringJoiner("\n "); + getRelationships(sourceTypeFilterName, sourceFilterNames, directedRelationshipTypes) + .forEach(relationshipsJoiner::add); + String relationships = relationshipsJoiner.toString(); + + StringJoiner filterJoiner = new StringJoiner("\n "); + filters.forEach(filterJoiner::add); + String filterExpressions = filterJoiner.toString(); + + return String.format( + "query {\n" + + " %s\n" + + "\n" + + " result (func: uid(%s), first: %d, offset: %d) %s {\n" + + " \n" + + " %s\n" + + " }\n" + + "}", + filterExpressions, destinationNodeFilter, count, offset, filterConditions, relationships); + } + + @Override + public void upsertEdge(final Edge edge) { + throw new UnsupportedOperationException( + "Upsert edge not supported by Neo4JGraphService at this time."); + } + + @Override + public void removeEdge(final Edge edge) { + throw new UnsupportedOperationException( + "Remove edge not supported by DgraphGraphService at this time."); + } + + @Nonnull + @Override + public RelatedEntitiesResult findRelatedEntities( + @Nullable List sourceTypes, + @Nonnull Filter sourceEntityFilter, + @Nullable List destinationTypes, + @Nonnull Filter destinationEntityFilter, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter, + int offset, + int count) { + + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { + return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); + } + if (relationshipTypes.isEmpty() + || relationshipTypes.stream() + .noneMatch(relationship -> get_schema().hasField(relationship))) { + return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } - @Nonnull - @Override - public RelatedEntitiesResult findRelatedEntities(@Nullable List sourceTypes, - @Nonnull Filter sourceEntityFilter, - @Nullable List destinationTypes, - @Nonnull Filter destinationEntityFilter, - @Nonnull List relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter, - int offset, - int count) { - - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { - return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); - } - if (relationshipTypes.isEmpty() || relationshipTypes.stream().noneMatch(relationship -> get_schema().hasField(relationship))) { - return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); - } - - String query = getQueryForRelatedEntities( - sourceTypes, sourceEntityFilter, - destinationTypes, destinationEntityFilter, - relationshipTypes.stream().filter(get_schema()::hasField).collect(Collectors.toList()), - relationshipFilter, - offset, count - ); - - Request request = Request.newBuilder() - .setQuery(query) - .build(); - - log.debug("Query: " + query); - Response response = _dgraph.executeFunction(client -> client.newReadOnlyTransaction().doRequest(request)); - String json = response.getJson().toStringUtf8(); - Map data = getDataFromResponseJson(json); - - List entities = getRelatedEntitiesFromResponseData(data); - int total = offset + entities.size(); - if (entities.size() == count) { - // indicate that there might be more results - total++; - } - return new RelatedEntitiesResult(offset, entities.size(), total, entities); + String query = + getQueryForRelatedEntities( + sourceTypes, + sourceEntityFilter, + 
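
For orientation, a hypothetical rendering of the query this method assembles, for a single reversed relationship type and one source filter; predicate names and urn values are illustrative and whitespace is approximate.

    // query {
    //   sourceFilter1 as var(func: eq(<urn>, "urn:li:corpuser:jdoe"))
    //   relationshipType1 as var(func: has(<~OwnedBy>))
    //
    //   result (func: uid(relationshipType1), first: 100, offset: 0) @filter(
    //     (
    //       uid(relationshipType1) AND uid_in(<~OwnedBy>, uid(sourceFilter1))
    //     )
    //   ) {
    //     <urn>
    //     <~OwnedBy> @filter( uid(sourceFilter1) ) { <uid> }
    //   }
    // }
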
destinationTypes, + destinationEntityFilter, + relationshipTypes.stream().filter(get_schema()::hasField).collect(Collectors.toList()), + relationshipFilter, + offset, + count); + + Request request = Request.newBuilder().setQuery(query).build(); + + log.debug("Query: " + query); + Response response = + _dgraph.executeFunction(client -> client.newReadOnlyTransaction().doRequest(request)); + String json = response.getJson().toStringUtf8(); + Map data = getDataFromResponseJson(json); + + List entities = getRelatedEntitiesFromResponseData(data); + int total = offset + entities.size(); + if (entities.size() == count) { + // indicate that there might be more results + total++; + } + return new RelatedEntitiesResult(offset, entities.size(), total, entities); + } + + // Creates filter conditions from destination to source nodes + protected static @Nonnull String getFilterConditions( + @Nullable String sourceTypeFilterName, + @Nullable String destinationTypeFilterName, + @Nonnull List sourceFilterNames, + @Nonnull List destinationFilterNames, + @Nonnull List relationshipTypeFilterNames, + @Nonnull List relationshipTypes) { + if (relationshipTypes.size() != relationshipTypeFilterNames.size()) { + throw new IllegalArgumentException( + "relationshipTypeFilterNames and relationshipTypes " + + "must have same size: " + + relationshipTypeFilterNames + + " vs. " + + relationshipTypes); } - // Creates filter conditions from destination to source nodes - protected static @Nonnull String getFilterConditions(@Nullable String sourceTypeFilterName, - @Nullable String destinationTypeFilterName, - @Nonnull List sourceFilterNames, - @Nonnull List destinationFilterNames, - @Nonnull List relationshipTypeFilterNames, - @Nonnull List relationshipTypes) { - if (relationshipTypes.size() != relationshipTypeFilterNames.size()) { - throw new IllegalArgumentException("relationshipTypeFilterNames and relationshipTypes " - + "must have same size: " + relationshipTypeFilterNames + " vs. 
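
The total computed above deserves a note: Dgraph is not asked for an exact match count, so the method reports a lower bound and nudges it past the current page when the page came back full. A compact restatement of the heuristic:

    // Sketch of the paging heuristic in findRelatedEntities:
    static int estimateTotal(int offset, int returned, int requested) {
      int total = offset + returned; // rows known to exist so far
      if (returned == requested) {
        total++; // full page: signal that at least one more page may exist
      }
      return total;
    }
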
" + relationshipTypes); - } - - if (sourceTypeFilterName == null - && destinationTypeFilterName == null - && sourceFilterNames.isEmpty() - && destinationFilterNames.isEmpty() - && relationshipTypeFilterNames.isEmpty()) { - return ""; - } - - StringJoiner andJoiner = new StringJoiner(" AND\n "); - if (destinationTypeFilterName != null) { - andJoiner.add(String.format("uid(%s)", destinationTypeFilterName)); - } - - destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid(%s)", filter))); - - if (!relationshipTypes.isEmpty()) { - StringJoiner orJoiner = new StringJoiner(" OR\n "); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> orJoiner.add(getRelationshipCondition( - relationshipTypes.get(idx), relationshipTypeFilterNames.get(idx), - sourceTypeFilterName, sourceFilterNames - ))); - String relationshipCondition = orJoiner.toString(); - andJoiner.add(String.format("(\n %s\n )", relationshipCondition)); - } - - String conditions = andJoiner.toString(); - return String.format("@filter(\n %s\n )", conditions); + if (sourceTypeFilterName == null + && destinationTypeFilterName == null + && sourceFilterNames.isEmpty() + && destinationFilterNames.isEmpty() + && relationshipTypeFilterNames.isEmpty()) { + return ""; } - protected static String getRelationshipCondition(@Nonnull String relationshipType, - @Nonnull String relationshipTypeFilterName, - @Nullable String objectFilterName, - @Nonnull List destinationFilterNames) { - StringJoiner andJoiner = new StringJoiner(" AND "); - andJoiner.add(String.format("uid(%s)", relationshipTypeFilterName)); - if (objectFilterName != null) { - andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, objectFilterName)); - } - destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, filter))); - return andJoiner.toString(); + StringJoiner andJoiner = new StringJoiner(" AND\n "); + if (destinationTypeFilterName != null) { + andJoiner.add(String.format("uid(%s)", destinationTypeFilterName)); } + destinationFilterNames.forEach(filter -> andJoiner.add(String.format("uid(%s)", filter))); + + if (!relationshipTypes.isEmpty()) { + StringJoiner orJoiner = new StringJoiner(" OR\n "); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + orJoiner.add( + getRelationshipCondition( + relationshipTypes.get(idx), + relationshipTypeFilterNames.get(idx), + sourceTypeFilterName, + sourceFilterNames))); + String relationshipCondition = orJoiner.toString(); + andJoiner.add(String.format("(\n %s\n )", relationshipCondition)); + } - // Creates filter conditions from destination to source nodes - protected static @Nonnull List getRelationships(@Nullable String sourceTypeFilterName, - @Nonnull List sourceFilterNames, - @Nonnull List relationshipTypes) { - return relationshipTypes.stream().map(relationshipType -> { - StringJoiner andJoiner = new StringJoiner(" AND "); - if (sourceTypeFilterName != null) { + String conditions = andJoiner.toString(); + return String.format("@filter(\n %s\n )", conditions); + } + + protected static String getRelationshipCondition( + @Nonnull String relationshipType, + @Nonnull String relationshipTypeFilterName, + @Nullable String objectFilterName, + @Nonnull List destinationFilterNames) { + StringJoiner andJoiner = new StringJoiner(" AND "); + andJoiner.add(String.format("uid(%s)", relationshipTypeFilterName)); + if (objectFilterName != null) { + andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, objectFilterName)); + } + 
destinationFilterNames.forEach( + filter -> andJoiner.add(String.format("uid_in(<%s>, uid(%s))", relationshipType, filter))); + return andJoiner.toString(); + } + + // Creates filter conditions from destination to source nodes + protected static @Nonnull List getRelationships( + @Nullable String sourceTypeFilterName, + @Nonnull List sourceFilterNames, + @Nonnull List relationshipTypes) { + return relationshipTypes.stream() + .map( + relationshipType -> { + StringJoiner andJoiner = new StringJoiner(" AND "); + if (sourceTypeFilterName != null) { andJoiner.add(String.format("uid(%s)", sourceTypeFilterName)); - } - sourceFilterNames.forEach(filterName -> andJoiner.add(String.format("uid(%s)", filterName))); + } + sourceFilterNames.forEach( + filterName -> andJoiner.add(String.format("uid(%s)", filterName))); - if (andJoiner.length() > 0) { + if (andJoiner.length() > 0) { return String.format("<%s> @filter( %s ) { }", relationshipType, andJoiner); - } else { + } else { return String.format("<%s> { }", relationshipType); - } - }).collect(Collectors.toList()); + } + }) + .collect(Collectors.toList()); + } + + protected static Map getDataFromResponseJson(String json) { + ObjectMapper mapper = new ObjectMapper(); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + TypeReference> typeRef = + new TypeReference>() {}; + try { + return mapper.readValue(json, typeRef); + } catch (IOException e) { + throw new RuntimeException("Failed to parse response json: " + json.substring(0, 1000), e); } - - protected static Map getDataFromResponseJson(String json) { - ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - TypeReference> typeRef = new TypeReference>() { }; - try { - return mapper.readValue(json, typeRef); - } catch (IOException e) { - throw new RuntimeException("Failed to parse response json: " + json.substring(0, 1000), e); - } + } + + protected static List getRelatedEntitiesFromResponseData( + Map data) { + Object obj = data.get("result"); + if (!(obj instanceof List)) { + throw new IllegalArgumentException( + "The result from Dgraph did not contain a 'result' field, or that field is not a List"); } - protected static List getRelatedEntitiesFromResponseData(Map data) { - Object obj = data.get("result"); - if (!(obj instanceof List)) { - throw new IllegalArgumentException( - "The result from Dgraph did not contain a 'result' field, or that field is not a List" - ); - } - - List results = (List) obj; - return results.stream().flatMap(destinationObj -> { - if (!(destinationObj instanceof Map)) { + List results = (List) obj; + return results.stream() + .flatMap( + destinationObj -> { + if (!(destinationObj instanceof Map)) { return Stream.empty(); - } + } - Map destination = (Map) destinationObj; - if (destination.containsKey("urn") && destination.get("urn") instanceof String) { + Map destination = (Map) destinationObj; + if (destination.containsKey("urn") && destination.get("urn") instanceof String) { String urn = (String) destination.get("urn"); return destination.entrySet().stream() - .filter(entry -> !entry.getKey().equals("urn")) - .flatMap(entry -> { - Object 
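
The maxStringLength override in getDataFromResponseJson exists because Jackson 2.15 started enforcing stream-read constraints, and a large Dgraph response can exceed the default cap. A standalone sketch; the 16 MB fallback value here is illustrative only.

    import com.fasterxml.jackson.core.StreamReadConstraints;
    import com.fasterxml.jackson.databind.ObjectMapper;

    // Raise Jackson's maximum accepted string length (capped by default since
    // Jackson 2.15). The env var mirrors the one read above.
    static ObjectMapper mapperForLargeResponses() {
      int maxSize = Integer.parseInt(
          System.getenv().getOrDefault("INGESTION_MAX_SERIALIZED_STRING_LENGTH", "16000000"));
      ObjectMapper mapper = new ObjectMapper();
      mapper.getFactory().setStreamReadConstraints(
          StreamReadConstraints.builder().maxStringLength(maxSize).build());
      return mapper;
    }
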
relationshipObj = entry.getKey(); - Object sourcesObj = entry.getValue(); - if (!(relationshipObj instanceof String && sourcesObj instanceof List)) { - return Stream.empty(); - } - - String relationship = (String) relationshipObj; - List sources = (List) sourcesObj; - - if (sources.size() == 0) { - return Stream.empty(); - } - - if (relationship.startsWith("~")) { - relationship = relationship.substring(1); - } - - return Stream.of(relationship); + .filter(entry -> !entry.getKey().equals("urn")) + .flatMap( + entry -> { + Object relationshipObj = entry.getKey(); + Object sourcesObj = entry.getValue(); + if (!(relationshipObj instanceof String && sourcesObj instanceof List)) { + return Stream.empty(); + } + + String relationship = (String) relationshipObj; + List sources = (List) sourcesObj; + + if (sources.size() == 0) { + return Stream.empty(); + } + + if (relationship.startsWith("~")) { + relationship = relationship.substring(1); + } + + return Stream.of(relationship); }) - // for undirected we get duplicate relationships - .distinct() - .map(relationship -> new RelatedEntity(relationship, urn)); - } - - return Stream.empty(); - }).collect(Collectors.toList()); - } - - @Override - public void removeNode(@Nonnull Urn urn) { - String query = String.format("query {\n" - + " node as var(func: eq(urn, \"%s\"))\n" - + "}", urn); - String deletion = "uid(node) * * ."; - - log.debug("Query: " + query); - log.debug("Delete: " + deletion); - - Mutation mutation = Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(deletion)) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + // for undirected we get duplicate relationships + .distinct() + .map(relationship -> new RelatedEntity(relationship, urn)); + } + + return Stream.empty(); + }) + .collect(Collectors.toList()); + } + + @Override + public void removeNode(@Nonnull Urn urn) { + String query = String.format("query {\n" + " node as var(func: eq(urn, \"%s\"))\n" + "}", urn); + String deletion = "uid(node) * * ."; + + log.debug("Query: " + query); + log.debug("Delete: " + deletion); + + Mutation mutation = + Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(deletion)).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } + + @Override + public void removeEdgesFromNode( + @Nonnull Urn urn, + @Nonnull List relationshipTypes, + @Nonnull RelationshipFilter relationshipFilter) { + if (relationshipTypes.isEmpty()) { + return; } - @Override - public void removeEdgesFromNode(@Nonnull Urn urn, - @Nonnull List relationshipTypes, - @Nonnull RelationshipFilter relationshipFilter) { - if (relationshipTypes.isEmpty()) { - return; - } - - RelationshipDirection direction = relationshipFilter.getDirection(); + RelationshipDirection direction = relationshipFilter.getDirection(); - if (direction == RelationshipDirection.OUTGOING || direction == RelationshipDirection.UNDIRECTED) { - removeOutgoingEdgesFromNode(urn, relationshipTypes); - } - - if (direction == RelationshipDirection.INCOMING || direction == RelationshipDirection.UNDIRECTED) { - removeIncomingEdgesFromNode(urn, relationshipTypes); - } + if (direction == RelationshipDirection.OUTGOING + || direction == RelationshipDirection.UNDIRECTED) { + removeOutgoingEdgesFromNode(urn, 
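
removeNode uses dgraph4j's query-plus-mutation request: the query binds the node to a variable and the mutation deletes every triple it owns. A condensed sketch, assuming a ready DgraphClient and an unescaped, illustrative urn value:

    import com.google.protobuf.ByteString;
    import io.dgraph.DgraphClient;
    import io.dgraph.DgraphProto.Mutation;
    import io.dgraph.DgraphProto.Request;

    static void removeNodeSketch(DgraphClient client, String urn) {
      // bind the matching node to the variable "node"
      String query = String.format("query {\n  node as var(func: eq(<urn>, \"%s\"))\n}", urn);
      // "uid(node) * * ." deletes all predicates of the bound node
      Mutation mutation =
          Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8("uid(node) * * .")).build();
      Request request =
          Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build();
      client.newTransaction().doRequest(request);
    }
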
relationshipTypes); } - private void removeOutgoingEdgesFromNode(@Nonnull Urn urn, - @Nonnull List relationshipTypes) { - // TODO: add escape for string values - String query = String.format("query {\n" - + " node as var(func: eq(, \"%s\"))\n" - + "}", urn); - - Value star = Value.newBuilder().setDefaultVal("_STAR_ALL").build(); - List deletions = relationshipTypes.stream().map(relationshipType -> - NQuad.newBuilder() + if (direction == RelationshipDirection.INCOMING + || direction == RelationshipDirection.UNDIRECTED) { + removeIncomingEdgesFromNode(urn, relationshipTypes); + } + } + + private void removeOutgoingEdgesFromNode( + @Nonnull Urn urn, @Nonnull List relationshipTypes) { + // TODO: add escape for string values + String query = + String.format("query {\n" + " node as var(func: eq(, \"%s\"))\n" + "}", urn); + + Value star = Value.newBuilder().setDefaultVal("_STAR_ALL").build(); + List deletions = + relationshipTypes.stream() + .map( + relationshipType -> + NQuad.newBuilder() .setSubject("uid(node)") .setPredicate(relationshipType) .setObjectValue(star) - .build() - ).collect(Collectors.toList()); - - log.debug("Query: " + query); - log.debug("Deletions: " + deletions); - - Mutation mutation = Mutation.newBuilder() - .addAllDel(deletions) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); - } - - private void removeIncomingEdgesFromNode(@Nonnull Urn urn, - @Nonnull List relationshipTypes) { - // TODO: add escape for string values - StringJoiner reverseEdges = new StringJoiner("\n "); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> - reverseEdges.add("<~" + relationshipTypes.get(idx) + "> { uids" + (idx + 1) + " as uid }") - ); - String query = String.format("query {\n" + .build()) + .collect(Collectors.toList()); + + log.debug("Query: " + query); + log.debug("Deletions: " + deletions); + + Mutation mutation = Mutation.newBuilder().addAllDel(deletions).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); + + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } + + private void removeIncomingEdgesFromNode( + @Nonnull Urn urn, @Nonnull List relationshipTypes) { + // TODO: add escape for string values + StringJoiner reverseEdges = new StringJoiner("\n "); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + reverseEdges.add( + "<~" + relationshipTypes.get(idx) + "> { uids" + (idx + 1) + " as uid }")); + String query = + String.format( + "query {\n" + " node as var(func: eq(, \"%s\"))\n" + "\n" + " var(func: uid(node)) @normalize {\n" + " %s\n" + " }\n" - + "}", urn, reverseEdges); - - StringJoiner deletions = new StringJoiner("\n"); - IntStream.range(0, relationshipTypes.size()).forEach(idx -> - deletions.add("uid(uids" + (idx + 1) + ") <" + relationshipTypes.get(idx) + "> uid(node) .") - ); - - log.debug("Query: " + query); - log.debug("Deletions: " + deletions); - - Mutation mutation = Mutation.newBuilder() - .setDelNquads(ByteString.copyFromUtf8(deletions.toString())) - .build(); - Request request = Request.newBuilder() - .setQuery(query) - .addMutations(mutation) - .setCommitNow(true) - .build(); - - _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); - } + + "}", + urn, reverseEdges); - @Override - public void configure() { } + StringJoiner deletions = new 
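
For incoming edges the query has to walk the reverse predicate first and remember the source uids. A hypothetical rendering for two relationship types (predicate names illustrative):

    // query {
    //   node as var(func: eq(<urn>, "urn:li:dataset:example"))
    //
    //   var(func: uid(node)) @normalize {
    //     <~OwnedBy> { uids1 as uid }
    //     <~DownstreamOf> { uids2 as uid }
    //   }
    // }
    //
    // followed by one delete n-quad per relationship type:
    //   uid(uids1) <OwnedBy> uid(node) .
    //   uid(uids2) <DownstreamOf> uid(node) .
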
StringJoiner("\n"); + IntStream.range(0, relationshipTypes.size()) + .forEach( + idx -> + deletions.add( + "uid(uids" + (idx + 1) + ") <" + relationshipTypes.get(idx) + "> uid(node) .")); - @Override - public void clear() { - log.debug("dropping Dgraph data"); + log.debug("Query: " + query); + log.debug("Deletions: " + deletions); - Operation dropAll = Operation.newBuilder().setDropOp(Operation.DropOp.ALL).build(); - _dgraph.executeConsumer(client -> client.alter(dropAll)); + Mutation mutation = + Mutation.newBuilder().setDelNquads(ByteString.copyFromUtf8(deletions.toString())).build(); + Request request = + Request.newBuilder().setQuery(query).addMutations(mutation).setCommitNow(true).build(); - // drop schema cache - get_schema().clear(); + _dgraph.executeConsumer(client -> client.newTransaction().doRequest(request)); + } - // setup urn, type and key relationships - getSchema(); - } + @Override + public void configure() {} + + @Override + public void clear() { + log.debug("dropping Dgraph data"); + + Operation dropAll = Operation.newBuilder().setDropOp(Operation.DropOp.ALL).build(); + _dgraph.executeConsumer(client -> client.alter(dropAll)); + + // drop schema cache + get_schema().clear(); + + // setup urn, type and key relationships + getSchema(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java index fc1c64ea3cc03..8c4b37716e798 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/dgraph/DgraphSchema.java @@ -1,9 +1,6 @@ package com.linkedin.metadata.graph.dgraph; import io.dgraph.DgraphProto; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -11,118 +8,125 @@ import java.util.Set; import java.util.StringJoiner; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; -/** - * Provides a thread-safe Dgraph schema. Returned data structures are immutable. - */ +/** Provides a thread-safe Dgraph schema. Returned data structures are immutable. */ @Slf4j public class DgraphSchema { - private final @Nonnull Set fields; - private final @Nonnull Map> types; - private final DgraphExecutor dgraph; - - public static DgraphSchema empty() { - return new DgraphSchema(Collections.emptySet(), Collections.emptyMap(), null); - } - - public DgraphSchema(@Nonnull Set fields, @Nonnull Map> types) { - this(fields, types, null); - } - - public DgraphSchema(@Nonnull Set fields, @Nonnull Map> types, DgraphExecutor dgraph) { - this.fields = fields; - this.types = types; - this.dgraph = dgraph; - } - - /** - * Adds the given DgraphExecutor to this schema returning a new instance. - * Be aware this and the new instance share the underlying fields and types datastructures. 
- * - * @param dgraph dgraph executor to add - * @return new instance - */ - public DgraphSchema withDgraph(DgraphExecutor dgraph) { - return new DgraphSchema(this.fields, this.types, dgraph); + private final @Nonnull Set fields; + private final @Nonnull Map> types; + private final DgraphExecutor dgraph; + + public static DgraphSchema empty() { + return new DgraphSchema(Collections.emptySet(), Collections.emptyMap(), null); + } + + public DgraphSchema(@Nonnull Set fields, @Nonnull Map> types) { + this(fields, types, null); + } + + public DgraphSchema( + @Nonnull Set fields, @Nonnull Map> types, DgraphExecutor dgraph) { + this.fields = fields; + this.types = types; + this.dgraph = dgraph; + } + + /** + * Adds the given DgraphExecutor to this schema returning a new instance. Be aware this and the + * new instance share the underlying fields and types datastructures. + * + * @param dgraph dgraph executor to add + * @return new instance + */ + public DgraphSchema withDgraph(DgraphExecutor dgraph) { + return new DgraphSchema(this.fields, this.types, dgraph); + } + + public synchronized boolean isEmpty() { + return fields.isEmpty(); + } + + public synchronized Set getFields() { + // Provide an unmodifiable copy + return Collections.unmodifiableSet(new HashSet<>(fields)); + } + + public synchronized Set getFields(String typeName) { + // Provide an unmodifiable copy + return Collections.unmodifiableSet( + new HashSet<>(types.getOrDefault(typeName, Collections.emptySet()))); + } + + public synchronized Map> getTypes() { + // Provide an unmodifiable copy of the map and contained sets + return Collections.unmodifiableMap( + new HashSet<>(types.entrySet()) + .stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> Collections.unmodifiableSet(new HashSet<>(e.getValue()))))); + } + + public synchronized boolean hasType(String typeName) { + return types.containsKey(typeName); + } + + public synchronized boolean hasField(String fieldName) { + return fields.contains(fieldName); + } + + public synchronized boolean hasField(String typeName, String fieldName) { + return types.getOrDefault(typeName, Collections.emptySet()).contains(fieldName); + } + + public synchronized void ensureField( + String typeName, String fieldName, String... 
existingFieldNames) { + // quickly check if the field is known for this type + if (hasField(typeName, fieldName)) { + return; } - synchronized public boolean isEmpty() { - return fields.isEmpty(); - } - - synchronized public Set getFields() { - // Provide an unmodifiable copy - return Collections.unmodifiableSet(new HashSet<>(fields)); - } - - synchronized public Set getFields(String typeName) { - // Provide an unmodifiable copy - return Collections.unmodifiableSet(new HashSet<>(types.getOrDefault(typeName, Collections.emptySet()))); - } - - synchronized public Map> getTypes() { - // Provide an unmodifiable copy of the map and contained sets - return Collections.unmodifiableMap( - new HashSet<>(types.entrySet()).stream() - .collect(Collectors.toMap( - Map.Entry::getKey, - e -> Collections.unmodifiableSet(new HashSet<>(e.getValue())) - )) - ); - } - - synchronized public boolean hasType(String typeName) { - return types.containsKey(typeName); - } - - synchronized public boolean hasField(String fieldName) { - return fields.contains(fieldName); - } + // add type and field to schema + StringJoiner schema = new StringJoiner("\n"); - synchronized public boolean hasField(String typeName, String fieldName) { - return types.getOrDefault(typeName, Collections.emptySet()).contains(fieldName); + if (!fields.contains(fieldName)) { + schema.add(String.format("<%s>: [uid] @reverse .", fieldName)); } - synchronized public void ensureField(String typeName, String fieldName, String... existingFieldNames) { - // quickly check if the field is known for this type - if (hasField(typeName, fieldName)) { - return; - } - - // add type and field to schema - StringJoiner schema = new StringJoiner("\n"); - - if (!fields.contains(fieldName)) { - schema.add(String.format("<%s>: [uid] @reverse .", fieldName)); - } - - // update the schema on the Dgraph cluster - Set allTypesFields = new HashSet<>(Arrays.asList(existingFieldNames)); - allTypesFields.addAll(types.getOrDefault(typeName, Collections.emptySet())); - allTypesFields.add(fieldName); - - if (dgraph != null) { - log.info("Adding predicate {} for type {} to schema", fieldName, typeName); - - StringJoiner type = new StringJoiner("\n "); - allTypesFields.stream().map(t -> "<" + t + ">").forEach(type::add); - schema.add(String.format("type <%s> {\n %s\n}", typeName, type)); - log.debug("Adding to schema: " + schema); - DgraphProto.Operation setSchema = DgraphProto.Operation.newBuilder().setSchema(schema.toString()).setRunInBackground(true).build(); - dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); - } - - // now that the schema has been updated on dgraph we can cache this new type / field - // ensure type and fields of type exist - if (!types.containsKey(typeName)) { - types.put(typeName, new HashSet<>()); - } - types.get(typeName).add(fieldName); - fields.add(fieldName); + // update the schema on the Dgraph cluster + Set allTypesFields = new HashSet<>(Arrays.asList(existingFieldNames)); + allTypesFields.addAll(types.getOrDefault(typeName, Collections.emptySet())); + allTypesFields.add(fieldName); + + if (dgraph != null) { + log.info("Adding predicate {} for type {} to schema", fieldName, typeName); + + StringJoiner type = new StringJoiner("\n "); + allTypesFields.stream().map(t -> "<" + t + ">").forEach(type::add); + schema.add(String.format("type <%s> {\n %s\n}", typeName, type)); + log.debug("Adding to schema: " + schema); + DgraphProto.Operation setSchema = + DgraphProto.Operation.newBuilder() + .setSchema(schema.toString()) + 
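
The schema string assembled by ensureField is short but easy to lose in the diff. For a hypothetical new predicate DownstreamOf on type dataset, it would look roughly like this:

    // <DownstreamOf>: [uid] @reverse .
    // type <dataset> {
    //   <urn>
    //   <type>
    //   <key>
    //   <DownstreamOf>
    // }
    //
    // The @reverse directive is what later allows the <~relationship> queries
    // built by getQueryForRelatedEntities.
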
.setRunInBackground(true) + .build(); + dgraph.executeConsumer(dgraphClient -> dgraphClient.alter(setSchema)); } - synchronized public void clear() { - types.clear(); - fields.clear(); + // now that the schema has been updated on dgraph we can cache this new type / field + // ensure type and fields of type exist + if (!types.containsKey(typeName)) { + types.put(typeName, new HashSet<>()); } + types.get(typeName).add(fieldName); + fields.add(fieldName); + } + + public synchronized void clear() { + types.clear(); + fields.clear(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java index 946931a54f4ec..92960bc9222ab 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphQueryDAO.java @@ -1,7 +1,8 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; + import com.codahale.metrics.Timer; -import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.datahub.util.exception.ESQueryException; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; @@ -10,6 +11,7 @@ import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.graph.GraphFilters; import com.linkedin.metadata.graph.LineageDirection; import com.linkedin.metadata.graph.LineageRelationship; @@ -55,12 +57,7 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.*; - - -/** - * A search DAO for Elasticsearch backend. - */ +/** A search DAO for Elasticsearch backend. */ @Slf4j @RequiredArgsConstructor public class ESGraphQueryDAO { @@ -83,22 +80,29 @@ public class ESGraphQueryDAO { static final String UI = "UI"; @Nonnull - public static void addFilterToQueryBuilder(@Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { + public static void addFilterToQueryBuilder( + @Nonnull Filter filter, String node, BoolQueryBuilder rootQuery) { BoolQueryBuilder orQuery = new BoolQueryBuilder(); for (ConjunctiveCriterion conjunction : filter.getOr()) { final BoolQueryBuilder andQuery = new BoolQueryBuilder(); final List criterionArray = conjunction.getAnd(); - if (!criterionArray.stream().allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { - throw new RuntimeException("Currently Elastic query filter only supports EQUAL condition " + criterionArray); + if (!criterionArray.stream() + .allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { + throw new RuntimeException( + "Currently Elastic query filter only supports EQUAL condition " + criterionArray); } criterionArray.forEach( - criterion -> andQuery.must(QueryBuilders.termQuery(node + "." + criterion.getField(), criterion.getValue()))); + criterion -> + andQuery.must( + QueryBuilders.termQuery( + node + "." 
+ criterion.getField(), criterion.getValue()))); orQuery.should(andQuery); } rootQuery.must(orQuery); } - private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, final int offset, final int count) { + private SearchResponse executeSearchQuery( + @Nonnull final QueryBuilder query, final int offset, final int count) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -121,8 +125,12 @@ private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, fin } } - private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, @Nullable Object[] sort, @Nullable String pitId, - @Nonnull String keepAlive, final int count) { + private SearchResponse executeSearchQuery( + @Nonnull final QueryBuilder query, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + final int count) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -141,36 +149,51 @@ private SearchResponse executeSearchQuery(@Nonnull final QueryBuilder query, @Nu log.error("Search query failed", e); throw new ESQueryException("Search query failed:", e); } - } - public SearchResponse getSearchResponse(@Nullable final List sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - final int offset, final int count) { + public SearchResponse getSearchResponse( + @Nullable final List sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count) { BoolQueryBuilder finalQuery = - buildQuery(sourceTypes, sourceEntityFilter, destinationTypes, destinationEntityFilter, relationshipTypes, + buildQuery( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, relationshipFilter); return executeSearchQuery(finalQuery, offset, count); } - public static BoolQueryBuilder buildQuery(@Nullable final List sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + public static BoolQueryBuilder buildQuery( + @Nullable final List sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); // set source filter - String sourceNode = relationshipDirection == RelationshipDirection.OUTGOING ? SOURCE : DESTINATION; + String sourceNode = + relationshipDirection == RelationshipDirection.OUTGOING ? 
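
addFilterToQueryBuilder maps DataHub's disjunction-of-conjunctions filters directly onto a bool query: one should clause per ConjunctiveCriterion, each holding a must list. A minimal sketch with illustrative field names and values:

    import org.opensearch.index.query.BoolQueryBuilder;
    import org.opensearch.index.query.QueryBuilders;

    static BoolQueryBuilder orOfAnds() {
      BoolQueryBuilder orQuery = QueryBuilders.boolQuery();

      // one AND group (a ConjunctiveCriterion): all terms must match
      BoolQueryBuilder andQuery = QueryBuilders.boolQuery();
      andQuery.must(QueryBuilders.termQuery("source.platform", "urn:li:dataPlatform:kafka"));
      andQuery.must(QueryBuilders.termQuery("source.origin", "PROD"));

      // groups combine as OR via "should"
      orQuery.should(andQuery);
      return orQuery;
    }
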
SOURCE : DESTINATION; if (sourceTypes != null && sourceTypes.size() > 0) { finalQuery.must(QueryBuilders.termsQuery(sourceNode + ".entityType", sourceTypes)); } addFilterToQueryBuilder(sourceEntityFilter, sourceNode, finalQuery); // set destination filter - String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE; + String destinationNode = + relationshipDirection == RelationshipDirection.OUTGOING ? DESTINATION : SOURCE; if (destinationTypes != null && destinationTypes.size() > 0) { finalQuery.must(QueryBuilders.termsQuery(destinationNode + ".entityType", destinationTypes)); } @@ -180,16 +203,24 @@ public static BoolQueryBuilder buildQuery(@Nullable final List sourceTyp if (relationshipTypes.size() > 0) { BoolQueryBuilder relationshipQuery = QueryBuilders.boolQuery(); relationshipTypes.forEach( - relationshipType -> relationshipQuery.should(QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); + relationshipType -> + relationshipQuery.should( + QueryBuilders.termQuery(RELATIONSHIP_TYPE, relationshipType))); finalQuery.must(relationshipQuery); } return finalQuery; } @WithSpan - public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, - int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + public LineageResponse getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { List result = new ArrayList<>(); long currentTime = System.currentTimeMillis(); long remainingTime = graphQueryConfiguration.getTimeoutSeconds() * 1000; @@ -207,8 +238,11 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect } if (remainingTime < 0) { - log.info("Timed out while fetching lineage for {} with direction {}, maxHops {}. Returning results so far", - entityUrn, direction, maxHops); + log.info( + "Timed out while fetching lineage for {} with direction {}, maxHops {}. 
Returning results so far", + entityUrn, + direction, + maxHops); break; } @@ -225,7 +259,10 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect startTimeMillis, endTimeMillis); result.addAll(oneHopRelationships); - currentLevel = oneHopRelationships.stream().map(LineageRelationship::getEntity).collect(Collectors.toList()); + currentLevel = + oneHopRelationships.stream() + .map(LineageRelationship::getEntity) + .collect(Collectors.toList()); currentTime = System.currentTimeMillis(); remainingTime = timeoutTime - currentTime; } @@ -235,7 +272,10 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect if (offset >= response.getTotal()) { subList = Collections.emptyList(); } else { - subList = response.getLineageRelationships().subList(offset, Math.min(offset + count, response.getTotal())); + subList = + response + .getLineageRelationships() + .subList(offset, Math.min(offset + count, response.getTotal())); } return new LineageResponse(response.getTotal(), subList); @@ -243,23 +283,35 @@ public LineageResponse getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirect // Get 1-hop lineage relationships asynchronously in batches with timeout @WithSpan - public List getLineageRelationshipsInBatches(@Nonnull List entityUrns, - @Nonnull LineageDirection direction, GraphFilters graphFilters, Set visitedEntities, int numHops, - long remainingTime, Map existingPaths, @Nullable Long startTimeMillis, + public List getLineageRelationshipsInBatches( + @Nonnull List entityUrns, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + Set visitedEntities, + int numHops, + long remainingTime, + Map existingPaths, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { List> batches = Lists.partition(entityUrns, graphQueryConfiguration.getBatchSize()); - return ConcurrencyUtils.getAllCompleted(batches.stream() - .map(batchUrns -> CompletableFuture.supplyAsync( - () -> getLineageRelationships( - batchUrns, - direction, - graphFilters, - visitedEntities, - numHops, - existingPaths, - startTimeMillis, - endTimeMillis))) - .collect(Collectors.toList()), remainingTime, TimeUnit.MILLISECONDS) + return ConcurrencyUtils.getAllCompleted( + batches.stream() + .map( + batchUrns -> + CompletableFuture.supplyAsync( + () -> + getLineageRelationships( + batchUrns, + direction, + graphFilters, + visitedEntities, + numHops, + existingPaths, + startTimeMillis, + endTimeMillis))) + .collect(Collectors.toList()), + remainingTime, + TimeUnit.MILLISECONDS) .stream() .flatMap(List::stream) .collect(Collectors.toList()); @@ -267,42 +319,56 @@ public List getLineageRelationshipsInBatches(@Nonnull List< // Get 1-hop lineage relationships @WithSpan - private List getLineageRelationships(@Nonnull List entityUrns, - @Nonnull LineageDirection direction, GraphFilters graphFilters, Set visitedEntities, int numHops, - Map existingPaths, @Nullable Long startTimeMillis, + private List getLineageRelationships( + @Nonnull List entityUrns, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + Set visitedEntities, + int numHops, + Map existingPaths, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { - Map> urnsPerEntityType = entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); - Map> edgesPerEntityType = urnsPerEntityType.keySet() - .stream() - .collect(Collectors.toMap(Function.identity(), - entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); + Map> urnsPerEntityType = + 
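
The batching above leans on DataHub's ConcurrencyUtils helper; the shape of the fan-out, minus that helper, is roughly the following. Batch size and deadline are illustrative, and note one difference: the real helper keeps whatever batches completed instead of failing the whole level on timeout.

    import com.google.common.collect.Lists;
    import java.util.List;
    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.TimeUnit;
    import java.util.function.Function;
    import java.util.stream.Collectors;

    static <T> List<T> fetchInBatches(
        List<String> urns, Function<List<String>, List<T>> fetchBatch, long remainingMillis)
        throws Exception {
      List<CompletableFuture<List<T>>> futures =
          Lists.partition(urns, 1000).stream()
              .map(batch -> CompletableFuture.supplyAsync(() -> fetchBatch.apply(batch)))
              .collect(Collectors.toList());
      // a single shared deadline for the whole level of the lineage walk
      CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
          .get(remainingMillis, TimeUnit.MILLISECONDS);
      return futures.stream().flatMap(f -> f.join().stream()).collect(Collectors.toList());
    }
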
entityUrns.stream().collect(Collectors.groupingBy(Urn::getEntityType)); + Map> edgesPerEntityType = + urnsPerEntityType.keySet().stream() + .collect( + Collectors.toMap( + Function.identity(), + entityType -> lineageRegistry.getLineageRelationships(entityType, direction))); BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Get all relation types relevant to the set of urns to hop from - urnsPerEntityType.forEach((entityType, urns) -> finalQuery.should( - getQueryForLineage( - urns, - edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), - graphFilters, - startTimeMillis, - endTimeMillis))); - SearchResponse response = executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); + urnsPerEntityType.forEach( + (entityType, urns) -> + finalQuery.should( + getQueryForLineage( + urns, + edgesPerEntityType.getOrDefault(entityType, Collections.emptyList()), + graphFilters, + startTimeMillis, + endTimeMillis))); + SearchResponse response = + executeSearchQuery(finalQuery, 0, graphQueryConfiguration.getMaxResult()); Set entityUrnSet = new HashSet<>(entityUrns); // Get all valid edges given the set of urns to hop from - Set> validEdges = edgesPerEntityType.entrySet() - .stream() - .flatMap(entry -> entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) - .collect(Collectors.toSet()); - return extractRelationships(entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); + Set> validEdges = + edgesPerEntityType.entrySet().stream() + .flatMap( + entry -> + entry.getValue().stream().map(edgeInfo -> Pair.of(entry.getKey(), edgeInfo))) + .collect(Collectors.toSet()); + return extractRelationships( + entityUrnSet, response, validEdges, visitedEntities, numHops, existingPaths); } // Get search query for given list of edges and source urns @VisibleForTesting public static QueryBuilder getQueryForLineage( - @Nonnull List urns, - @Nonnull List lineageEdges, - @Nonnull GraphFilters graphFilters, - @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis) { + @Nonnull List urns, + @Nonnull List lineageEdges, + @Nonnull GraphFilters graphFilters, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { BoolQueryBuilder query = QueryBuilders.boolQuery(); if (lineageEdges.isEmpty()) { return query; @@ -328,43 +394,46 @@ public static QueryBuilder getQueryForLineage( if (startTimeMillis != null && endTimeMillis != null) { query.must(TimeFilterUtils.getEdgeTimeFilterQuery(startTimeMillis, endTimeMillis)); } else { - log.debug(String.format( - "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", - startTimeMillis, - endTimeMillis)); + log.debug( + String.format( + "Empty time filter range provided: start time %s, end time: %s. Skipping application of time filters", + startTimeMillis, endTimeMillis)); } return query; } /** - * Adds an individual relationship edge to a running set of unique paths to each node in the graph. + * Adds an individual relationship edge to a running set of unique paths to each node in the + * graph. * - * Specifically, this method updates 'existingPaths', which is a map of an entity urn representing a node in the - * lineage graph to the full paths that can be traversed to reach it from a the origin node for which lineage - * was requested. + *
<p>Specifically, this method updates 'existingPaths', which is a map of an entity urn
+ * representing a node in the lineage graph to the full paths that can be traversed to reach it
+ * from the origin node for which lineage was requested.
*
- * This method strictly assumes that edges are being added IN ORDER, level-by-level working outwards from the originally
- * requested source node. If edges are added to the path set in an out of order manner, then the paths to a given node
- * may be partial / incomplete.
+ *
<p>This method strictly assumes that edges are being added IN ORDER, level-by-level working
+ * outwards from the originally requested source node. If edges are added to the path set in an
+ * out of order manner, then the paths to a given node may be partial / incomplete.
*
- * Note that calling this method twice with the same edge is not safe. It will result in duplicate paths being appended
- * into the list of paths to the provided child urn.
+ *
<p>Note that calling this method twice with the same edge is not safe. It will result in
+ * duplicate paths being appended into the list of paths to the provided child urn.
*
- * @param existingPaths a running set of unique, uni-directional paths to each node in the graph starting from the original root node
- * for which lineage was requested.
- * @param parentUrn the "parent" node (or source node) in the edge to add. This is a logical source node in a uni-directional path from the source
- * to the destination node. Note that this is NOT always the URN corresponding to the "source" field that is physically stored
- * inside the Graph Store.
- * @param childUrn the "child" node (or dest node) in the edge to add. This is a logical dest node in a uni-directional path from the
- * source to the destination node. Note that this is NOT always the URN corresponding to the "destination" field that is
- * physically stored inside the Graph Store.
+ * @param existingPaths a running set of unique, uni-directional paths to each node in the graph
+ * starting from the original root node for which lineage was requested.
+ * @param parentUrn the "parent" node (or source node) in the edge to add. This is a logical
+ * source node in a uni-directional path from the source to the destination node. Note that
+ * this is NOT always the URN corresponding to the "source" field that is physically stored
+ * inside the Graph Store.
+ * @param childUrn the "child" node (or dest node) in the edge to add. This is a logical dest node
+ * in a uni-directional path from the source to the destination node. Note that this is NOT
+ * always the URN corresponding to the "destination" field that is physically stored inside
+ * the Graph Store.
*/
@VisibleForTesting
public static void addEdgeToPaths(
- @Nonnull final Map<Urn, UrnArrayArray> existingPaths,
- @Nonnull final Urn parentUrn,
- @Nonnull final Urn childUrn) {
+ @Nonnull final Map<Urn, UrnArrayArray> existingPaths,
+ @Nonnull final Urn parentUrn,
+ @Nonnull final Urn childUrn) {
// Collect all full-paths to this child node. This is what will be returned.
UrnArrayArray pathsToParent = existingPaths.get(parentUrn); if (pathsToParent != null && pathsToParent.size() > 0) { @@ -388,16 +457,22 @@ public static void addEdgeToPaths( } } - // Given set of edges and the search response, extract all valid edges that originate from the input entityUrns + // Given set of edges and the search response, extract all valid edges that originate from the + // input entityUrns @WithSpan - private static List extractRelationships(@Nonnull Set entityUrns, - @Nonnull SearchResponse searchResponse, Set> validEdges, Set visitedEntities, - int numHops, Map existingPaths) { + private static List extractRelationships( + @Nonnull Set entityUrns, + @Nonnull SearchResponse searchResponse, + Set> validEdges, + Set visitedEntities, + int numHops, + Map existingPaths) { final List result = new LinkedList<>(); final SearchHit[] hits = searchResponse.getHits().getHits(); for (SearchHit hit : hits) { final Map document = hit.getSourceAsMap(); - final Urn sourceUrn = UrnUtils.getUrn(((Map) document.get(SOURCE)).get("urn").toString()); + final Urn sourceUrn = + UrnUtils.getUrn(((Map) document.get(SOURCE)).get("urn").toString()); final Urn destinationUrn = UrnUtils.getUrn(((Map) document.get(DESTINATION)).get("urn").toString()); final String type = document.get(RELATIONSHIP_TYPE).toString(); @@ -406,9 +481,11 @@ private static List extractRelationships(@Nonnull Set final Number updatedOnNumber = (Number) document.getOrDefault(UPDATED_ON, null); final Long updatedOn = updatedOnNumber != null ? updatedOnNumber.longValue() : null; final String createdActorString = (String) document.getOrDefault(CREATED_ACTOR, null); - final Urn createdActor = createdActorString == null ? null : UrnUtils.getUrn(createdActorString); + final Urn createdActor = + createdActorString == null ? null : UrnUtils.getUrn(createdActorString); final String updatedActorString = (String) document.getOrDefault(UPDATED_ACTOR, null); - final Urn updatedActor = updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); + final Urn updatedActor = + updatedActorString == null ? null : UrnUtils.getUrn(updatedActorString); final Map properties; if (document.containsKey(PROPERTIES) && document.get(PROPERTIES) instanceof Map) { properties = (Map) document.get(PROPERTIES); @@ -422,9 +499,14 @@ private static List extractRelationships(@Nonnull Set // Skip if already visited // Skip if edge is not a valid outgoing edge // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(destinationUrn) && validEdges.contains( - Pair.of(sourceUrn.getEntityType(), - new EdgeInfo(type, RelationshipDirection.OUTGOING, destinationUrn.getEntityType().toLowerCase())))) { + if (!visitedEntities.contains(destinationUrn) + && validEdges.contains( + Pair.of( + sourceUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.OUTGOING, + destinationUrn.getEntityType().toLowerCase())))) { visitedEntities.add(destinationUrn); // Append the edge to a set of unique graph paths. addEdgeToPaths(existingPaths, sourceUrn, destinationUrn); @@ -433,7 +515,9 @@ private static List extractRelationships(@Nonnull Set type, destinationUrn, numHops, - existingPaths.getOrDefault(destinationUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. + existingPaths.getOrDefault( + destinationUrn, + new UrnArrayArray()), // Fetch the paths to the next level entity. 
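
Restated with plain collections (types illustrative; the real code uses UrnArray and UrnArrayArray), the path bookkeeping in addEdgeToPaths works like this:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    static void addEdge(Map<String, List<List<String>>> paths, String parent, String child) {
      List<List<String>> toParent = paths.get(parent);
      List<List<String>> toChild = paths.computeIfAbsent(child, k -> new ArrayList<>());
      if (toParent != null && !toParent.isEmpty()) {
        // lengthen every known path to the parent by one hop
        for (List<String> path : toParent) {
          List<String> extended = new ArrayList<>(path); // clone; parent paths stay intact
          extended.add(child);
          toChild.add(extended);
        }
      } else {
        // parent is the root of the walk: the only path is the direct edge
        List<String> direct = new ArrayList<>();
        direct.add(parent);
        direct.add(child);
        toChild.add(direct);
      }
    }
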
createdOn, createdActor, updatedOn, @@ -448,21 +532,29 @@ private static List extractRelationships(@Nonnull Set // Skip if already visited // Skip if edge is not a valid outgoing edge // TODO: Verify if this honors multiple paths to the same node. - if (!visitedEntities.contains(sourceUrn) && validEdges.contains( - Pair.of(destinationUrn.getEntityType(), new EdgeInfo(type, RelationshipDirection.INCOMING, sourceUrn.getEntityType().toLowerCase())))) { + if (!visitedEntities.contains(sourceUrn) + && validEdges.contains( + Pair.of( + destinationUrn.getEntityType(), + new EdgeInfo( + type, + RelationshipDirection.INCOMING, + sourceUrn.getEntityType().toLowerCase())))) { visitedEntities.add(sourceUrn); // Append the edge to a set of unique graph paths. addEdgeToPaths(existingPaths, destinationUrn, sourceUrn); - final LineageRelationship relationship = createLineageRelationship( - type, - sourceUrn, - numHops, - existingPaths.getOrDefault(sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. - createdOn, - createdActor, - updatedOn, - updatedActor, - isManual); + final LineageRelationship relationship = + createLineageRelationship( + type, + sourceUrn, + numHops, + existingPaths.getOrDefault( + sourceUrn, new UrnArrayArray()), // Fetch the paths to the next level entity. + createdOn, + createdActor, + updatedOn, + updatedActor, + isManual); result.add(relationship); } } @@ -479,10 +571,13 @@ private static LineageRelationship createLineageRelationship( @Nullable final Urn createdActor, @Nullable final Long updatedOn, @Nullable final Urn updatedActor, - final boolean isManual - ) { + final boolean isManual) { final LineageRelationship relationship = - new LineageRelationship().setType(type).setEntity(entityUrn).setDegree(numHops).setPaths(paths); + new LineageRelationship() + .setType(type) + .setEntity(entityUrn) + .setDegree(numHops) + .setPaths(paths); if (createdOn != null) { relationship.setCreatedOn(createdOn); } @@ -507,18 +602,19 @@ private static BoolQueryBuilder getOutGoingEdgeQuery( outgoingEdgeQuery.must(buildUrnFilters(urns, SOURCE)); outgoingEdgeQuery.must(buildEdgeFilters(outgoingEdges)); outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - outgoingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + outgoingEdgeQuery.must( + buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return outgoingEdgeQuery; } private static BoolQueryBuilder getIncomingEdgeQuery( - @Nonnull List urns, List incomingEdges, - @Nonnull GraphFilters graphFilters) { + @Nonnull List urns, List incomingEdges, @Nonnull GraphFilters graphFilters) { BoolQueryBuilder incomingEdgeQuery = QueryBuilders.boolQuery(); incomingEdgeQuery.must(buildUrnFilters(urns, DESTINATION)); incomingEdgeQuery.must(buildEdgeFilters(incomingEdges)); incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), SOURCE)); - incomingEdgeQuery.must(buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); + incomingEdgeQuery.must( + buildEntityTypesFilter(graphFilters.getAllowedEntityTypes(), DESTINATION)); return incomingEdgeQuery; } @@ -530,16 +626,21 @@ private static UrnArray clonePath(final UrnArray basePath) { } } - private static QueryBuilder buildEntityTypesFilter(@Nonnull List entityTypes, @Nonnull String prefix) { - return QueryBuilders.termsQuery(prefix + ".entityType", entityTypes.stream().map(Object::toString).collect(Collectors.toList())); + private static 
QueryBuilder buildEntityTypesFilter( + @Nonnull List entityTypes, @Nonnull String prefix) { + return QueryBuilders.termsQuery( + prefix + ".entityType", + entityTypes.stream().map(Object::toString).collect(Collectors.toList())); } private static QueryBuilder buildUrnFilters(@Nonnull List urns, @Nonnull String prefix) { - return QueryBuilders.termsQuery(prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); + return QueryBuilders.termsQuery( + prefix + ".urn", urns.stream().map(Object::toString).collect(Collectors.toList())); } private static QueryBuilder buildEdgeFilters(@Nonnull List edgeInfos) { - return QueryBuilders.termsQuery("relationshipType", + return QueryBuilders.termsQuery( + "relationshipType", edgeInfos.stream().map(EdgeInfo::getType).distinct().collect(Collectors.toList())); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java index f8b0e8a291e7a..5d722a034fafc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ESGraphWriteDAO.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.buildQuery; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.RelationshipFilter; @@ -16,10 +19,6 @@ import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.reindex.BulkByScrollResponse; -import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.buildQuery; -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; - - @Slf4j @RequiredArgsConstructor public class ESGraphWriteDAO { @@ -36,8 +35,8 @@ public class ESGraphWriteDAO { * @param docId the ID of the document */ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { - final UpdateRequest updateRequest = new UpdateRequest( - indexConvention.getIndexName(INDEX_NAME), docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexConvention.getIndexName(INDEX_NAME), docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -56,15 +55,24 @@ public void deleteDocument(@Nonnull String docId) { bulkProcessor.add(deleteRequest); } - public BulkByScrollResponse deleteByQuery(@Nullable final String sourceType, @Nonnull final Filter sourceEntityFilter, - @Nullable final String destinationType, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { + public BulkByScrollResponse deleteByQuery( + @Nullable final String sourceType, + @Nonnull final Filter sourceEntityFilter, + @Nullable final String destinationType, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter) { BoolQueryBuilder finalQuery = - buildQuery(sourceType == null ? ImmutableList.of() : ImmutableList.of(sourceType), sourceEntityFilter, - destinationType == null ? ImmutableList.of() : ImmutableList.of(destinationType), destinationEntityFilter, - relationshipTypes, relationshipFilter); + buildQuery( + sourceType == null ? 
ImmutableList.of() : ImmutableList.of(sourceType), + sourceEntityFilter, + destinationType == null ? ImmutableList.of() : ImmutableList.of(destinationType), + destinationEntityFilter, + relationshipTypes, + relationshipFilter); - return bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) - .orElse(null); + return bulkProcessor + .deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)) + .orElse(null); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java index 5fdf4d45ffa3b..6c828c0e7c6ae 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/ElasticSearchGraphService.java @@ -48,7 +48,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.index.query.QueryBuilders; - @Slf4j @RequiredArgsConstructor public class ElasticSearchGraphService implements GraphService, ElasticSearchIndexed { @@ -99,10 +98,7 @@ private String toDocument(@Nonnull final Edge edge) { throw new UnsupportedOperationException( String.format( "Tried setting properties on graph edge but property value type is not supported. Key: %s, Value: %s ", - entry.getKey(), - entry.getValue() - ) - ); + entry.getKey(), entry.getValue())); } } searchDocument.set("properties", propertiesObject); @@ -113,8 +109,11 @@ private String toDocument(@Nonnull final Edge edge) { private String toDocId(@Nonnull final Edge edge) { String rawDocId = - edge.getSource().toString() + DOC_DELIMETER + edge.getRelationshipType() + DOC_DELIMETER + edge.getDestination() - .toString(); + edge.getSource().toString() + + DOC_DELIMETER + + edge.getRelationshipType() + + DOC_DELIMETER + + edge.getDestination().toString(); try { byte[] bytesOfRawDocID = rawDocId.getBytes(StandardCharsets.UTF_8); @@ -160,48 +159,55 @@ public RelatedEntitiesResult findRelatedEntities( @Nonnull final RelationshipFilter relationshipFilter, final int offset, final int count) { - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); - String destinationNode = relationshipDirection == RelationshipDirection.OUTGOING ? "destination" : "source"; - - SearchResponse response = _graphReadDAO.getSearchResponse( - sourceTypes, - sourceEntityFilter, - destinationTypes, - destinationEntityFilter, - relationshipTypes, - relationshipFilter, - offset, - count - ); + String destinationNode = + relationshipDirection == RelationshipDirection.OUTGOING ? 
"destination" : "source"; + + SearchResponse response = + _graphReadDAO.getSearchResponse( + sourceTypes, + sourceEntityFilter, + destinationTypes, + destinationEntityFilter, + relationshipTypes, + relationshipFilter, + offset, + count); if (response == null) { return new RelatedEntitiesResult(offset, 0, 0, ImmutableList.of()); } int totalCount = (int) response.getHits().getTotalHits().value; - final List relationships = Arrays.stream(response.getHits().getHits()) - .map(hit -> { - final String urnStr = - ((HashMap) hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)).getOrDefault( - "urn", null); - final String relationshipType = (String) hit.getSourceAsMap().get("relationshipType"); - - if (urnStr == null || relationshipType == null) { - log.error(String.format( - "Found null urn string, relationship type, aspect name or path spec in Elastic index. " - + "urnStr: %s, relationshipType: %s", - urnStr, relationshipType)); - return null; - } - - return new RelatedEntity(relationshipType, urnStr); - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + final List relationships = + Arrays.stream(response.getHits().getHits()) + .map( + hit -> { + final String urnStr = + ((HashMap) + hit.getSourceAsMap().getOrDefault(destinationNode, EMPTY_HASH)) + .getOrDefault("urn", null); + final String relationshipType = + (String) hit.getSourceAsMap().get("relationshipType"); + + if (urnStr == null || relationshipType == null) { + log.error( + String.format( + "Found null urn string, relationship type, aspect name or path spec in Elastic index. " + + "urnStr: %s, relationshipType: %s", + urnStr, relationshipType)); + return null; + } + + return new RelatedEntity(relationshipType, urnStr); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); return new RelatedEntitiesResult(offset, relationships.size(), totalCount, relationships); } @@ -209,22 +215,18 @@ public RelatedEntitiesResult findRelatedEntities( @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, GraphFilters graphFilters, int offset, - int count, int maxHops) { + int count, + int maxHops) { ESGraphQueryDAO.LineageResponse lineageResponse = _graphReadDAO.getLineage( - entityUrn, - direction, - graphFilters, - offset, - count, - maxHops, - null, - null); - return new EntityLineageResult().setRelationships( - new LineageRelationshipArray(lineageResponse.getLineageRelationships())) + entityUrn, direction, graphFilters, offset, count, maxHops, null, null); + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageResponse.getLineageRelationships())) .setStart(offset) .setCount(count) .setTotal(lineageResponse.getTotal()); @@ -233,10 +235,15 @@ public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, GraphFilters graphFilters, int offset, - int count, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { ESGraphQueryDAO.LineageResponse lineageResponse = _graphReadDAO.getLineage( entityUrn, @@ -247,8 +254,8 @@ public 
EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDi maxHops, startTimeMillis, endTimeMillis); - return new EntityLineageResult().setRelationships( - new LineageRelationshipArray(lineageResponse.getLineageRelationships())) + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageResponse.getLineageRelationships())) .setStart(offset) .setCount(count) .setTotal(lineageResponse.getTotal()); @@ -262,7 +269,9 @@ private Filter createUrnFilter(@Nonnull final Urn urn) { criterion.setField("urn"); criterion.setValue(urn.toString()); criterionArray.add(criterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criterionArray)))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criterionArray)))); return filter; } @@ -272,26 +281,16 @@ public void removeNode(@Nonnull final Urn urn) { Filter emptyFilter = new Filter().setOr(new ConjunctiveCriterionArray()); List relationshipTypes = new ArrayList<>(); - RelationshipFilter outgoingFilter = new RelationshipFilter().setDirection(RelationshipDirection.OUTGOING); - RelationshipFilter incomingFilter = new RelationshipFilter().setDirection(RelationshipDirection.INCOMING); + RelationshipFilter outgoingFilter = + new RelationshipFilter().setDirection(RelationshipDirection.OUTGOING); + RelationshipFilter incomingFilter = + new RelationshipFilter().setDirection(RelationshipDirection.INCOMING); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - outgoingFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, outgoingFilter); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - incomingFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, incomingFilter); return; } @@ -305,13 +304,7 @@ public void removeEdgesFromNode( Filter emptyFilter = new Filter().setOr(new ConjunctiveCriterionArray()); _graphWriteDAO.deleteByQuery( - null, - urnFilter, - null, - emptyFilter, - relationshipTypes, - relationshipFilter - ); + null, urnFilter, null, emptyFilter, relationshipTypes, relationshipFilter); } @Override @@ -328,8 +321,11 @@ public void configure() { @Override public List buildReindexConfigs() throws IOException { - return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME), - GraphRelationshipMappingsBuilder.getMappings(), Collections.emptyMap())); + return List.of( + _indexBuilder.buildReindexState( + _indexConvention.getIndexName(INDEX_NAME), + GraphRelationshipMappingsBuilder.getMappings(), + Collections.emptyMap())); } @Override @@ -340,7 +336,8 @@ public void reindexAll() { @VisibleForTesting @Override public void clear() { - _esBulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); + _esBulkProcessor.deleteByQuery( + QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); } @Override diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java index cf97cf56023ad..21f2bf6c89204 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/GraphRelationshipMappingsBuilder.java @@ -5,11 +5,10 @@ import java.util.Map; 
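// A rough, assumed sketch of the edge document shape these mappings describe; field
// names come from the builders below and the query DAO above, values are illustrative
// placeholders only:
//   {
//     "source":           { "urn": "urn:li:dataset:(...)", "entityType": "dataset" },
//     "destination":      { "urn": "urn:li:dataset:(...)", "entityType": "dataset" },
//     "relationshipType": "DownstreamOf",
//     "properties":       { "source": "UI" }
//   }
// where urn and entityType are indexed as keyword fields.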
import lombok.extern.slf4j.Slf4j; - @Slf4j public class GraphRelationshipMappingsBuilder { - private GraphRelationshipMappingsBuilder() { } + private GraphRelationshipMappingsBuilder() {} public static Map getMappings() { Map mappings = new HashMap<>(); @@ -27,19 +26,19 @@ private static Map getMappingsForKeyword() { private static Map getMappingsForEntity() { - Map mappings = ImmutableMap.builder() - .put("urn", getMappingsForKeyword()) - .put("entityType", getMappingsForKeyword()) - .build(); + Map mappings = + ImmutableMap.builder() + .put("urn", getMappingsForKeyword()) + .put("entityType", getMappingsForKeyword()) + .build(); return ImmutableMap.of("properties", mappings); } private static Map getMappingsForEdgeProperties() { - Map propertyMappings = ImmutableMap.builder() - .put("source", getMappingsForKeyword()) - .build(); + Map propertyMappings = + ImmutableMap.builder().put("source", getMappingsForKeyword()).build(); return ImmutableMap.of("properties", propertyMappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java index 1df938f902e0f..7ee84ce834cfa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/elastic/TimeFilterUtils.java @@ -1,28 +1,33 @@ package com.linkedin.metadata.graph.elastic; +import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.*; + import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; -import static com.linkedin.metadata.graph.elastic.ESGraphQueryDAO.*; - @Slf4j public class TimeFilterUtils { /** - * In order to filter for edges that fall into a specific filter window, we perform a range-overlap query. - * Note that both a start time and an end time must be provided in order to add the filters. + * In order to filter for edges that fall into a specific filter window, we perform a + * range-overlap query. Note that both a start time and an end time must be provided in order to + * add the filters. * - * A range overlap query compares 2 time windows for ANY overlap. This essentially equates to a union operation. - * Each window is characterized by 2 points in time: a start time (e.g. created time of the edge) and an end time - * (e.g. last updated time of an edge). + *
<p>
A range overlap query compares 2 time windows for ANY overlap. This essentially equates to a + * union operation. Each window is characterized by 2 points in time: a start time (e.g. created + * time of the edge) and an end time (e.g. last updated time of an edge). * * @param startTimeMillis the start of the time filter window * @param endTimeMillis the end of the time filter window */ - public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, final long endTimeMillis) { - log.debug(String.format("Adding edge time filters for start time: %s, end time: %s", startTimeMillis, endTimeMillis)); + public static QueryBuilder getEdgeTimeFilterQuery( + final long startTimeMillis, final long endTimeMillis) { + log.debug( + String.format( + "Adding edge time filters for start time: %s, end time: %s", + startTimeMillis, endTimeMillis)); /* * One of the following must be true in order for the edge to be returned (should = OR) * @@ -30,7 +35,7 @@ public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, fi * 2. The createdOn and updatedOn window does not exist on the edge at all (support legacy cases) * 3. Special lineage case: The edge is marked as a "manual" edge, meaning that the time filters should NOT be applied. */ - BoolQueryBuilder timeFilterQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder timeFilterQuery = QueryBuilders.boolQuery(); timeFilterQuery.should(buildTimeWindowFilter(startTimeMillis, endTimeMillis)); timeFilterQuery.should(buildTimestampsMissingFilter()); timeFilterQuery.should(buildManualLineageFilter()); @@ -38,61 +43,54 @@ public static QueryBuilder getEdgeTimeFilterQuery(final long startTimeMillis, fi } /** - * Builds a filter that compares 2 windows on a timeline and returns true for any overlap. This logic - * is a bit tricky so change with caution. - * - * The first window comes from start time and end time provided by the user. - * The second window comes from the createdOn and updatedOn timestamps present on graph edges. + * Builds a filter that compares 2 windows on a timeline and returns true for any overlap. This + * logic is a bit tricky so change with caution. * - * Also accounts for the case where createdOn or updatedOn is MISSING, and in such cases performs - * a point overlap instead of a range overlap. + *
<p>
The first window comes from start time and end time provided by the user. The second window + * comes from the createdOn and updatedOn timestamps present on graph edges. * - * Range Examples: + *
<p>
Also accounts for the case where createdOn or updatedOn is MISSING, and in such cases + * performs a point overlap instead of a range overlap. * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + *
<p>
Range Examples: * - * = true + *
<p>
start time -> end time |-----| createdOn -> updatedOn |-----| * - * start time -> end time |------| - * createdOn -> updatedOn |--| + *
<p>
= true * - * = true + *
<p>
start time -> end time |------| createdOn -> updatedOn |--| * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + *
<p>
= true * - * = true + *
<p>
start time -> end time |-----| createdOn -> updatedOn |-----| * - * start time -> end time |-----| - * createdOn -> updatedOn |-----| + *
<p>
= true * - * = false + *
<p>
start time -> end time |-----| createdOn -> updatedOn |-----| * + *
<p>
= false * - * Point Examples: + *
<p>
Point Examples: * - * start time -> end time |-----| - * updatedOn | + *
<p>
start time -> end time |-----| updatedOn | * - * = true + *
<p>
= true * - * start time -> end time |-----| - * updatedOn | + *
<p>
start time -> end time |-----| updatedOn | * - * = false + *
<p>
= false * - * and same for createdOn. + *
<p>
and same for createdOn. * - * Assumptions are that startTimeMillis is always before or equal to endTimeMillis, - * and createdOn is always before or equal to updatedOn. + *
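<p>Worked illustration (assuming the edge is not a manual "UI" edge): for a filter
* window [100, 200], an edge with createdOn = 150 is returned (point overlap), an edge
* with createdOn = 120 and updatedOn = 180 is returned (range overlap), and an edge
* with createdOn = 220 and updatedOn = 250 is not.
*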
<p>
Assumptions are that startTimeMillis is always before or equal to endTimeMillis, and + * createdOn is always before or equal to updatedOn. * * @param startTimeMillis the start time of the window in milliseconds * @param endTimeMillis the end time of the window in milliseconds - * * @return Query Builder with time window filters appended. */ - private static QueryBuilder buildTimeWindowFilter(final long startTimeMillis, final long endTimeMillis) { + private static QueryBuilder buildTimeWindowFilter( + final long startTimeMillis, final long endTimeMillis) { final BoolQueryBuilder timeWindowQuery = QueryBuilders.boolQuery(); /* @@ -107,12 +105,14 @@ private static QueryBuilder buildTimeWindowFilter(final long startTimeMillis, fi // Build filter comparing createdOn time to startTime->endTime window. BoolQueryBuilder createdOnFilter = QueryBuilders.boolQuery(); createdOnFilter.must(QueryBuilders.existsQuery(CREATED_ON)); - createdOnFilter.must(QueryBuilders.rangeQuery(CREATED_ON).gte(startTimeMillis).lte(endTimeMillis)); + createdOnFilter.must( + QueryBuilders.rangeQuery(CREATED_ON).gte(startTimeMillis).lte(endTimeMillis)); // Build filter comparing updatedOn time to startTime->endTime window. BoolQueryBuilder updatedOnFilter = QueryBuilders.boolQuery(); updatedOnFilter.must(QueryBuilders.existsQuery(UPDATED_ON)); - updatedOnFilter.must(QueryBuilders.rangeQuery(UPDATED_ON).gte(startTimeMillis).lte(endTimeMillis)); + updatedOnFilter.must( + QueryBuilders.rangeQuery(UPDATED_ON).gte(startTimeMillis).lte(endTimeMillis)); // Now - OR the 2 point comparison conditions together. timeWindowQuery.should(createdOnFilter); @@ -141,5 +141,5 @@ private static QueryBuilder buildManualLineageFilter() { return QueryBuilders.termQuery(String.format("%s.%s", PROPERTIES, SOURCE), UI); } - private TimeFilterUtils() { } + private TimeFilterUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java index ac57fb7db2b78..217d54c5c0b0f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphService.java @@ -56,7 +56,6 @@ import org.neo4j.driver.exceptions.Neo4jException; import org.neo4j.driver.types.Relationship; - @Slf4j public class Neo4jGraphService implements GraphService { @@ -69,7 +68,10 @@ public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driv this(lineageRegistry, driver, SessionConfig.defaultConfig()); } - public Neo4jGraphService(@Nonnull LineageRegistry lineageRegistry, @Nonnull Driver driver, @Nonnull SessionConfig sessionConfig) { + public Neo4jGraphService( + @Nonnull LineageRegistry lineageRegistry, + @Nonnull Driver driver, + @Nonnull SessionConfig sessionConfig) { this._lineageRegistry = lineageRegistry; this._driver = driver; this._sessionConfig = sessionConfig; @@ -83,22 +85,24 @@ public LineageRegistry getLineageRegistry() { @Override public void addEdge(@Nonnull final Edge edge) { - log.debug(String.format("Adding Edge source: %s, destination: %s, type: %s", - edge.getSource(), - edge.getDestination(), - edge.getRelationshipType())); + log.debug( + String.format( + "Adding Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); final String sourceType = edge.getSource().getEntityType(); final String destinationType = edge.getDestination().getEntityType(); final 
String sourceUrn = edge.getSource().toString(); final String destinationUrn = edge.getDestination().toString(); - // Introduce startUrn, endUrn for real source node and destination node without consider direct or indirect pattern match + // Introduce startUrn, endUrn for real source node and destination node without consider direct + // or indirect pattern match String endUrn = destinationUrn; String startUrn = sourceUrn; String endType = destinationType; String startType = sourceType; - // Extra relationship typename start with r_ for direct-outgoing-downstream/indirect-incoming-upstream relationships + // Extra relationship typename start with r_ for + // direct-outgoing-downstream/indirect-incoming-upstream relationships String reverseRelationshipType = "r_" + edge.getRelationshipType(); if (isSourceDestReversed(sourceType, edge.getRelationshipType())) { @@ -117,10 +121,23 @@ public void addEdge(@Nonnull final Edge edge) { // Add/Update relationship final String mergeRelationshipTemplate = "MATCH (source:%s {urn: '%s'}),(destination:%s {urn: '%s'}) MERGE (source)-[r:%s]->(destination) "; - String statement = String.format(mergeRelationshipTemplate, sourceType, sourceUrn, destinationType, destinationUrn, - edge.getRelationshipType()); - - String statementR = String.format(mergeRelationshipTemplate, startType, startUrn, endType, endUrn, reverseRelationshipType); + String statement = + String.format( + mergeRelationshipTemplate, + sourceType, + sourceUrn, + destinationType, + destinationUrn, + edge.getRelationshipType()); + + String statementR = + String.format( + mergeRelationshipTemplate, + startType, + startUrn, + endType, + endUrn, + reverseRelationshipType); // Add/Update relationship properties String setCreatedOnTemplate; @@ -152,20 +169,23 @@ public void addEdge(@Nonnull final Edge edge) { Set.of("createdOn", "createdActor", "updatedOn", "updatedActor", "startUrn", "endUrn"); if (preservedKeySet.contains(entry.getKey())) { throw new UnsupportedOperationException( - String.format("Tried setting properties on graph edge but property key is preserved. Key: %s", + String.format( + "Tried setting properties on graph edge but property key is preserved. Key: %s", entry.getKey())); } if (entry.getValue() instanceof String) { setPropertyTemplate = String.format("r.%s = '%s'", entry.getKey(), entry.getValue()); propertiesTemplateJoiner.add(setPropertyTemplate); } else { - throw new UnsupportedOperationException(String.format( - "Tried setting properties on graph edge but property value type is not supported. Key: %s, Value: %s ", - entry.getKey(), entry.getValue())); + throw new UnsupportedOperationException( + String.format( + "Tried setting properties on graph edge but property value type is not supported. 
Key: %s, Value: %s ", + entry.getKey(), entry.getValue())); } } } - final String setStartEndUrnTemplate = String.format("r.startUrn = '%s', r.endUrn = '%s'", startUrn, endUrn); + final String setStartEndUrnTemplate = + String.format("r.startUrn = '%s', r.endUrn = '%s'", startUrn, endUrn); propertiesTemplateJoiner.add(setStartEndUrnTemplate); if (!StringUtils.isEmpty(propertiesTemplateJoiner.toString())) { statementR = String.format("%s SET %s", statementR, propertiesTemplateJoiner); @@ -184,8 +204,9 @@ public void upsertEdge(final Edge edge) { @Override public void removeEdge(final Edge edge) { log.debug( - String.format("Deleting Edge source: %s, destination: %s, type: %s", edge.getSource(), edge.getDestination(), - edge.getRelationshipType())); + String.format( + "Deleting Edge source: %s, destination: %s, type: %s", + edge.getSource(), edge.getDestination(), edge.getRelationshipType())); final String sourceType = edge.getSource().getEntityType(); final String destinationType = edge.getDestination().getEntityType(); @@ -208,11 +229,24 @@ public void removeEdge(final Edge edge) { final List statements = new ArrayList<>(); // DELETE relationship - final String mergeRelationshipTemplate = "MATCH (source:%s {urn: '%s'})-[r:%s]->(destination:%s {urn: '%s'}) DELETE r"; + final String mergeRelationshipTemplate = + "MATCH (source:%s {urn: '%s'})-[r:%s]->(destination:%s {urn: '%s'}) DELETE r"; final String statement = - String.format(mergeRelationshipTemplate, sourceType, sourceUrn, edge.getRelationshipType(), destinationType, + String.format( + mergeRelationshipTemplate, + sourceType, + sourceUrn, + edge.getRelationshipType(), + destinationType, destinationUrn); - final String statementR = String.format(mergeRelationshipTemplate, startType, startUrn, reverseRelationshipType, endType, endUrn); + final String statementR = + String.format( + mergeRelationshipTemplate, + startType, + startUrn, + reverseRelationshipType, + endType, + endUrn); statements.add(buildStatement(statement, new HashMap<>())); statements.add(buildStatement(statementR, new HashMap<>())); @@ -222,49 +256,74 @@ public void removeEdge(final Edge edge) { @Nonnull @WithSpan @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops) { + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops) { return getLineage(entityUrn, direction, graphFilters, offset, count, maxHops, null, null); } @Nonnull @Override - public EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops, @Nullable Long startTimeMillis, + public EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { log.debug(String.format("Neo4j getLineage maxHops = %d", maxHops)); final var statementAndParams = - generateLineageStatementAndParameters(entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); + generateLineageStatementAndParameters( + entityUrn, direction, graphFilters, maxHops, startTimeMillis, endTimeMillis); final var statement = statementAndParams.getFirst(); final var parameters = statementAndParams.getSecond(); List neo4jResult = - 
statement != null ? runQuery(buildStatement(statement, parameters)).list() : new ArrayList<>(); + statement != null + ? runQuery(buildStatement(statement, parameters)).list() + : new ArrayList<>(); LineageRelationshipArray relations = new LineageRelationshipArray(); - neo4jResult.stream().skip(offset).limit(count).forEach(item -> { - String urn = item.values().get(2).asNode().get("urn").asString(); - try { - final var path = item.get(1).asPath(); - final List nodeListAsPath = StreamSupport.stream( - path.nodes().spliterator(), false) - .map(node -> createFromString(node.get("urn").asString())) - .collect(Collectors.toList()); - - final var firstRelationship = Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); - - relations.add(new LineageRelationship().setEntity(Urn.createFromString(urn)) - // although firstRelationship should never be absent, provide "" as fallback value - .setType(firstRelationship.map(Relationship::type).orElse("")) - .setDegree(path.length()) - .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); - } catch (URISyntaxException ignored) { - log.warn(String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); - } - }); - EntityLineageResult result = new EntityLineageResult().setStart(offset) + neo4jResult.stream() + .skip(offset) + .limit(count) + .forEach( + item -> { + String urn = item.values().get(2).asNode().get("urn").asString(); + try { + final var path = item.get(1).asPath(); + final List nodeListAsPath = + StreamSupport.stream(path.nodes().spliterator(), false) + .map(node -> createFromString(node.get("urn").asString())) + .collect(Collectors.toList()); + + final var firstRelationship = + Optional.ofNullable(Iterables.getFirst(path.relationships(), null)); + + relations.add( + new LineageRelationship() + .setEntity(Urn.createFromString(urn)) + // although firstRelationship should never be absent, provide "" as fallback + // value + .setType(firstRelationship.map(Relationship::type).orElse("")) + .setDegree(path.length()) + .setPaths(new UrnArrayArray(new UrnArray(nodeListAsPath)))); + } catch (URISyntaxException ignored) { + log.warn( + String.format("Can't convert urn = %s, Error = %s", urn, ignored.getMessage())); + } + }); + EntityLineageResult result = + new EntityLineageResult() + .setStart(offset) .setCount(relations.size()) .setRelationships(relations) .setTotal(neo4jResult.size()); @@ -277,7 +336,8 @@ private String getPathFindingLabelFilter(List entityNames) { return entityNames.stream().map(x -> String.format("+%s", x)).collect(Collectors.joining("|")); } - private String getPathFindingRelationshipFilter(@Nonnull List entityNames, @Nullable LineageDirection direction) { + private String getPathFindingRelationshipFilter( + @Nonnull List entityNames, @Nullable LineageDirection direction) { // relationshipFilter supports mixing different directions for various relation types, // so simply transform entries lineage registry into format of filter final var filterComponents = new HashSet(); @@ -293,8 +353,10 @@ private String getPathFindingRelationshipFilter(@Nonnull List entityName } } else { // return disjunctive combination of edge types regardless of direction - for (final var direction1 : List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) { - for (final var edgeInfo : _lineageRegistry.getLineageRelationships(entityName, direction1)) { + for (final var direction1 : + List.of(LineageDirection.UPSTREAM, LineageDirection.DOWNSTREAM)) { + for (final var edgeInfo : + 
_lineageRegistry.getLineageRelationships(entityName, direction1)) { filterComponents.add(edgeInfo.getType()); } } @@ -304,87 +366,111 @@ private String getPathFindingRelationshipFilter(@Nonnull List entityName } private Pair> generateLineageStatementAndParameters( - @Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int maxHops, - @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { - final var parameterMap = new HashMap(Map.of( - "urn", entityUrn.toString(), - "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), - "relationshipFilter", getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), direction), - "maxHops", maxHops - )); + final var parameterMap = + new HashMap( + Map.of( + "urn", entityUrn.toString(), + "labelFilter", getPathFindingLabelFilter(graphFilters.getAllowedEntityTypes()), + "relationshipFilter", + getPathFindingRelationshipFilter( + graphFilters.getAllowedEntityTypes(), direction), + "maxHops", maxHops)); if (startTimeMillis == null && endTimeMillis == null) { // if no time filtering required, simply find all expansion paths to other nodes - final var statement = "MATCH (a {urn: $urn}) " - + "CALL apoc.path.spanningTree(a, { " - + " relationshipFilter: $relationshipFilter, " - + " labelFilter: $labelFilter, " - + " minLevel: 1, " - + " maxLevel: $maxHops " - + "}) " - + "YIELD path " - + "WITH a, path AS path " - + "RETURN a, path, last(nodes(path));"; + final var statement = + "MATCH (a {urn: $urn}) " + + "CALL apoc.path.spanningTree(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD path " + + "WITH a, path AS path " + + "RETURN a, path, last(nodes(path));"; return Pair.of(statement, parameterMap); } else { // when needing time filtering, possibility on multiple paths between two // nodes must be considered, and we need to construct more complex query // use r_ edges until they are no longer useful - final var relationFilter = getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) - .replaceAll("(\\w+)", "r_$1"); + final var relationFilter = + getPathFindingRelationshipFilter(graphFilters.getAllowedEntityTypes(), null) + .replaceAll("(\\w+)", "r_$1"); final var relationshipPattern = String.format( (direction == LineageDirection.UPSTREAM ? "<-[:%s*1..%d]-" : "-[:%s*1..%d]->"), - relationFilter, maxHops); + relationFilter, + maxHops); // two steps: // 1. find list of nodes reachable within maxHops // 2. 
find the shortest paths from start node to every other node in these nodes - // (note: according to the docs of shortestPath, WHERE conditions are applied during path exploration, not + // (note: according to the docs of shortestPath, WHERE conditions are applied during path + // exploration, not // after path exploration is done) - final var statement = "MATCH (a {urn: $urn}) " - + "CALL apoc.path.subgraphNodes(a, { " - + " relationshipFilter: $relationshipFilter, " - + " labelFilter: $labelFilter, " - + " minLevel: 1, " - + " maxLevel: $maxHops " - + "}) " - + "YIELD node AS b " - + "WITH a, b " - + "MATCH path = shortestPath((a)" + relationshipPattern + "(b)) " - + "WHERE a <> b " - + " AND ALL(rt IN relationships(path) WHERE " - + " (EXISTS(rt.source) AND rt.source = 'UI') OR " - + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " - + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " - + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " - + " ) " - + "RETURN a, path, b;"; + final var statement = + "MATCH (a {urn: $urn}) " + + "CALL apoc.path.subgraphNodes(a, { " + + " relationshipFilter: $relationshipFilter, " + + " labelFilter: $labelFilter, " + + " minLevel: 1, " + + " maxLevel: $maxHops " + + "}) " + + "YIELD node AS b " + + "WITH a, b " + + "MATCH path = shortestPath((a)" + + relationshipPattern + + "(b)) " + + "WHERE a <> b " + + " AND ALL(rt IN relationships(path) WHERE " + + " (EXISTS(rt.source) AND rt.source = 'UI') OR " + + " (NOT EXISTS(rt.createdOn) AND NOT EXISTS(rt.updatedOn)) OR " + + " ($startTimeMillis <= rt.createdOn <= $endTimeMillis OR " + + " $startTimeMillis <= rt.updatedOn <= $endTimeMillis) " + + " ) " + + "RETURN a, path, b;"; // provide dummy start/end time when not provided, so no need to // format clause differently if either of them is missing parameterMap.put("startTimeMillis", startTimeMillis == null ? 0 : startTimeMillis); - parameterMap.put("endTimeMillis", endTimeMillis == null ? System.currentTimeMillis() : endTimeMillis); + parameterMap.put( + "endTimeMillis", endTimeMillis == null ? 
System.currentTimeMillis() : endTimeMillis); return Pair.of(statement, parameterMap); } } @Nonnull - public RelatedEntitiesResult findRelatedEntities(@Nullable final List sourceTypes, - @Nonnull final Filter sourceEntityFilter, @Nullable final List destinationTypes, - @Nonnull final Filter destinationEntityFilter, @Nonnull final List relationshipTypes, - @Nonnull final RelationshipFilter relationshipFilter, final int offset, final int count) { - - log.debug(String.format("Finding related Neo4j nodes sourceType: %s, sourceEntityFilter: %s, destinationType: %s, ", - sourceTypes, sourceEntityFilter, destinationTypes) + String.format( - "destinationEntityFilter: %s, relationshipTypes: %s, relationshipFilter: %s, ", destinationEntityFilter, - relationshipTypes, relationshipFilter) + String.format("offset: %s, count: %s", offset, count)); + public RelatedEntitiesResult findRelatedEntities( + @Nullable final List sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count) { - if (sourceTypes != null && sourceTypes.isEmpty() || destinationTypes != null && destinationTypes.isEmpty()) { + log.debug( + String.format( + "Finding related Neo4j nodes sourceType: %s, sourceEntityFilter: %s, destinationType: %s, ", + sourceTypes, sourceEntityFilter, destinationTypes) + + String.format( + "destinationEntityFilter: %s, relationshipTypes: %s, relationshipFilter: %s, ", + destinationEntityFilter, relationshipTypes, relationshipFilter) + + String.format("offset: %s, count: %s", offset, count)); + + if (sourceTypes != null && sourceTypes.isEmpty() + || destinationTypes != null && destinationTypes.isEmpty()) { return new RelatedEntitiesResult(offset, 0, 0, Collections.emptyList()); } @@ -401,7 +487,9 @@ public RelatedEntitiesResult findRelatedEntities(@Nullable final List so matchTemplate = "MATCH (src %s)-[r%s %s]->(dest %s)%s"; } - final String returnNodes = String.format("RETURN dest, type(r)"); // Return both related entity and the relationship type. + final String returnNodes = + String.format( + "RETURN dest, type(r)"); // Return both related entity and the relationship type. final String returnCount = "RETURN count(*)"; // For getting the total results. 
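// A hedged illustration of the statement findRelatedEntities assembles for an
// OUTGOING relationship filter, a single relationship type, and dataset-typed
// endpoints (the urn is a placeholder; empty criteria render as blanks):
//   MATCH (src {urn: 'urn:li:dataset:(...)'})-[r:DownstreamOf]->(dest)
//    WHERE left(type(r), 2)<>'r_' AND src:dataset AND dest:dataset
//   RETURN dest, type(r) SKIP $offset LIMIT $count
// The count statement reuses the same MATCH/WHERE and swaps in RETURN count(*).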
String relationshipTypeFilter = ""; @@ -411,44 +499,70 @@ public RelatedEntitiesResult findRelatedEntities(@Nullable final List so String whereClause = computeEntityTypeWhereClause(sourceTypes, destinationTypes); - // Build Statement strings + // Build Statement strings String baseStatementString = - String.format(matchTemplate, srcCriteria, relationshipTypeFilter, edgeCriteria, destCriteria, whereClause); + String.format( + matchTemplate, + srcCriteria, + relationshipTypeFilter, + edgeCriteria, + destCriteria, + whereClause); log.info(baseStatementString); - final String resultStatementString = String.format("%s %s SKIP $offset LIMIT $count", baseStatementString, returnNodes); + final String resultStatementString = + String.format("%s %s SKIP $offset LIMIT $count", baseStatementString, returnNodes); final String countStatementString = String.format("%s %s", baseStatementString, returnCount); // Build Statements - final Statement resultStatement = new Statement(resultStatementString, ImmutableMap.of("offset", offset, "count", count)); - final Statement countStatement = new Statement(countStatementString, Collections.emptyMap()); + final Statement resultStatement = + new Statement(resultStatementString, ImmutableMap.of("offset", offset, "count", count)); + final Statement countStatement = new Statement(countStatementString, Collections.emptyMap()); // Execute Queries - final List relatedEntities = runQuery(resultStatement).list(record -> - new RelatedEntity( - record.values().get(1).asString(), // Relationship Type - record.values().get(0).asNode().get("urn").asString())); // Urn TODO: Validate this works against Neo4j. + final List relatedEntities = + runQuery(resultStatement) + .list( + record -> + new RelatedEntity( + record.values().get(1).asString(), // Relationship Type + record + .values() + .get(0) + .asNode() + .get("urn") + .asString())); // Urn TODO: Validate this works against Neo4j. 
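// Sketch of the positional row mapping above, with placeholder values: for a row
// where type(r) = 'DownstreamOf' and dest carries urn 'urn:li:dataset:(s3,foo,PROD)',
// values().get(1) supplies the relationship type and values().get(0) the node, so the
// row becomes new RelatedEntity("DownstreamOf", "urn:li:dataset:(s3,foo,PROD)").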
final int totalCount = runQuery(countStatement).single().get(0).asInt(); return new RelatedEntitiesResult(offset, relatedEntities.size(), totalCount, relatedEntities); } - private String computeEntityTypeWhereClause(@Nonnull final List sourceTypes, - @Nonnull final List destinationTypes) { + private String computeEntityTypeWhereClause( + @Nonnull final List sourceTypes, @Nonnull final List destinationTypes) { String whereClause = " WHERE left(type(r), 2)<>'r_' "; Boolean hasSourceTypes = sourceTypes != null && !sourceTypes.isEmpty(); Boolean hasDestTypes = destinationTypes != null && !destinationTypes.isEmpty(); if (hasSourceTypes && hasDestTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s AND %s", - sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR ")), - destinationTypes.stream().map(type -> "dest:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s AND %s", + sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR ")), + destinationTypes.stream() + .map(type -> "dest:" + type) + .collect(Collectors.joining(" OR "))); } else if (hasSourceTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s", - sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s", + sourceTypes.stream().map(type -> "src:" + type).collect(Collectors.joining(" OR "))); } else if (hasDestTypes) { - whereClause = String.format(" WHERE left(type(r), 2)<>'r_' AND %s", - destinationTypes.stream().map(type -> "dest:" + type).collect(Collectors.joining(" OR "))); + whereClause = + String.format( + " WHERE left(type(r), 2)<>'r_' AND %s", + destinationTypes.stream() + .map(type -> "dest:" + type) + .collect(Collectors.joining(" OR "))); } return whereClause; } @@ -468,28 +582,25 @@ public void removeNode(@Nonnull final Urn urn) { } /** - * Remove relationships and reverse relationships by check incoming/outgoing relationships. - * for example: - * a-[consumes]->b, a<-[r_consumes]-b - * a-[produces]->b, a-[r_produces]->b - * should not remove a<-[r_downstreamOf]-b when relationshipDirection equal incoming. - * should remove a-[consumes]->b, a<-[r_consumes]-b, a-[produces]->b, a-[r_produces]->b - * when relationshipDirection equal outgoing. + * Remove relationships and reverse relationships by check incoming/outgoing relationships. for + * example: a-[consumes]->b, a<-[r_consumes]-b a-[produces]->b, a-[r_produces]->b should not + * remove a<-[r_downstreamOf]-b when relationshipDirection equal incoming. should remove + * a-[consumes]->b, a<-[r_consumes]-b, a-[produces]->b, a-[r_produces]->b when + * relationshipDirection equal outgoing. 
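*
* <p>Illustration with placeholder nodes: given relationshipTypes = [consumes] and an
* OUTGOING filter on a, a-[consumes]->b is deleted together with its shadow edge
* a<-[r_consumes]-b, while an unrelated a<-[r_downstreamOf]-b edge is left in place.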
* * @param urn Entity relationship type * @param relationshipTypes Entity relationship type * @param relationshipFilter Query relationship filter - * */ public void removeEdgesFromNode( @Nonnull final Urn urn, @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter) { - log.debug(String.format("Removing Neo4j edge types from node with urn: %s, types: %s, filter: %s", - urn, - relationshipTypes, - relationshipFilter)); + log.debug( + String.format( + "Removing Neo4j edge types from node with urn: %s, types: %s, filter: %s", + urn, relationshipTypes, relationshipFilter)); // also delete any relationship going to or from it final RelationshipDirection relationshipDirection = relationshipFilter.getDirection(); @@ -518,9 +629,13 @@ public void removeEdgesFromNode( relationshipTypeFilter = ""; if (!relationshipTypes.isEmpty()) { relationshipTypeFilter = - ":" + StringUtils.join(relationshipTypes, "|") + "|r_" + StringUtils.join(relationshipTypes, "|r_"); + ":" + + StringUtils.join(relationshipTypes, "|") + + "|r_" + + StringUtils.join(relationshipTypes, "|r_"); } - final String statementNoDirection = String.format(matchDeleteTemplate, relationshipTypeFilter); + final String statementNoDirection = + String.format(matchDeleteTemplate, relationshipTypeFilter); runQuery(buildStatement(statementNoDirection, params)).consume(); } else { for (Record typeDest : neo4jResult) { @@ -602,12 +717,13 @@ private synchronized ExecutionResult executeStatements(@Nonnull List try (final Session session = _driver.session(_sessionConfig)) { do { try { - session.writeTransaction(tx -> { - for (Statement statement : statements) { - tx.run(statement.getCommandText(), statement.getParams()); - } - return 0; - }); + session.writeTransaction( + tx -> { + for (Statement statement : statements) { + tx.run(statement.getCommandText(), statement.getParams()); + } + return 0; + }); lastException = null; break; } catch (Neo4jException e) { @@ -618,7 +734,8 @@ private synchronized ExecutionResult executeStatements(@Nonnull List if (lastException != null) { throw new RetryLimitReached( - "Failed to execute Neo4j write transaction after " + MAX_TRANSACTION_RETRY + " retries", lastException); + "Failed to execute Neo4j write transaction after " + MAX_TRANSACTION_RETRY + " retries", + lastException); } stopWatch.stop(); @@ -650,7 +767,8 @@ private static String toCriterionString(@Nonnull String key, @Nonnull Object val } /** - * Converts {@link RelationshipFilter} to neo4j query criteria, filter criterion condition requires to be EQUAL. + * Converts {@link RelationshipFilter} to neo4j query criteria, filter criterion condition + * requires to be EQUAL. * * @param filter Query relationship filter * @return Neo4j criteria string @@ -661,7 +779,8 @@ private static String relationshipFilterToCriteria(@Nonnull RelationshipFilter f } /** - * Converts {@link Filter} to neo4j query criteria, filter criterion condition requires to be EQUAL. + * Converts {@link Filter} to neo4j query criteria, filter criterion condition requires to be + * EQUAL. * * @param filter Query Filter * @return Neo4j criteria string @@ -674,9 +793,11 @@ private static String filterToCriteria(@Nonnull Filter filter) { private static String disjunctionToCriteria(final ConjunctiveCriterionArray disjunction) { if (disjunction.size() > 1) { // TODO: Support disjunctions (ORs). 
- throw new UnsupportedOperationException("Neo4j query filter only supports 1 set of conjunction criteria"); + throw new UnsupportedOperationException( + "Neo4j query filter only supports 1 set of conjunction criteria"); } - final CriterionArray criterionArray = disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); + final CriterionArray criterionArray = + disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); return criterionToString(criterionArray); } @@ -688,20 +809,21 @@ private static String disjunctionToCriteria(final ConjunctiveCriterionArray disj */ @Nonnull private static String criterionToString(@Nonnull CriterionArray criterionArray) { - if (!criterionArray.stream().allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { - throw new RuntimeException("Neo4j query filter only support EQUAL condition " + criterionArray); + if (!criterionArray.stream() + .allMatch(criterion -> Condition.EQUAL.equals(criterion.getCondition()))) { + throw new RuntimeException( + "Neo4j query filter only support EQUAL condition " + criterionArray); } final StringJoiner joiner = new StringJoiner(",", "{", "}"); - criterionArray.forEach(criterion -> joiner.add(toCriterionString(criterion.getField(), criterion.getValue()))); + criterionArray.forEach( + criterion -> joiner.add(toCriterionString(criterion.getField(), criterion.getValue()))); return joiner.length() <= 2 ? "" : joiner.toString(); } - /** - * Gets Node based on Urn, if not exist, creates placeholder node. - */ + /** Gets Node based on Urn, if not exist, creates placeholder node. */ @Nonnull private Statement getOrInsertNode(@Nonnull Urn urn) { final String nodeType = urn.getEntityType(); @@ -721,32 +843,31 @@ public boolean supportsMultiHop() { } /** - * Reverse incoming/outgoing direction check by compare sourceType and relationshipType to LineageSpec. - * for example: - * sourceType: dataset, relationshipType: downstreamOf. - * downstreamOf relationship type and outgoing relationship direction for dataset from LineageSpec, - * is inside upstreamEdges. - * source(dataset) -[downstreamOf]-> dest means upstreamEdge for source(dataset) - * dest -[r_downstreamOf]-> source(dataset), need reverse source and dest - * * - * sourceType: datajob, relationshipType: produces. - * produces relationship type and outgoing relationship direction for datajob from LineageSpec, - * is inside downstreamEdges. - * source(datajob) -[produces]-> dest means downstreamEdge for source(datajob) - * source(dataset) -[r_produces]-> dest, do not need to reverse source and dest + * Reverse incoming/outgoing direction check by compare sourceType and relationshipType to + * LineageSpec. for example: sourceType: dataset, relationshipType: downstreamOf. downstreamOf + * relationship type and outgoing relationship direction for dataset from LineageSpec, is inside + * upstreamEdges. source(dataset) -[downstreamOf]-> dest means upstreamEdge for source(dataset) + * dest -[r_downstreamOf]-> source(dataset), need reverse source and dest * sourceType: datajob, + * relationshipType: produces. produces relationship type and outgoing relationship direction for + * datajob from LineageSpec, is inside downstreamEdges. 
source(datajob) -[produces]-> dest means + * downstreamEdge for source(datajob) source(dataset) -[r_produces]-> dest, do not need to reverse + * source and dest * * @param sourceType Entity type * @param relationshipType Entity relationship type - * */ - private boolean isSourceDestReversed(@Nonnull String sourceType, @Nonnull String relationshipType) { + private boolean isSourceDestReversed( + @Nonnull String sourceType, @Nonnull String relationshipType) { // Get real direction by check INCOMING/OUTGOING direction and RelationshipType LineageRegistry.LineageSpec sourceLineageSpec = getLineageRegistry().getLineageSpec(sourceType); if (sourceLineageSpec != null) { - List upstreamCheck = sourceLineageSpec.getUpstreamEdges() - .stream() - .filter(t -> t.getDirection() == RelationshipDirection.OUTGOING && t.getType().equals(relationshipType)) - .collect(Collectors.toList()); + List upstreamCheck = + sourceLineageSpec.getUpstreamEdges().stream() + .filter( + t -> + t.getDirection() == RelationshipDirection.OUTGOING + && t.getType().equals(relationshipType)) + .collect(Collectors.toList()); if (!upstreamCheck.isEmpty() || sourceType.equals("schemaField")) { return true; } @@ -754,8 +875,7 @@ private boolean isSourceDestReversed(@Nonnull String sourceType, @Nonnull String return false; } - protected static @Nullable - Urn createFromString(@Nonnull String rawUrn) { + protected static @Nullable Urn createFromString(@Nonnull String rawUrn) { try { return Urn.createFromString(rawUrn); } catch (URISyntaxException e) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java index 6985ceb00afd2..35d75de482007 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java @@ -40,24 +40,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class MostPopularSource implements RecommendationSource { - /** - * Entity Types that should be in scope for this type of recommendation. - */ - private static final Set SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME - ); + /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -82,11 +81,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to determine whether DataHub usage index exists"); } @@ -95,15 +99,15 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getMostPopular").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) .map(Optional::get) @@ -122,13 +126,15 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for all entity view events query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); source.query(query); // Find the entities with the most views - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT * 2); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + .size(MAX_CONTENT * 2); source.aggregation(aggregation); source.size(0); @@ -139,12 +145,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { private Optional buildContent(@Nonnull String entityUrn) { Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { + if 
(EntityUtils.checkIfRemoved(_entityService, entity)
+        || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
       return Optional.empty();
     }
-    return Optional.of(new RecommendationContent().setEntity(entity)
-        .setValue(entityUrn)
-        .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
+    return Optional.of(
+        new RecommendationContent()
+            .setEntity(entity)
+            .setValue(entityUrn)
+            .setParams(
+                new RecommendationParams()
+                    .setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java
index dc30d4c80abc0..0815ffadd05c1 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyEditedSource.java
@@ -41,24 +41,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class RecentlyEditedSource implements RecommendationSource {
-  /**
-   * Entity Types that should be in scope for this type of recommendation.
-   */
-  private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME,
-      Constants.DATA_FLOW_ENTITY_NAME,
-      Constants.DATA_JOB_ENTITY_NAME,
-      Constants.CONTAINER_ENTITY_NAME,
-      Constants.DASHBOARD_ENTITY_NAME,
-      Constants.CHART_ENTITY_NAME,
-      Constants.ML_MODEL_ENTITY_NAME,
-      Constants.ML_FEATURE_ENTITY_NAME,
-      Constants.ML_MODEL_GROUP_ENTITY_NAME,
-      Constants.ML_FEATURE_TABLE_ENTITY_NAME
-      );
+  /** Entity Types that should be in scope for this type of recommendation. 
*/ + private static final Set SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -83,11 +82,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -96,18 +100,19 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyEdited").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) - .map(Optional::get).limit(MAX_CONTENT) + .map(Optional::get) + .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { log.error("Search query to get most recently edited entities failed", e); @@ -122,16 +127,19 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity action events query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_ACTION_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_ACTION_EVENT.getType())); source.query(query); // Find the entity with the largest last viewed timestamp String lastViewed = "last_viewed"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT) - .order(BucketOrder.aggregation(lastViewed, false)) - .subAggregation(AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + 
.size(MAX_CONTENT)
+          .order(BucketOrder.aggregation(lastViewed, false))
+          .subAggregation(
+              AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP));
     source.aggregation(aggregation);
     source.size(0);
@@ -142,13 +150,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) {
 
   private Optional<RecommendationContent> buildContent(@Nonnull String entityUrn) {
     Urn entity = UrnUtils.getUrn(entityUrn);
-    if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
+    if (EntityUtils.checkIfRemoved(_entityService, entity)
+        || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) {
       return Optional.empty();
     }
-    return Optional.of(new RecommendationContent()
-        .setEntity(entity)
-        .setValue(entityUrn)
-        .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
+    return Optional.of(
+        new RecommendationContent()
+            .setEntity(entity)
+            .setValue(entityUrn)
+            .setParams(
+                new RecommendationParams()
+                    .setEntityProfileParams(new EntityProfileParams().setUrn(entity))));
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java
index 0836c569ed5d1..47ffebee2e947 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlyViewedSource.java
@@ -41,24 +41,23 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms;
 import org.opensearch.search.builder.SearchSourceBuilder;
 
-
 @Slf4j
 @RequiredArgsConstructor
 public class RecentlyViewedSource implements RecommendationSource {
-  /**
-   * Entity Types that should be in scope for this type of recommendation.
-   */
-  private static final Set<String> SUPPORTED_ENTITY_TYPES = ImmutableSet.of(Constants.DATASET_ENTITY_NAME,
-      Constants.DATA_FLOW_ENTITY_NAME,
-      Constants.DATA_JOB_ENTITY_NAME,
-      Constants.CONTAINER_ENTITY_NAME,
-      Constants.DASHBOARD_ENTITY_NAME,
-      Constants.CHART_ENTITY_NAME,
-      Constants.ML_MODEL_ENTITY_NAME,
-      Constants.ML_FEATURE_ENTITY_NAME,
-      Constants.ML_MODEL_GROUP_ENTITY_NAME,
-      Constants.ML_FEATURE_TABLE_ENTITY_NAME
-      );
+  /** Entity Types that should be in scope for this type of recommendation. 
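
Where MostPopularSource sorts buckets by document count, the RecentlyEditedSource request above orders them by a max sub-aggregation over the event timestamp. That ordering pattern in isolation, with placeholder field names:

    import org.opensearch.search.aggregations.AggregationBuilder;
    import org.opensearch.search.aggregations.AggregationBuilders;
    import org.opensearch.search.aggregations.BucketOrder;

    public class RecencyOrderingSketch {
      public static AggregationBuilder build() {
        String lastEdited = "last_edited";
        // Terms buckets per urn, sorted descending by the newest event in each
        // bucket rather than by bucket doc count.
        return AggregationBuilders.terms("entity")
            .field("entity_urn.keyword")
            .size(10)
            .order(BucketOrder.aggregation(lastEdited, false))
            .subAggregation(AggregationBuilders.max(lastEdited).field("timestamp"));
      }
    }
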
*/ + private static final Set SUPPORTED_ENTITY_TYPES = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME); + private final RestHighLevelClient _searchClient; private final IndexConvention _indexConvention; private final EntityService _entityService; @@ -83,11 +82,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -96,18 +100,19 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Override @WithSpan - public List getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlyViewed").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) - .map(Optional::get).limit(MAX_CONTENT) + .map(Optional::get) + .limit(MAX_CONTENT) .collect(Collectors.toList()); } catch (Exception e) { log.error("Search query to get most recently viewed entities failed", e); @@ -122,18 +127,23 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity view events of the user requesting recommendation query.must( - QueryBuilders.termQuery(ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false), userUrn.toString())); + QueryBuilders.termQuery( + ESUtils.toKeywordField(DataHubUsageEventConstants.ACTOR_URN, false), + userUrn.toString())); query.must( - QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, DataHubUsageEventType.ENTITY_VIEW_EVENT.getType())); source.query(query); // Find the entity with the largest last viewed timestamp String lastViewed = "last_viewed"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) - .size(MAX_CONTENT) - .order(BucketOrder.aggregation(lastViewed, 
false)) - .subAggregation(AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(ESUtils.toKeywordField(DataHubUsageEventConstants.ENTITY_URN, false)) + .size(MAX_CONTENT) + .order(BucketOrder.aggregation(lastViewed, false)) + .subAggregation( + AggregationBuilders.max(lastViewed).field(DataHubUsageEventConstants.TIMESTAMP)); source.aggregation(aggregation); source.size(0); @@ -144,12 +154,17 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { private Optional buildContent(@Nonnull String entityUrn) { Urn entity = UrnUtils.getUrn(entityUrn); - if (EntityUtils.checkIfRemoved(_entityService, entity) || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { + if (EntityUtils.checkIfRemoved(_entityService, entity) + || !RecommendationUtils.isSupportedEntityType(entity, SUPPORTED_ENTITY_TYPES)) { return Optional.empty(); } - return Optional.of(new RecommendationContent().setEntity(entity) - .setValue(entityUrn) - .setParams(new RecommendationParams().setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); + return Optional.of( + new RecommendationContent() + .setEntity(entity) + .setValue(entityUrn) + .setParams( + new RecommendationParams() + .setEntityProfileParams(new EntityProfileParams().setUrn(entity)))); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java index 75375df77ed6f..b862de320db36 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/EntityLineageResultCacheKey.java @@ -6,7 +6,6 @@ import java.time.temporal.TemporalUnit; import lombok.Data; - @Data public class EntityLineageResultCacheKey { private final Urn sourceUrn; @@ -15,17 +14,27 @@ public class EntityLineageResultCacheKey { private final Long endTimeMillis; private final Integer maxHops; - public EntityLineageResultCacheKey(Urn sourceUrn, LineageDirection direction, Long startTimeMillis, - Long endTimeMillis, Integer maxHops, TemporalUnit resolution) { + public EntityLineageResultCacheKey( + Urn sourceUrn, + LineageDirection direction, + Long startTimeMillis, + Long endTimeMillis, + Integer maxHops, + TemporalUnit resolution) { this.sourceUrn = sourceUrn; this.direction = direction; this.maxHops = maxHops; long endOffset = resolution.getDuration().getSeconds() * 1000; this.startTimeMillis = - startTimeMillis == null ? null : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli(); - this.endTimeMillis = endTimeMillis == null ? null - : Instant.ofEpochMilli(endTimeMillis + endOffset).truncatedTo(resolution).toEpochMilli(); - + startTimeMillis == null + ? null + : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli(); + this.endTimeMillis = + endTimeMillis == null + ? 
null + : Instant.ofEpochMilli(endTimeMillis + endOffset) + .truncatedTo(resolution) + .toEpochMilli(); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java index 9b8e9bce7e670..f6358e4aeb207 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java @@ -1,6 +1,8 @@ package com.linkedin.metadata.search; -import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -10,6 +12,7 @@ import com.linkedin.data.template.LongMap; import com.linkedin.data.template.StringArray; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; import com.linkedin.metadata.graph.EntityLineageResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.LineageDirection; @@ -27,7 +30,6 @@ import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.search.utils.SearchUtils; import io.opentelemetry.extension.annotations.WithSpan; - import java.net.URISyntaxException; import java.time.temporal.ChronoUnit; import java.util.Collections; @@ -44,21 +46,17 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.RequiredArgsConstructor; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections.CollectionUtils; import org.springframework.cache.Cache; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - @RequiredArgsConstructor @Slf4j public class LineageSearchService { - private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags() + private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = + new SearchFlags() .setFulltext(false) .setMaxAggValues(20) .setSkipCache(false) @@ -66,8 +64,7 @@ public class LineageSearchService { .setSkipHighlighting(true); private final SearchService _searchService; private final GraphService _graphService; - @Nullable - private final Cache cache; + @Nullable private final Cache cache; private final boolean cacheEnabled; private final SearchLineageCacheConfiguration cacheConfiguration; @@ -75,20 +72,27 @@ public class LineageSearchService { private static final String DEGREE_FILTER = "degree"; private static final String DEGREE_FILTER_INPUT = "degree.keyword"; - private static final AggregationMetadata DEGREE_FILTER_GROUP = new AggregationMetadata().setName(DEGREE_FILTER) - .setDisplayName("Degree of Dependencies") - .setAggregations(new LongMap()) - .setFilterValues(new FilterValueArray(ImmutableList.of(new FilterValue().setValue("1").setFacetCount(0), - new FilterValue().setValue("2").setFacetCount(0), new FilterValue().setValue("3+").setFacetCount(0)))); + private static final AggregationMetadata DEGREE_FILTER_GROUP = + new AggregationMetadata() + .setName(DEGREE_FILTER) + .setDisplayName("Degree of Dependencies") + .setAggregations(new LongMap()) + .setFilterValues( + new FilterValueArray( + ImmutableList.of( + new 
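
The EntityLineageResultCacheKey constructor above snaps the requested time window to a coarse resolution so near-identical lineage requests share a cache entry; the end of the window is padded by one resolution unit before truncation so the window never shrinks. The same arithmetic, isolated:

    import java.time.Instant;
    import java.time.temporal.ChronoUnit;
    import java.time.temporal.TemporalUnit;

    public class CacheKeyTruncationSketch {
      // Mirrors the constructor above: round the start of the window down to
      // the resolution boundary.
      static Long truncateStart(Long startTimeMillis, TemporalUnit resolution) {
        return startTimeMillis == null
            ? null
            : Instant.ofEpochMilli(startTimeMillis).truncatedTo(resolution).toEpochMilli();
      }

      // Pad the end by one resolution unit before truncating, so it rounds up.
      static Long truncateEnd(Long endTimeMillis, TemporalUnit resolution) {
        long endOffset = resolution.getDuration().getSeconds() * 1000;
        return endTimeMillis == null
            ? null
            : Instant.ofEpochMilli(endTimeMillis + endOffset).truncatedTo(resolution).toEpochMilli();
      }

      public static void main(String[] args) {
        // Two timestamps 50 seconds apart within the same day collapse to one
        // key component, so both requests hit the same cache entry.
        System.out.println(
            truncateStart(1_700_000_000_000L, ChronoUnit.DAYS)
                .equals(truncateStart(1_700_000_050_000L, ChronoUnit.DAYS))); // true
      }
    }
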
FilterValue().setValue("1").setFacetCount(0), + new FilterValue().setValue("2").setFacetCount(0), + new FilterValue().setValue("3+").setFacetCount(0)))); private static final int MAX_RELATIONSHIPS = 1000000; private static final int MAX_TERMS = 50000; - private static final Set PLATFORM_ENTITY_TYPES = ImmutableSet.of( - DATASET_ENTITY_NAME, - CHART_ENTITY_NAME, - DASHBOARD_ENTITY_NAME, - DATA_FLOW_ENTITY_NAME, - DATA_JOB_ENTITY_NAME); + private static final Set PLATFORM_ENTITY_TYPES = + ImmutableSet.of( + DATASET_ENTITY_NAME, + CHART_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME); /** * Gets a list of documents that match given search request that is related to the input entity @@ -98,20 +102,32 @@ public class LineageSearchService { * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text * @param maxHops the maximum number of hops away to search for. If null, defaults to 1000 - * @param inputFilters the request map with fields and values as filters to be applied to search hits + * @param inputFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return - * @return a {@link LineageSearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link LineageSearchResult} that contains a list of matched documents and related + * search result metadata */ @Nonnull @WithSpan - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nullable SearchFlags searchFlags) { - - final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable SearchFlags searchFlags) { + + final SearchFlags finalFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); long startTime = System.nanoTime(); log.debug("Cache enabled {}, Input :{}:", cacheEnabled, input); @@ -123,8 +139,9 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull } // Cache multihop result for faster performance - final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey(sourceUrn, direction, startTimeMillis, - endTimeMillis, maxHops, ChronoUnit.DAYS); + final EntityLineageResultCacheKey cacheKey = + new EntityLineageResultCacheKey( + sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS); CachedEntityLineageResult cachedLineageResult = null; if (cacheEnabled) { @@ -139,12 +156,12 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull FreshnessStats freshnessStats = new FreshnessStats().setCached(Boolean.FALSE); if (cachedLineageResult == null || finalFlags.isSkipCache()) { lineageResult = 
- _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, - endTimeMillis); + _graphService.getLineage( + sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis); if (cacheEnabled) { try { - cache.put(cacheKey, - new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); + cache.put( + cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); } catch (Exception e) { log.warn("Failed to add cacheKey {}", cacheKey, e); } @@ -156,22 +173,36 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull systemFreshness.put("LineageGraphCache", cachedLineageResult.getTimestamp()); freshnessStats.setSystemFreshness(systemFreshness); // set up cache refill if needed - if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()) { + if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() + > cacheConfiguration.getTTLMillis()) { log.info("Cached lineage entry for: {} is older than one day. Will refill.", sourceUrn); Integer finalMaxHops = maxHops; - this.cacheRefillExecutor.submit(() -> { - log.debug("Cache refill started."); - CachedEntityLineageResult reFetchLineageResult = cache.get(cacheKey, CachedEntityLineageResult.class); - if (reFetchLineageResult == null || System.currentTimeMillis() - reFetchLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis() - ) { - // we have to refetch - EntityLineageResult result = _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, finalMaxHops, startTimeMillis, endTimeMillis); - cache.put(cacheKey, result); - log.debug("Refilled Cached lineage entry for: {}.", sourceUrn); - } else { - log.debug("Cache refill not needed. {}", System.currentTimeMillis() - reFetchLineageResult.getTimestamp()); - } - }); + this.cacheRefillExecutor.submit( + () -> { + log.debug("Cache refill started."); + CachedEntityLineageResult reFetchLineageResult = + cache.get(cacheKey, CachedEntityLineageResult.class); + if (reFetchLineageResult == null + || System.currentTimeMillis() - reFetchLineageResult.getTimestamp() + > cacheConfiguration.getTTLMillis()) { + // we have to refetch + EntityLineageResult result = + _graphService.getLineage( + sourceUrn, + direction, + 0, + MAX_RELATIONSHIPS, + finalMaxHops, + startTimeMillis, + endTimeMillis); + cache.put(cacheKey, result); + log.debug("Refilled Cached lineage entry for: {}.", sourceUrn); + } else { + log.debug( + "Cache refill not needed. 
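
The refill logic above serves a stale cached lineage graph immediately and schedules a background re-fetch once the TTL has lapsed, re-checking inside the task in case another refill already ran. A reduced, JDK-only sketch of that serve-stale-then-refresh pattern; the loader, key type, and TTL handling are simplified stand-ins:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.function.Supplier;

    public class ServeStaleThenRefreshSketch {
      record Entry(Object value, long timestamp) {}

      private final Map<String, Entry> cache = new ConcurrentHashMap<>();
      private final ExecutorService refillExecutor = Executors.newSingleThreadExecutor();
      private final long ttlMillis;

      ServeStaleThenRefreshSketch(long ttlMillis) {
        this.ttlMillis = ttlMillis;
      }

      Object get(String key, Supplier<Object> loader) {
        Entry cached = cache.get(key);
        if (cached == null) {
          Entry fresh = new Entry(loader.get(), System.currentTimeMillis());
          cache.put(key, fresh);
          return fresh.value();
        }
        // Serve the stale value now; refresh asynchronously if the TTL lapsed.
        if (System.currentTimeMillis() - cached.timestamp() > ttlMillis) {
          refillExecutor.submit(
              () -> {
                Entry current = cache.get(key);
                // Re-check inside the task: another refill may already have run.
                if (current == null
                    || System.currentTimeMillis() - current.timestamp() > ttlMillis) {
                  cache.put(key, new Entry(loader.get(), System.currentTimeMillis()));
                }
              });
        }
        return cached.value();
      }
    }
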
{}", + System.currentTimeMillis() - reFetchLineageResult.getTimestamp()); + } + }); } } @@ -179,68 +210,92 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult); lineageResult.setRelationships(updatedRelationships); - // Filter hopped result based on the set of entities to return and inputFilters before sending to search + // Filter hopped result based on the set of entities to return and inputFilters before sending + // to search List lineageRelationships = filterRelationships(lineageResult, new HashSet<>(entities), inputFilters); - String lineageGraphInfo = String.format("Lineage Graph = time(ms):%s size:%s", + String lineageGraphInfo = + String.format( + "Lineage Graph = time(ms):%s size:%s", (System.nanoTime() - startTime) / (1000.0 * 1000.0), lineageRelationships.size()); startTime = System.nanoTime(); long numEntities = 0; String codePath = null; try { Filter reducedFilters = - SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); + SearchUtils.removeCriteria( + inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); if (canDoLightning(lineageRelationships, input, reducedFilters, sortCriterion)) { codePath = "lightning"; // use lightning approach to return lineage search results - LineageSearchResult lineageSearchResult = getLightningSearchResult(lineageRelationships, - reducedFilters, from, size, new HashSet<>(entities)); + LineageSearchResult lineageSearchResult = + getLightningSearchResult( + lineageRelationships, reducedFilters, from, size, new HashSet<>(entities)); if (!lineageSearchResult.getEntities().isEmpty()) { - log.debug("Lightning Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); + log.debug( + "Lightning Lineage entity result: {}", + lineageSearchResult.getEntities().get(0).toString()); } numEntities = lineageSearchResult.getNumEntities(); return lineageSearchResult; } else { codePath = "tortoise"; - LineageSearchResult lineageSearchResult = getSearchResultInBatches(lineageRelationships, input, - reducedFilters, sortCriterion, from, size, finalFlags); + LineageSearchResult lineageSearchResult = + getSearchResultInBatches( + lineageRelationships, input, reducedFilters, sortCriterion, from, size, finalFlags); if (!lineageSearchResult.getEntities().isEmpty()) { - log.debug("Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); + log.debug( + "Lineage entity result: {}", lineageSearchResult.getEntities().get(0).toString()); } numEntities = lineageSearchResult.getNumEntities(); return lineageSearchResult; } } finally { - log.info("{}; Lineage Search({}) = time(ms):{} size:{}", lineageGraphInfo, codePath, - (System.nanoTime() - startTime) / (1000.0 * 1000.0), numEntities); + log.info( + "{}; Lineage Search({}) = time(ms):{} size:{}", + lineageGraphInfo, + codePath, + (System.nanoTime() - startTime) / (1000.0 * 1000.0), + numEntities); } } - @VisibleForTesting - boolean canDoLightning(List lineageRelationships, String input, Filter inputFilters, + boolean canDoLightning( + List lineageRelationships, + String input, + Filter inputFilters, SortCriterion sortCriterion) { - boolean simpleFilters = inputFilters == null || inputFilters.getOr() == null - || inputFilters.getOr() - .stream() - .allMatch(criterion -> criterion.getAnd() - .stream() - .allMatch(criterion1 -> "platform".equals(criterion1.getField()) || 
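
The canDoLightning check that continues below gates the in-memory "lightning" path: the relationship set must exceed the configured threshold, the query must be the match-all "*", any filters may touch only the platform and origin fields, and no sort criterion may be set. The same guard condensed over simplified stand-ins for the Filter/Criterion classes:

    import java.util.List;
    import java.util.Map;

    public class LightningGuardSketch {
      // Each inner list is one AND-clause of field -> value criteria; the
      // outer list is OR-ed. Simplified stand-in for Filter/ConjunctiveCriterion.
      static boolean canDoLightning(
          int relationshipCount,
          long lightningThreshold,
          String input,
          List<List<Map.Entry<String, String>>> orOfAnds,
          Object sortCriterion) {
        boolean simpleFilters =
            orOfAnds == null
                || orOfAnds.stream()
                    .allMatch(
                        and ->
                            and.stream()
                                .allMatch(
                                    c ->
                                        "platform".equals(c.getKey())
                                            || "origin".equals(c.getKey())));
        return relationshipCount > lightningThreshold
            && "*".equals(input)
            && simpleFilters
            && sortCriterion == null;
      }
    }
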
"origin".equals(criterion1.getField()) - ) - ); + boolean simpleFilters = + inputFilters == null + || inputFilters.getOr() == null + || inputFilters.getOr().stream() + .allMatch( + criterion -> + criterion.getAnd().stream() + .allMatch( + criterion1 -> + "platform".equals(criterion1.getField()) + || "origin".equals(criterion1.getField()))); return (lineageRelationships.size() > cacheConfiguration.getLightningThreshold()) - && input.equals("*") && simpleFilters && sortCriterion == null; + && input.equals("*") + && simpleFilters + && sortCriterion == null; } @VisibleForTesting - LineageSearchResult getLightningSearchResult(List lineageRelationships, - Filter inputFilters, int from, int size, Set entityNames) { + LineageSearchResult getLightningSearchResult( + List lineageRelationships, + Filter inputFilters, + int from, + int size, + Set entityNames) { // Contruct result objects - LineageSearchResult finalResult = new LineageSearchResult() - .setMetadata(new SearchResultMetadata()); + LineageSearchResult finalResult = + new LineageSearchResult().setMetadata(new SearchResultMetadata()); LineageSearchEntityArray lineageSearchEntityArray = new LineageSearchEntityArray(); AggregationMetadata entityTypeAgg = constructAggMetadata("Type", "entity"); AggregationMetadata platformTypeAgg = constructAggMetadata("Platform", "platform"); @@ -258,7 +313,7 @@ LineageSearchResult getLightningSearchResult(List lineageRe // environment int start = 0; int numElements = 0; - for (LineageRelationship relnship: lineageRelationships) { + for (LineageRelationship relnship : lineageRelationships) { Urn entityUrn = relnship.getEntity(); String entityType = entityUrn.getEntityType(); // Apply platform, entity types, and environment filters @@ -274,16 +329,27 @@ LineageSearchResult getLightningSearchResult(List lineageRe Set platformCriteriaValues = null; Set originCriteriaValues = null; if (inputFilters != null && inputFilters.getOr() != null) { - platformCriteriaValues = inputFilters.getOr().stream().map(ConjunctiveCriterion::getAnd).flatMap( - CriterionArray::stream).filter(criterion -> "platform".equals(criterion.getField())).map(Criterion::getValue) - .collect(Collectors.toSet()); - originCriteriaValues = inputFilters.getOr().stream().map(ConjunctiveCriterion::getAnd).flatMap( - CriterionArray::stream).filter(criterion -> "origin".equals(criterion.getField())).map(Criterion::getValue) - .collect(Collectors.toSet()); + platformCriteriaValues = + inputFilters.getOr().stream() + .map(ConjunctiveCriterion::getAnd) + .flatMap(CriterionArray::stream) + .filter(criterion -> "platform".equals(criterion.getField())) + .map(Criterion::getValue) + .collect(Collectors.toSet()); + originCriteriaValues = + inputFilters.getOr().stream() + .map(ConjunctiveCriterion::getAnd) + .flatMap(CriterionArray::stream) + .filter(criterion -> "origin".equals(criterion.getField())) + .map(Criterion::getValue) + .collect(Collectors.toSet()); } - boolean isNotFiltered = (entityNames.isEmpty() || entityNames.contains(entityUrn.getEntityType())) - && (CollectionUtils.isEmpty(platformCriteriaValues) || (platform != null && platformCriteriaValues.contains(platform))) - && (CollectionUtils.isEmpty(originCriteriaValues) || (environment != null && originCriteriaValues.contains(environment))); + boolean isNotFiltered = + (entityNames.isEmpty() || entityNames.contains(entityUrn.getEntityType())) + && (CollectionUtils.isEmpty(platformCriteriaValues) + || (platform != null && platformCriteriaValues.contains(platform))) + && 
(CollectionUtils.isEmpty(originCriteriaValues) + || (environment != null && originCriteriaValues.contains(environment))); if (isNotFiltered) { start++; @@ -297,53 +363,59 @@ LineageSearchResult getLightningSearchResult(List lineageRe } // entityType - entityTypeAggregations.compute(entityType, - (key, value) -> value == null ? 1L : ++value); + entityTypeAggregations.compute(entityType, (key, value) -> value == null ? 1L : ++value); // platform if (platform != null) { - platformTypeAggregations.compute(platform, - (key, value) -> value == null ? 1L : ++value); + platformTypeAggregations.compute(platform, (key, value) -> value == null ? 1L : ++value); } // environment if (environment != null) { - environmentAggregations.compute(environment, - (key, value) -> value == null ? 1L : ++value); + environmentAggregations.compute( + environment, (key, value) -> value == null ? 1L : ++value); } } } aggregationMetadataArray.add(DEGREE_FILTER_GROUP); if (platformTypeAggregations.keySet().size() > 0) { - for (Map.Entry platformCount: platformTypeAggregations.entrySet()) { + for (Map.Entry platformCount : platformTypeAggregations.entrySet()) { try { - platformTypeAgg.getFilterValues().add(new FilterValue() - .setValue(platformCount.getKey()) - .setFacetCount(platformCount.getValue()) - .setEntity(Urn.createFromString(platformCount.getKey())) - ); + platformTypeAgg + .getFilterValues() + .add( + new FilterValue() + .setValue(platformCount.getKey()) + .setFacetCount(platformCount.getValue()) + .setEntity(Urn.createFromString(platformCount.getKey()))); platformTypeAgg.getAggregations().put(platformCount.getKey(), platformCount.getValue()); } catch (URISyntaxException e) { log.warn("Unexpected exception: {}", e.getMessage()); } } - aggregationMetadataArray.add(platformTypeAgg); + aggregationMetadataArray.add(platformTypeAgg); } if (entityTypeAggregations.keySet().size() > 0) { - for (Map.Entry entityCount: entityTypeAggregations.entrySet()) { - entityTypeAgg.getFilterValues().add(new FilterValue() - .setValue(entityCount.getKey()) - .setFacetCount(entityCount.getValue())); + for (Map.Entry entityCount : entityTypeAggregations.entrySet()) { + entityTypeAgg + .getFilterValues() + .add( + new FilterValue() + .setValue(entityCount.getKey()) + .setFacetCount(entityCount.getValue())); entityTypeAgg.getAggregations().put(entityCount.getKey(), entityCount.getValue()); } aggregationMetadataArray.add(entityTypeAgg); } if (environmentAggregations.keySet().size() > 0) { - for (Map.Entry entityCount: environmentAggregations.entrySet()) { - environmentAgg.getFilterValues().add(new FilterValue() - .setValue(entityCount.getKey()) - .setFacetCount(entityCount.getValue())); + for (Map.Entry entityCount : environmentAggregations.entrySet()) { + environmentAgg + .getFilterValues() + .add( + new FilterValue() + .setValue(entityCount.getKey()) + .setFacetCount(entityCount.getValue())); environmentAgg.getAggregations().put(entityCount.getKey(), entityCount.getValue()); } aggregationMetadataArray.add(environmentAgg); @@ -374,18 +446,24 @@ private String getPlatform(String entityType, Urn entityUrn) { return platform; } - // Necessary so we don't filter out schemaField entities and so that we search to get the parent reference entity - private LineageRelationshipArray convertSchemaFieldRelationships(EntityLineageResult lineageResult) { - return lineageResult.getRelationships().stream().map(relationship -> { - if (relationship.getEntity().getEntityType().equals("schemaField")) { - Urn entity = 
getSchemaFieldReferenceUrn(relationship.getEntity()); - relationship.setEntity(entity); - } - return relationship; - }).collect(Collectors.toCollection(LineageRelationshipArray::new)); + // Necessary so we don't filter out schemaField entities and so that we search to get the parent + // reference entity + private LineageRelationshipArray convertSchemaFieldRelationships( + EntityLineageResult lineageResult) { + return lineageResult.getRelationships().stream() + .map( + relationship -> { + if (relationship.getEntity().getEntityType().equals("schemaField")) { + Urn entity = getSchemaFieldReferenceUrn(relationship.getEntity()); + relationship.setEntity(entity); + } + return relationship; + }) + .collect(Collectors.toCollection(LineageRelationshipArray::new)); } - private Map generateUrnToRelationshipMap(List lineageRelationships) { + private Map generateUrnToRelationshipMap( + List lineageRelationships) { Map urnToRelationship = new HashMap<>(); for (LineageRelationship relationship : lineageRelationships) { LineageRelationship existingRelationship = urnToRelationship.get(relationship.getEntity()); @@ -401,32 +479,49 @@ private Map generateUrnToRelationshipMap(List lineageRelationships, - @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, int from, int size, + private LineageSearchResult getSearchResultInBatches( + List lineageRelationships, + @Nonnull String input, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, @Nonnull SearchFlags searchFlags) { - final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + final SearchFlags finalFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); LineageSearchResult finalResult = - new LineageSearchResult().setEntities(new LineageSearchEntityArray(Collections.emptyList())) + new LineageSearchResult() + .setEntities(new LineageSearchEntityArray(Collections.emptyList())) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) .setFrom(from) .setPageSize(size) .setNumEntities(0); - List> batchedRelationships = Lists.partition(lineageRelationships, MAX_TERMS); + List> batchedRelationships = + Lists.partition(lineageRelationships, MAX_TERMS); int queryFrom = from; int querySize = size; for (List batch : batchedRelationships) { - List entitiesToQuery = batch.stream() - .map(relationship -> relationship.getEntity().getEntityType()) - .distinct() - .collect(Collectors.toList()); + List entitiesToQuery = + batch.stream() + .map(relationship -> relationship.getEntity().getEntityType()) + .distinct() + .collect(Collectors.toList()); Map urnToRelationship = generateUrnToRelationshipMap(batch); Filter finalFilter = buildFilter(urnToRelationship.keySet(), inputFilters); - LineageSearchResult resultForBatch = buildLineageSearchResult( - _searchService.searchAcrossEntities(entitiesToQuery, input, finalFilter, sortCriterion, queryFrom, querySize, - finalFlags), urnToRelationship); + LineageSearchResult resultForBatch = + buildLineageSearchResult( + _searchService.searchAcrossEntities( + entitiesToQuery, + input, + finalFilter, + sortCriterion, + queryFrom, + querySize, + finalFlags), + urnToRelationship); queryFrom = Math.max(0, from - resultForBatch.getNumEntities()); querySize = Math.max(0, size - resultForBatch.getEntities().size()); finalResult = merge(finalResult, resultForBatch); @@ -442,34 +537,44 @@ public static LineageSearchResult merge(LineageSearchResult one, 
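
getSearchResultInBatches above exists because the search index only accepts roughly 50K terms per filter (MAX_TERMS), so the relationships are partitioned and the residual from/size is re-derived after each page. A simplified sketch of that bookkeeping with Guava's Lists.partition; Page and BatchFetcher are stand-ins, and the offset here is carried cumulatively, which is the apparent intent of the from/numEntities arithmetic above:

    import com.google.common.collect.Lists;
    import java.util.ArrayList;
    import java.util.List;

    public class BatchedPagingSketch {

      /** One page of results from a single batch query (stand-in). */
      record Page<T>(List<T> hits, int totalMatches) {}

      interface BatchFetcher<T> {
        Page<T> fetch(List<T> batchTerms, int offset, int limit);
      }

      static <T> List<T> pageAcrossBatches(
          List<T> allTerms, int maxTerms, int from, int size, BatchFetcher<T> fetcher) {
        List<T> out = new ArrayList<>();
        int queryFrom = from;
        int querySize = size;
        for (List<T> batch : Lists.partition(allTerms, maxTerms)) {
          if (querySize == 0) {
            break;
          }
          Page<T> page = fetcher.fetch(batch, queryFrom, querySize);
          out.addAll(page.hits());
          // Matches consumed by this batch shrink the offset carried into the
          // next batch, and only the still-missing hits are requested.
          queryFrom = Math.max(0, queryFrom - page.totalMatches());
          querySize = Math.max(0, size - out.size());
        }
        return out;
      }
    }
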
LineageSearchRe finalResult.getEntities().addAll(two.getEntities()); finalResult.setNumEntities(one.getNumEntities() + two.getNumEntities()); - Map aggregations = one.getMetadata() + Map aggregations = + one.getMetadata().getAggregations().stream() + .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); + two.getMetadata() .getAggregations() - .stream() - .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); - two.getMetadata().getAggregations().forEach(metadata -> { - if (aggregations.containsKey(metadata.getName())) { - aggregations.put(metadata.getName(), SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); - } else { - aggregations.put(metadata.getName(), metadata); - } - }); - finalResult.getMetadata().setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); + .forEach( + metadata -> { + if (aggregations.containsKey(metadata.getName())) { + aggregations.put( + metadata.getName(), + SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); + } else { + aggregations.put(metadata.getName(), metadata); + } + }); + finalResult + .getMetadata() + .setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); return finalResult; } private Predicate convertFilterToPredicate(List degreeFilterValues) { - return degreeFilterValues.stream().map(value -> { - switch (value) { - case "1": - return (Predicate) (Integer numHops) -> (numHops == 1); - case "2": - return (Predicate) (Integer numHops) -> (numHops == 2); - case "3+": - return (Predicate) (Integer numHops) -> (numHops > 2); - default: - throw new IllegalArgumentException(String.format("%s is not a valid filter value for degree filters", value)); - } - }).reduce(x -> false, Predicate::or); + return degreeFilterValues.stream() + .map( + value -> { + switch (value) { + case "1": + return (Predicate) (Integer numHops) -> (numHops == 1); + case "2": + return (Predicate) (Integer numHops) -> (numHops == 2); + case "3+": + return (Predicate) (Integer numHops) -> (numHops > 2); + default: + throw new IllegalArgumentException( + String.format("%s is not a valid filter value for degree filters", value)); + } + }) + .reduce(x -> false, Predicate::or); } private Urn getSchemaFieldReferenceUrn(Urn urn) { @@ -484,24 +589,29 @@ private Urn getSchemaFieldReferenceUrn(Urn urn) { return urn; } - private List filterRelationships(@Nonnull EntityLineageResult lineageResult, - @Nonnull Set entities, @Nullable Filter inputFilters) { - Stream relationshipsFilteredByEntities = lineageResult.getRelationships().stream(); + private List filterRelationships( + @Nonnull EntityLineageResult lineageResult, + @Nonnull Set entities, + @Nullable Filter inputFilters) { + Stream relationshipsFilteredByEntities = + lineageResult.getRelationships().stream(); if (!entities.isEmpty()) { - relationshipsFilteredByEntities = relationshipsFilteredByEntities.filter( - relationship -> entities.contains(relationship.getEntity().getEntityType())); + relationshipsFilteredByEntities = + relationshipsFilteredByEntities.filter( + relationship -> entities.contains(relationship.getEntity().getEntityType())); } if (inputFilters != null && !CollectionUtils.isEmpty(inputFilters.getOr())) { ConjunctiveCriterion conjunctiveCriterion = inputFilters.getOr().get(0); if (conjunctiveCriterion.hasAnd()) { - List degreeFilter = conjunctiveCriterion.getAnd() - .stream() - .filter(criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)) - .flatMap(c -> 
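
convertFilterToPredicate above folds one predicate per selected degree bucket into a single test, using an always-false lambda as the identity for Predicate::or. The same fold in miniature:

    import java.util.List;
    import java.util.function.Predicate;

    public class DegreePredicateSketch {
      static Predicate<Integer> fromValues(List<String> degreeFilterValues) {
        return degreeFilterValues.stream()
            .map(
                value -> {
                  switch (value) {
                    case "1":
                      return (Predicate<Integer>) numHops -> numHops == 1;
                    case "2":
                      return (Predicate<Integer>) numHops -> numHops == 2;
                    case "3+":
                      return (Predicate<Integer>) numHops -> numHops > 2;
                    default:
                      throw new IllegalArgumentException(
                          value + " is not a valid degree filter value");
                  }
                })
            // x -> false is the identity for Predicate::or, so an empty list
            // matches nothing.
            .reduce(x -> false, Predicate::or);
      }

      public static void main(String[] args) {
        Predicate<Integer> p = fromValues(List.of("1", "3+"));
        System.out.println(p.test(1) + " " + p.test(2) + " " + p.test(4)); // true false true
      }
    }
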
c.getValues().stream()) - .collect(Collectors.toList()); + List degreeFilter = + conjunctiveCriterion.getAnd().stream() + .filter(criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)) + .flatMap(c -> c.getValues().stream()) + .collect(Collectors.toList()); if (!degreeFilter.isEmpty()) { Predicate degreePredicate = convertFilterToPredicate(degreeFilter); - return relationshipsFilteredByEntities.filter(relationship -> degreePredicate.test(relationship.getDegree())) + return relationshipsFilteredByEntities + .filter(relationship -> degreePredicate.test(relationship.getDegree())) .collect(Collectors.toList()); } } @@ -510,9 +620,12 @@ private List filterRelationships(@Nonnull EntityLineageResu } private Filter buildFilter(@Nonnull Set urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") - .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") + .setValue("") + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); if (inputFilters == null) { return QueryUtils.newFilter(urnMatchCriterion); } @@ -527,21 +640,27 @@ private Filter buildFilter(@Nonnull Set urns, @Nullable Filter inputFilters return QueryUtils.newFilter(urnMatchCriterion); } - private LineageSearchResult buildLineageSearchResult(@Nonnull SearchResult searchResult, - Map urnToRelationship) { - AggregationMetadataArray aggregations = new AggregationMetadataArray(searchResult.getMetadata().getAggregations()); - return new LineageSearchResult().setEntities(new LineageSearchEntityArray(searchResult.getEntities() - .stream() - .map(searchEntity -> buildLineageSearchEntity(searchEntity, urnToRelationship.get(searchEntity.getEntity()))) - .collect(Collectors.toList()))) + private LineageSearchResult buildLineageSearchResult( + @Nonnull SearchResult searchResult, Map urnToRelationship) { + AggregationMetadataArray aggregations = + new AggregationMetadataArray(searchResult.getMetadata().getAggregations()); + return new LineageSearchResult() + .setEntities( + new LineageSearchEntityArray( + searchResult.getEntities().stream() + .map( + searchEntity -> + buildLineageSearchEntity( + searchEntity, urnToRelationship.get(searchEntity.getEntity()))) + .collect(Collectors.toList()))) .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) .setFrom(searchResult.getFrom()) .setPageSize(searchResult.getPageSize()) .setNumEntities(searchResult.getNumEntities()); } - private LineageSearchEntity buildLineageSearchEntity(@Nonnull SearchEntity searchEntity, - @Nullable LineageRelationship lineageRelationship) { + private LineageSearchEntity buildLineageSearchEntity( + @Nonnull SearchEntity searchEntity, @Nullable LineageRelationship lineageRelationship) { LineageSearchEntity entity = new LineageSearchEntity(searchEntity.data()); if (lineageRelationship != null) { entity.setPaths(lineageRelationship.getPaths()); @@ -558,34 +677,50 @@ private LineageSearchEntity buildLineageSearchEntity(@Nonnull SearchEntity searc * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text * @param maxHops the maximum number of hops away to search for. 
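
buildFilter above pins the candidate set with a urn terms criterion and falls back to a urn-only filter when no input filters were supplied; presumably (the middle of the hunk is elided) the criterion is AND-ed into each existing OR clause so the caller's filters still apply. A toy version of that combination over plain lists:

    import java.util.ArrayList;
    import java.util.List;

    public class UrnFilterSketch {
      // Each inner list is one AND-clause; the outer list is OR-ed. Simplified
      // stand-in for Filter/ConjunctiveCriterion.
      static List<List<String>> withUrnCriterion(
          List<List<String>> orOfAnds, String urnCriterion) {
        if (orOfAnds == null || orOfAnds.isEmpty()) {
          return List.of(List.of(urnCriterion));
        }
        List<List<String>> out = new ArrayList<>();
        for (List<String> and : orOfAnds) {
          List<String> widened = new ArrayList<>(and);
          // The urn restriction must hold in every disjunct, so it is added to
          // each AND-clause rather than as a separate OR branch.
          widened.add(urnCriterion);
          out.add(widened);
        }
        return out;
      }
    }
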
If null, defaults to 1000 - * @param inputFilters the request map with fields and values as filters to be applied to search hits + * @param inputFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return - * @return a {@link LineageSearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link LineageSearchResult} that contains a list of matched documents and related + * search result metadata */ @Nonnull @WithSpan - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nullable String input, @Nullable Integer maxHops, @Nullable Filter inputFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int size, @Nullable Long startTimeMillis, - @Nullable Long endTimeMillis, @Nonnull SearchFlags searchFlags) { + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nullable String input, + @Nullable Integer maxHops, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int size, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nonnull SearchFlags searchFlags) { // Cache multihop result for faster performance - final EntityLineageResultCacheKey cacheKey = new EntityLineageResultCacheKey(sourceUrn, direction, startTimeMillis, - endTimeMillis, maxHops, ChronoUnit.DAYS); - CachedEntityLineageResult cachedLineageResult = cacheEnabled - ? cache.get(cacheKey, CachedEntityLineageResult.class) : null; + final EntityLineageResultCacheKey cacheKey = + new EntityLineageResultCacheKey( + sourceUrn, direction, startTimeMillis, endTimeMillis, maxHops, ChronoUnit.DAYS); + CachedEntityLineageResult cachedLineageResult = + cacheEnabled ? cache.get(cacheKey, CachedEntityLineageResult.class) : null; EntityLineageResult lineageResult; if (cachedLineageResult == null) { maxHops = maxHops != null ? 
maxHops : 1000; - lineageResult = _graphService.getLineage(sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, - startTimeMillis, endTimeMillis); + lineageResult = + _graphService.getLineage( + sourceUrn, direction, 0, MAX_RELATIONSHIPS, maxHops, startTimeMillis, endTimeMillis); if (cacheEnabled) { - cache.put(cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); + cache.put( + cacheKey, new CachedEntityLineageResult(lineageResult, System.currentTimeMillis())); } } else { lineageResult = cachedLineageResult.getEntityLineageResult(); - if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() > cacheConfiguration.getTTLMillis()) { + if (System.currentTimeMillis() - cachedLineageResult.getTimestamp() + > cacheConfiguration.getTTLMillis()) { log.warn("Cached lineage entry for: {} is older than one day.", sourceUrn); } } @@ -594,39 +729,67 @@ public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageRelationshipArray updatedRelationships = convertSchemaFieldRelationships(lineageResult); lineageResult.setRelationships(updatedRelationships); - // Filter hopped result based on the set of entities to return and inputFilters before sending to search + // Filter hopped result based on the set of entities to return and inputFilters before sending + // to search List lineageRelationships = filterRelationships(lineageResult, new HashSet<>(entities), inputFilters); Filter reducedFilters = - SearchUtils.removeCriteria(inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); - return getScrollResultInBatches(lineageRelationships, input != null ? input : "*", reducedFilters, sortCriterion, - scrollId, keepAlive, size, searchFlags); + SearchUtils.removeCriteria( + inputFilters, criterion -> criterion.getField().equals(DEGREE_FILTER_INPUT)); + return getScrollResultInBatches( + lineageRelationships, + input != null ? 
input : "*", + reducedFilters, + sortCriterion, + scrollId, + keepAlive, + size, + searchFlags); } // Search service can only take up to 50K term filter, so query search service in batches - private LineageScrollResult getScrollResultInBatches(List lineageRelationships, - @Nonnull String input, @Nullable Filter inputFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, - @Nonnull String keepAlive, int size, @Nonnull SearchFlags searchFlags) { - final SearchFlags finalFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + private LineageScrollResult getScrollResultInBatches( + List lineageRelationships, + @Nonnull String input, + @Nullable Filter inputFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int size, + @Nonnull SearchFlags searchFlags) { + final SearchFlags finalFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); LineageScrollResult finalResult = - new LineageScrollResult().setEntities(new LineageSearchEntityArray(Collections.emptyList())) + new LineageScrollResult() + .setEntities(new LineageSearchEntityArray(Collections.emptyList())) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) .setPageSize(size) .setNumEntities(0); - List> batchedRelationships = Lists.partition(lineageRelationships, MAX_TERMS); + List> batchedRelationships = + Lists.partition(lineageRelationships, MAX_TERMS); int querySize = size; for (List batch : batchedRelationships) { - List entitiesToQuery = batch.stream() - .map(relationship -> relationship.getEntity().getEntityType()) - .distinct() - .collect(Collectors.toList()); + List entitiesToQuery = + batch.stream() + .map(relationship -> relationship.getEntity().getEntityType()) + .distinct() + .collect(Collectors.toList()); Map urnToRelationship = generateUrnToRelationshipMap(batch); Filter finalFilter = buildFilter(urnToRelationship.keySet(), inputFilters); - LineageScrollResult resultForBatch = buildLineageScrollResult( - _searchService.scrollAcrossEntities(entitiesToQuery, input, finalFilter, sortCriterion, scrollId, keepAlive, querySize, - finalFlags), urnToRelationship); + LineageScrollResult resultForBatch = + buildLineageScrollResult( + _searchService.scrollAcrossEntities( + entitiesToQuery, + input, + finalFilter, + sortCriterion, + scrollId, + keepAlive, + querySize, + finalFlags), + urnToRelationship); querySize = Math.max(0, size - resultForBatch.getEntities().size()); finalResult = mergeScrollResult(finalResult, resultForBatch); } @@ -635,16 +798,23 @@ private LineageScrollResult getScrollResultInBatches(List l return finalResult.setPageSize(size); } - private LineageScrollResult buildLineageScrollResult(@Nonnull ScrollResult scrollResult, - Map urnToRelationship) { - AggregationMetadataArray aggregations = new AggregationMetadataArray(scrollResult.getMetadata().getAggregations()); - LineageScrollResult lineageScrollResult = new LineageScrollResult().setEntities(new LineageSearchEntityArray(scrollResult.getEntities() - .stream() - .map(searchEntity -> buildLineageSearchEntity(searchEntity, urnToRelationship.get(searchEntity.getEntity()))) - .collect(Collectors.toList()))) - .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) - .setPageSize(scrollResult.getPageSize()) - .setNumEntities(scrollResult.getNumEntities()); + private LineageScrollResult buildLineageScrollResult( + @Nonnull ScrollResult scrollResult, Map urnToRelationship) { + 
AggregationMetadataArray aggregations = + new AggregationMetadataArray(scrollResult.getMetadata().getAggregations()); + LineageScrollResult lineageScrollResult = + new LineageScrollResult() + .setEntities( + new LineageSearchEntityArray( + scrollResult.getEntities().stream() + .map( + searchEntity -> + buildLineageSearchEntity( + searchEntity, urnToRelationship.get(searchEntity.getEntity()))) + .collect(Collectors.toList()))) + .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) + .setPageSize(scrollResult.getPageSize()) + .setNumEntities(scrollResult.getNumEntities()); if (scrollResult.getScrollId() != null) { lineageScrollResult.setScrollId(scrollResult.getScrollId()); @@ -653,23 +823,30 @@ private LineageScrollResult buildLineageScrollResult(@Nonnull ScrollResult scrol } @SneakyThrows - public static LineageScrollResult mergeScrollResult(LineageScrollResult one, LineageScrollResult two) { + public static LineageScrollResult mergeScrollResult( + LineageScrollResult one, LineageScrollResult two) { LineageScrollResult finalResult = one.clone(); finalResult.getEntities().addAll(two.getEntities()); finalResult.setNumEntities(one.getNumEntities() + two.getNumEntities()); - Map aggregations = one.getMetadata() + Map aggregations = + one.getMetadata().getAggregations().stream() + .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); + two.getMetadata() .getAggregations() - .stream() - .collect(Collectors.toMap(AggregationMetadata::getName, Function.identity())); - two.getMetadata().getAggregations().forEach(metadata -> { - if (aggregations.containsKey(metadata.getName())) { - aggregations.put(metadata.getName(), SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); - } else { - aggregations.put(metadata.getName(), metadata); - } - }); - finalResult.getMetadata().setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); + .forEach( + metadata -> { + if (aggregations.containsKey(metadata.getName())) { + aggregations.put( + metadata.getName(), + SearchUtils.merge(aggregations.get(metadata.getName()), metadata)); + } else { + aggregations.put(metadata.getName(), metadata); + } + }); + finalResult + .getMetadata() + .setAggregations(new AggregationMetadataArray(FilterUtils.rankFilterGroups(aggregations))); if (two.getScrollId() != null) { finalResult.setScrollId(two.getScrollId()); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java index c99e4a94feb29..3bcc163613c5e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.codahale.metrics.Timer; import com.linkedin.data.template.LongMap; import com.linkedin.metadata.query.SearchFlags; @@ -21,9 +23,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class SearchService { private final CachingEntitySearchService _cachingEntitySearchService; @@ -41,36 +40,52 @@ public SearchService( public Map docCountPerEntity(@Nonnull List entityNames) { return entityNames.stream() - .collect(Collectors.toMap(Function.identity(), - entityName -> _entityDocCountCache.getEntityDocCount().getOrDefault(entityName.toLowerCase(), 0L))); + .collect( 
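
mergeScrollResult above keys the second page's facet groups by name and merges collisions via SearchUtils.merge. The same shape over plain count maps, with Long::sum standing in for the facet merge:

    import java.util.HashMap;
    import java.util.Map;

    public class AggregationMergeSketch {
      static Map<String, Long> mergeFacets(Map<String, Long> one, Map<String, Long> two) {
        Map<String, Long> merged = new HashMap<>(one);
        // Same shape as the forEach above: merge on name collision, insert otherwise.
        two.forEach((name, count) -> merged.merge(name, count, Long::sum));
        return merged;
      }

      public static void main(String[] args) {
        System.out.println(
            mergeFacets(Map.of("platform", 3L), Map.of("platform", 2L, "origin", 1L)));
        // e.g. {platform=5, origin=1}
      }
    }
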
+ Collectors.toMap( + Function.identity(), + entityName -> + _entityDocCountCache + .getEntityDocCount() + .getOrDefault(entityName.toLowerCase(), 0L))); } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. * * @param entityNames names of the entities * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult search(@Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags) { + public SearchResult search( + @Nonnull List entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags) { List entitiesToSearch = getEntitiesToSearch(entityNames); if (entitiesToSearch.isEmpty()) { // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } SearchResult result = - _cachingEntitySearchService.search(entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null); + _cachingEntitySearchService.search( + entitiesToSearch, input, postFilters, sortCriterion, from, size, searchFlags, null); try { - return result.copy().setEntities(new SearchEntityArray(_searchRanker.rank(result.getEntities()))); + return result + .copy() + .setEntities(new SearchEntityArray(_searchRanker.rank(result.getEntities()))); } catch (Exception e) { log.error("Failed to rank: {}, exception - {}", result, e.toString()); throw new RuntimeException("Failed to rank " + result.toString()); @@ -78,37 +93,55 @@ public SearchResult search(@Nonnull List entityNames, @Nonnull String in } @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, @Nullable SearchFlags searchFlags) { - return searchAcrossEntities(entities, input, postFilters, sortCriterion, from, size, searchFlags, null); + return searchAcrossEntities( + entities, input, postFilters, sortCriterion, from, size, searchFlags, null); } /** - * Gets a list of documents that match given search request across multiple entities. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. 
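
docCountPerEntity and the empty-index short-circuit above let SearchService skip queries against entity indices known to hold zero documents, using a lowercased doc-count lookup that defaults to 0. A toy version of that pre-filter; the count map stands in for EntityDocCountCache:

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class NonEmptyEntitiesSketch {
      static List<String> entitiesWorthQuerying(
          List<String> requested, Map<String, Long> docCountByEntity) {
        return requested.stream()
            // Case-insensitive lookup, defaulting to 0 as above.
            .filter(e -> docCountByEntity.getOrDefault(e.toLowerCase(), 0L) > 0L)
            .collect(Collectors.toList());
      }

      public static void main(String[] args) {
        Map<String, Long> counts = Map.of("dataset", 42L, "chart", 0L);
        // Only "dataset" survives; the search can be skipped entirely when
        // this list comes back empty.
        System.out.println(entitiesWorthQuerying(List.of("DATASET", "CHART"), counts));
      }
    }
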
+ * Gets a list of documents that match given search request across multiple entities. The results + * are aggregated and filters are applied to the search hits and not the aggregation results. * * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List facets) { - log.debug(String.format( - "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, from, size)); + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List facets) { + log.debug( + String.format( + "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entities, input, postFilters, sortCriterion, from, size)); // DEPRECATED - // This is the legacy version of `_entityType`-- it operates as a special case and does not support ORs, Unions, etc. - // We will still provide it for backwards compatibility but when sending filters to the backend use the new - // filter name `_entityType` that we provide above. This is just provided to prevent a breaking change for old clients. + // This is the legacy version of `_entityType`-- it operates as a special case and does not + // support ORs, Unions, etc. + // We will still provide it for backwards compatibility but when sending filters to the backend + // use the new + // filter name `_entityType` that we provide above. This is just provided to prevent a breaking + // change for old clients. 
boolean aggregateByLegacyEntityFacet = facets != null && facets.contains("entity"); if (aggregateByLegacyEntityFacet) { facets = new ArrayList<>(facets); @@ -119,29 +152,49 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); } - SearchResult result = _cachingEntitySearchService.search(nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); + SearchResult result = + _cachingEntitySearchService.search( + nonEmptyEntities, input, postFilters, sortCriterion, from, size, searchFlags, facets); if (facets == null || facets.contains("entity") || facets.contains("_entityType")) { - Optional entityTypeAgg = result.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals(INDEX_VIRTUAL_FIELD)).findFirst(); + Optional entityTypeAgg = + result.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals(INDEX_VIRTUAL_FIELD)) + .findFirst(); if (entityTypeAgg.isPresent()) { LongMap numResultsPerEntity = entityTypeAgg.get().getAggregations(); - result.getMetadata() + result + .getMetadata() .getAggregations() - .add(new AggregationMetadata().setName("entity") - .setDisplayName("Type") - .setAggregations(numResultsPerEntity) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(numResultsPerEntity, Collections.emptySet())))); + .add( + new AggregationMetadata() + .setName("entity") + .setDisplayName("Type") + .setAggregations(numResultsPerEntity) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + numResultsPerEntity, Collections.emptySet())))); } else { - // Should not happen due to the adding of the _entityType aggregation before, but if it does, best-effort count of entity types + // Should not happen due to the adding of the _entityType aggregation before, but if it + // does, best-effort count of entity types // Will not include entity types that had 0 results - Map numResultsPerEntity = result.getEntities().stream().collect(Collectors.groupingBy( - entity -> entity.getEntity().getEntityType(), Collectors.counting())); - result.getMetadata() + Map numResultsPerEntity = + result.getEntities().stream() + .collect( + Collectors.groupingBy( + entity -> entity.getEntity().getEntityType(), Collectors.counting())); + result + .getMetadata() .getAggregations() - .add(new AggregationMetadata().setName("entity") - .setDisplayName("Type") - .setAggregations(new LongMap(numResultsPerEntity)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(numResultsPerEntity, Collections.emptySet())))); + .add( + new AggregationMetadata() + .setName("entity") + .setDisplayName("Type") + .setAggregations(new LongMap(numResultsPerEntity)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + numResultsPerEntity, Collections.emptySet())))); } } return result; @@ -149,15 +202,18 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul /** * If no entities are provided, fallback to the list of non-empty entities + * * @param inputEntities the requested entities * @return some entities to search */ private List getEntitiesToSearch(@Nonnull List inputEntities) { List nonEmptyEntities; - List lowercaseEntities = inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); + List lowercaseEntities = + inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); if 
(lowercaseEntities.isEmpty()) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getNonEmptyEntities").time()) { nonEmptyEntities = _entityDocCountCache.getNonEmptyEntities(); } } else { @@ -168,35 +224,53 @@ private List getEntitiesToSearch(@Nonnull List inputEntities) { } /** - * Gets a list of documents that match given search request across multiple entities. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request across multiple entities. The results + * are aggregated and filters are applied to the search hits and not the aggregation results. * * @param entities list of entities to search (If empty, searches across all entities) * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier for passing to search backend * @param size the number of search hits to return * @param searchFlags optional set of flags to control search behavior - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter postFilters, @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, - int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult scrollAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); List entitiesToSearch = getEntitiesToSearch(entities); if (entitiesToSearch.isEmpty()) { // No indices with non-zero entries: skip querying and return empty result return getEmptyScrollResult(size); } - return _cachingEntitySearchService.scroll(entitiesToSearch, input, postFilters, sortCriterion, scrollId, keepAlive, size, searchFlags); + return _cachingEntitySearchService.scroll( + entitiesToSearch, + input, + postFilters, + sortCriterion, + scrollId, + keepAlive, + size, + searchFlags); } private static SearchResult getEmptySearchResult(int from, int size) { - return new SearchResult().setEntities(new SearchEntityArray()) + return new SearchResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setFrom(from) .setPageSize(size) @@ -204,7 +278,8 @@ private static SearchResult getEmptySearchResult(int from, int size) { } private static ScrollResult getEmptyScrollResult(int size) { - return new ScrollResult().setEntities(new SearchEntityArray()) 
+ return new ScrollResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setPageSize(size) .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java index cc7cd8ce28bae..0ecdb83ed20ee 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CacheableSearcher.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.cache; +import static com.datahub.util.RecordUtils.*; + import com.codahale.metrics.Timer; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; @@ -16,23 +18,17 @@ import lombok.Value; import org.springframework.cache.Cache; -import static com.datahub.util.RecordUtils.*; - - -/** - * Wrapper class to allow searching in batches and caching the results. - */ +/** Wrapper class to allow searching in batches and caching the results. */ @RequiredArgsConstructor public class CacheableSearcher { - @Nonnull - private final Cache cache; + @Nonnull private final Cache cache; private final int batchSize; - // Function that executes search and retrieves the search result given the query batch (from, size) + // Function that executes search and retrieves the search result given the query batch (from, + // size) private final Function searcher; // Function that generates the cache key given the query batch (from, size) private final Function cacheKeyGenerator; - @Nullable - private final SearchFlags searchFlags; + @Nullable private final SearchFlags searchFlags; private final boolean enableCache; @Value @@ -42,9 +38,10 @@ public static class QueryPagination implements Serializable { } /** - * Get search results corresponding to the input "from" and "size" - * It goes through batches, starting from the beginning, until we get enough results to return - * This let's us have batches that return a variable number of results (we have no idea which batch the "from" "size" page corresponds to) + * Get search results corresponding to the input "from" and "size". It goes through batches, + * starting from the beginning, until we get enough results to return. This lets us have batches + * that return a variable number of results (we have no idea which batch the "from" "size" page + * corresponds to) */ public SearchResult getSearchResults(int from, int size) { try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getSearchResults").time()) { @@ -67,14 +64,16 @@ public SearchResult getSearchResults(int from, int size) { resultEntities.addAll(batchedResult.getEntities().subList(startInBatch, endInBatch)); foundStart = true; } - // If current batch is smaller than the requested batch size, the next batch will return empty. + // If current batch is smaller than the requested batch size, the next batch will return + // empty.
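// Worked example of the paging math above (illustrative numbers, assuming every batch
// below comes back full): with batchSize = 10, from = 25, size = 10, batches 0 and 1 are
// read and skipped (resultsSoFar reaches 20), batch 2 contributes its entities 5..9
// (startInBatch = 25 - 20 = 5, the global results 25..29), and batch 3 contributes its
// entities 0..4 (the global results 30..34); the loop then exits because resultsSoFar
// has reached from + size = 35. Each batch is still fetched through getBatch, so it can
// be served from the cache.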
if (currentBatchSize < batchSize) { break; } resultsSoFar += currentBatchSize; batchId++; } while (resultsSoFar < from + size); - return new SearchResult().setEntities(new SearchEntityArray(resultEntities)) + return new SearchResult() + .setEntities(new SearchEntityArray(resultEntities)) .setMetadata(batchedResult.getMetadata()) .setFrom(from) .setPageSize(size) @@ -93,13 +92,16 @@ private SearchResult getBatch(int batchId) { if (enableCache) { K cacheKey = cacheKeyGenerator.apply(batch); if ((searchFlags == null || !searchFlags.isSkipCache())) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "getBatch_cache_access").time(); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getBatch_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "getBatch_cache_access").time(); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(SearchResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "getBatch_cache_miss").time(); result = searcher.apply(batch); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java index 49fd3157437d1..9d4cb0c9ac613 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/CachedEntityLineageResult.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.search.cache; -import com.linkedin.metadata.graph.EntityLineageResult; -import java.io.Serializable; -import lombok.Data; - import static com.datahub.util.RecordUtils.*; import static com.linkedin.metadata.search.utils.GZIPUtil.*; +import com.linkedin.metadata.graph.EntityLineageResult; +import java.io.Serializable; +import lombok.Data; @Data public class CachedEntityLineageResult implements Serializable { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java index 95f208e185df1..2c99c71acf749 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/cache/EntityDocCountCache.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.search.cache; -import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.google.common.base.Suppliers; +import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.utils.ConcurrencyUtils; @@ -13,24 +13,27 @@ import java.util.function.Supplier; import java.util.stream.Collectors; - public class EntityDocCountCache { private final EntityRegistry _entityRegistry; private final EntitySearchService _entitySearchService; private final Supplier> entityDocCount; - public EntityDocCountCache(EntityRegistry entityRegistry, EntitySearchService entitySearchService, + public EntityDocCountCache( + EntityRegistry 
entityRegistry, + EntitySearchService entitySearchService, EntityDocCountCacheConfiguration config) { _entityRegistry = entityRegistry; _entitySearchService = entitySearchService; - entityDocCount = Suppliers.memoizeWithExpiration(this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); + entityDocCount = + Suppliers.memoizeWithExpiration( + this::fetchEntityDocCount, config.getTtlSeconds(), TimeUnit.SECONDS); } private Map fetchEntityDocCount() { - return ConcurrencyUtils - .transformAndCollectAsync(_entityRegistry.getEntitySpecs().keySet(), - Function.identity(), - Collectors.toMap(Function.identity(), _entitySearchService::docCount)); + return ConcurrencyUtils.transformAndCollectAsync( + _entityRegistry.getEntitySpecs().keySet(), + Function.identity(), + Collectors.toMap(Function.identity(), _entitySearchService::docCount)); } @WithSpan @@ -39,8 +42,7 @@ public Map getEntityDocCount() { } public List getNonEmptyEntities() { - return getEntityDocCount().entrySet() - .stream() + return getEntityDocCount().entrySet().stream() .filter(entry -> entry.getValue() > 0) .map(Map.Entry::getKey) .collect(Collectors.toList()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java index db414d70603dc..eaeae0cfc1556 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/client/CachingEntitySearchService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.client; +import static com.datahub.util.RecordUtils.toJsonString; +import static com.datahub.util.RecordUtils.toRecordTemplate; + import com.codahale.metrics.Timer; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; @@ -21,25 +24,23 @@ import org.springframework.cache.Cache; import org.springframework.cache.CacheManager; -import static com.datahub.util.RecordUtils.toJsonString; -import static com.datahub.util.RecordUtils.toRecordTemplate; - - @RequiredArgsConstructor public class CachingEntitySearchService { private static final String ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME = "entitySearchServiceSearch"; - private static final String ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME = "entitySearchServiceAutoComplete"; + private static final String ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME = + "entitySearchServiceAutoComplete"; private static final String ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME = "entitySearchServiceBrowse"; public static final String ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME = "entitySearchServiceScroll"; private final CacheManager cacheManager; - private final EntitySearchService entitySearchService; // This is a shared component, also used in search aggregation + private final EntitySearchService + entitySearchService; // This is a shared component, also used in search aggregation private final int batchSize; private final boolean enableCache; /** - * Retrieves cached search results. If the query has been cached, this will return quickly. If not, a full - * search request will be made. + * Retrieves cached search results. If the query has been cached, this will return quickly. If + * not, a full search request will be made. 
* * @param entityName the name of the entity to search * @param query the search query @@ -49,7 +50,6 @@ public class CachingEntitySearchService { * @param size the count * @param flags additional search flags * @param facets list of facets we want aggregations for - * * @return a {@link SearchResult} containing the requested batch of search results */ public SearchResult search( @@ -61,7 +61,8 @@ public SearchResult search( int size, @Nullable SearchFlags flags, @Nullable List facets) { - return getCachedSearchResults(entityNames, query, filters, sortCriterion, from, size, flags, facets); + return getCachedSearchResults( + entityNames, query, filters, sortCriterion, from, size, flags, facets); } /** @@ -72,7 +73,6 @@ public SearchResult search( * @param filters the filters to include * @param limit the max number of results to return * @param flags additional search flags - * * @return a {@link SearchResult} containing the requested batch of search results */ public AutoCompleteResult autoComplete( @@ -93,7 +93,6 @@ public AutoCompleteResult autoComplete( * @param filters the request map with fields and values as filters * @param from index of the first entity located in path * @param size the max number of entities contained in the response - * * @return a {@link SearchResult} containing the requested batch of search results */ public BrowseResult browse( @@ -107,8 +106,8 @@ public BrowseResult browse( } /** - * Retrieves cached scroll results. If the query has been cached, this will return quickly. If not, a full - * scroll request will be made. + * Retrieves cached scroll results. If the query has been cached, this will return quickly. If + * not, a full scroll request will be made. * * @param entities the names of the entities to search * @param query the search query @@ -118,7 +117,6 @@ public BrowseResult browse( * @param keepAlive the string representation of how long to keep point in time alive * @param size the count * @param flags additional search flags - * * @return a {@link ScrollResult} containing the requested batch of scroll results */ public ScrollResult scroll( @@ -130,15 +128,15 @@ public ScrollResult scroll( @Nullable String keepAlive, int size, @Nullable SearchFlags flags) { - return getCachedScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); + return getCachedScrollResults( + entities, query, filters, sortCriterion, scrollId, keepAlive, size, flags); } - - /** - * Get search results corresponding to the input "from" and "size" - * It goes through batches, starting from the beginning, until we get enough results to return - * This lets us have batches that return a variable number of results (we have no idea which batch the "from" "size" page corresponds to) + * Get search results corresponding to the input "from" and "size" It goes through batches, + * starting from the beginning, until we get enough results to return This lets us have batches + * that return a variable number of results (we have no idea which batch the "from" "size" page + * corresponds to) */ public SearchResult getCachedSearchResults( @Nonnull List entityNames, @@ -150,19 +148,33 @@ public SearchResult getCachedSearchResults( @Nullable SearchFlags flags, @Nullable List facets) { return new CacheableSearcher<>( - cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), - batchSize, - querySize -> getRawSearchResults(entityNames, query, filters, sortCriterion, querySize.getFrom(), - querySize.getSize(), flags, facets), - querySize -> 
Septet.with(entityNames, query, filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, flags != null ? toJsonString(flags) : null, - facets, querySize), flags, enableCache).getSearchResults(from, size); + cacheManager.getCache(ENTITY_SEARCH_SERVICE_SEARCH_CACHE_NAME), + batchSize, + querySize -> + getRawSearchResults( + entityNames, + query, + filters, + sortCriterion, + querySize.getFrom(), + querySize.getSize(), + flags, + facets), + querySize -> + Septet.with( + entityNames, + query, + filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, + facets, + querySize), + flags, + enableCache) + .getSearchResults(from, size); } - - /** - * Returns cached auto-complete results. - */ + /** Returns cached auto-complete results. */ public AutoCompleteResult getCachedAutoCompleteResults( @Nonnull String entityName, @Nonnull String input, @@ -170,19 +182,29 @@ public AutoCompleteResult getCachedAutoCompleteResults( @Nullable Filter filters, int limit, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_AUTOCOMPLETE_CACHE_NAME); AutoCompleteResult result; if (enableCache(flags)) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); - Object cacheKey = Sextet.with(entityName, input, field, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, limit); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getCachedAutoCompleteResults_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "autocomplete_cache_access").time(); + Object cacheKey = + Sextet.with( + entityName, + input, + field, + filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, + limit); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(AutoCompleteResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "autocomplete_cache_miss").time(); result = getRawAutoCompleteResults(entityName, input, field, filters, limit); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); @@ -196,9 +218,7 @@ public AutoCompleteResult getCachedAutoCompleteResults( } } - /** - * Returns cached browse results. - */ + /** Returns cached browse results. 
*/ public BrowseResult getCachedBrowseResults( @Nonnull String entityName, @Nonnull String path, @@ -206,19 +226,29 @@ public BrowseResult getCachedBrowseResults( int from, int size, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedBrowseResults").time()) { Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_BROWSE_CACHE_NAME); BrowseResult result; if (enableCache(flags)) { - try (Timer.Context ignored2 = MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "browse_cache_access").time(); - Object cacheKey = Sextet.with(entityName, path, filters != null ? toJsonString(filters) : null, - flags != null ? toJsonString(flags) : null, from, size); + try (Timer.Context ignored2 = + MetricUtils.timer(this.getClass(), "getCachedBrowseResults_cache").time()) { + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "browse_cache_access").time(); + Object cacheKey = + Sextet.with( + entityName, + path, + filters != null ? toJsonString(filters) : null, + flags != null ? toJsonString(flags) : null, + from, + size); String json = cache.get(cacheKey, String.class); result = json != null ? toRecordTemplate(BrowseResult.class, json) : null; cacheAccess.stop(); if (result == null) { - Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); + Timer.Context cacheMiss = + MetricUtils.timer(this.getClass(), "browse_cache_miss").time(); result = getRawBrowseResults(entityName, path, filters, from, size); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); @@ -232,9 +262,7 @@ public BrowseResult getCachedBrowseResults( } } - /** - * Returns cached scroll results. - */ + /** Returns cached scroll results. */ public ScrollResult getCachedScrollResults( @Nonnull List entities, @Nonnull String query, @@ -244,37 +272,62 @@ public ScrollResult getCachedScrollResults( @Nullable String keepAlive, int size, @Nullable SearchFlags flags) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { - boolean isFullText = Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "getCachedScrollResults").time()) { + boolean isFullText = + Boolean.TRUE.equals(Optional.ofNullable(flags).orElse(new SearchFlags()).isFulltext()); Cache cache = cacheManager.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); ScrollResult result; if (enableCache(flags)) { - Timer.Context cacheAccess = MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); - Object cacheKey = Septet.with(entities, query, - filters != null ? toJsonString(filters) : null, - sortCriterion != null ? toJsonString(sortCriterion) : null, - flags != null ? toJsonString(flags) : null, - scrollId, size); + Timer.Context cacheAccess = + MetricUtils.timer(this.getClass(), "scroll_cache_access").time(); + Object cacheKey = + Septet.with( + entities, + query, + filters != null ? toJsonString(filters) : null, + sortCriterion != null ? toJsonString(sortCriterion) : null, + flags != null ? toJsonString(flags) : null, + scrollId, + size); String json = cache.get(cacheKey, String.class); result = json != null ? 
toRecordTemplate(ScrollResult.class, json) : null; cacheAccess.stop(); if (result == null) { Timer.Context cacheMiss = MetricUtils.timer(this.getClass(), "scroll_cache_miss").time(); - result = getRawScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, isFullText, flags); + result = + getRawScrollResults( + entities, + query, + filters, + sortCriterion, + scrollId, + keepAlive, + size, + isFullText, + flags); cache.put(cacheKey, toJsonString(result)); cacheMiss.stop(); MetricUtils.counter(this.getClass(), "scroll_cache_miss_count").inc(); } } else { - result = getRawScrollResults(entities, query, filters, sortCriterion, scrollId, keepAlive, size, isFullText, flags); + result = + getRawScrollResults( + entities, + query, + filters, + sortCriterion, + scrollId, + keepAlive, + size, + isFullText, + flags); } return result; } } - /** - * Executes the expensive search query using the {@link EntitySearchService} - */ + /** Executes the expensive search query using the {@link EntitySearchService} */ private SearchResult getRawSearchResults( final List entityNames, final String input, @@ -284,46 +337,31 @@ private SearchResult getRawSearchResults( final int count, @Nullable final SearchFlags searchFlags, @Nullable final List facets) { - return entitySearchService.search(entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); + return entitySearchService.search( + entityNames, input, filters, sortCriterion, start, count, searchFlags, facets); } - /** - * Executes the expensive autocomplete query using the {@link EntitySearchService} - */ + /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private AutoCompleteResult getRawAutoCompleteResults( final String entityName, final String input, final String field, final Filter filters, final int limit) { - return entitySearchService.autoComplete( - entityName, - input, - field, - filters, - limit); + return entitySearchService.autoComplete(entityName, input, field, filters, limit); } - /** - * Executes the expensive autocomplete query using the {@link EntitySearchService} - */ + /** Executes the expensive autocomplete query using the {@link EntitySearchService} */ private BrowseResult getRawBrowseResults( final String entityName, final String input, final Filter filters, final int start, final int count) { - return entitySearchService.browse( - entityName, - input, - filters, - start, - count); + return entitySearchService.browse(entityName, input, filters, start, count); } - /** - * Executes the expensive search query using the {@link EntitySearchService} - */ + /** Executes the expensive search query using the {@link EntitySearchService} */ private ScrollResult getRawScrollResults( final List entities, final String input, @@ -336,31 +374,15 @@ private ScrollResult getRawScrollResults( @Nullable final SearchFlags searchFlags) { if (fulltext) { return entitySearchService.fullTextScroll( - entities, - input, - filters, - sortCriterion, - scrollId, - keepAlive, - count, - searchFlags); + entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); } else { - return entitySearchService.structuredScroll(entities, - input, - filters, - sortCriterion, - scrollId, - keepAlive, - count, - searchFlags); + return entitySearchService.structuredScroll( + entities, input, filters, sortCriterion, scrollId, keepAlive, count, searchFlags); } } - /** - * Returns true if the cache should be used or skipped when fetching search results - */ + /** Returns true if the 
cache should be used when fetching search results */ private boolean enableCache(final SearchFlags searchFlags) { return enableCache && (searchFlags == null || !searchFlags.isSkipCache()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java index 68a5483fa469c..f40da59a149fa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchService.java @@ -17,19 +17,16 @@ import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.SearchUtils; - +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.util.List; import java.util.Map; import java.util.Optional; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.search.SearchResponse; - @Slf4j @RequiredArgsConstructor public class ElasticSearchService implements EntitySearchService, ElasticSearchIndexed { @@ -66,15 +63,19 @@ public long docCount(@Nonnull String entityName) { } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { - log.debug(String.format("Upserting Search document entityName: %s, document: %s, docId: %s", entityName, document, - docId)); + public void upsertDocument( + @Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { + log.debug( + String.format( + "Upserting Search document entityName: %s, document: %s, docId: %s", + entityName, document, docId)); esWriteDAO.upsertDocument(entityName, document, docId); } @Override public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { - log.debug(String.format("Deleting Search document entityName: %s, docId: %s", entityName, docId)); + log.debug( + String.format("Deleting Search document entityName: %s, docId: %s", entityName, docId)); esWriteDAO.deleteDocument(entityName, docId); } @@ -82,12 +83,15 @@ public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable String runId) { final Optional maybeDocId = SearchUtils.getDocId(urn); if (!maybeDocId.isPresent()) { - log.warn(String.format("Failed to append run id, could not generate a doc id for urn %s", urn)); + log.warn( + String.format("Failed to append run id, could not generate a doc id for urn %s", urn)); return; } final String docId = maybeDocId.get(); log.debug(String.format("Appending run id for entityName: %s, docId: %s", entityName, docId)); - esWriteDAO.applyScriptUpdate(entityName, docId, + esWriteDAO.applyScriptUpdate( + entityName, + docId, /* Script used to apply updates to the runId field of the index. This script saves the past N run ids which touched a particular URN in the search index.
@@ -99,102 +103,161 @@ public void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable + "ctx._source.runId.add('%s'); " + "if (ctx._source.runId.length > %s) { ctx._source.runId.remove(0) } } " + "} else { ctx._source.runId = ['%s'] }", - runId, - runId, - MAX_RUN_IDS_INDEXED, - runId)); + runId, runId, MAX_RUN_IDS_INDEXED, runId)); } @Nonnull @Override - public SearchResult search(@Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags) { + public SearchResult search( + @Nonnull List entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags) { return search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, null); } @Nonnull - public SearchResult search(@Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List facets) { - log.debug(String.format( - "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", - entityNames, input, postFilters, sortCriterion, from, size)); - return esSearchDAO.search(entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); + public SearchResult search( + @Nonnull List entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List facets) { + log.debug( + String.format( + "Searching FullText Search documents entityName: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", + entityNames, input, postFilters, sortCriterion, from, size)); + return esSearchDAO.search( + entityNames, input, postFilters, sortCriterion, from, size, searchFlags, facets); } @Nonnull @Override - public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, int from, int size) { + public SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { log.debug( - String.format("Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", + String.format( + "Filtering Search documents entityName: %s, filters: %s, sortCriterion: %s, from: %s, size: %s", entityName, filters, sortCriterion, from, size)); return esSearchDAO.filter(entityName, filters, sortCriterion, from, size); } @Nonnull @Override - public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit) { - log.debug(String.format("Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", - entityName, query, field, requestParams, limit)); + public AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit) { + log.debug( + String.format( + "Autocompleting query entityName: %s, query: %s, field: %s, requestParams: %s, limit: %s", + entityName, query, field, requestParams, limit)); return esSearchDAO.autoComplete(entityName, query, field, requestParams, limit); } @Nonnull @Override - public Map 
aggregateByValue(@Nullable List entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit) { - log.debug("Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", entityNames != null ? entityNames.toString() : null, field, - requestParams, limit); + public Map aggregateByValue( + @Nullable List entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit) { + log.debug( + "Aggregating by value: {}, field: {}, requestParams: {}, limit: {}", + entityNames != null ? entityNames.toString() : null, + field, + requestParams, + limit); return esSearchDAO.aggregateByValue(entityNames, field, requestParams, limit); } @Nonnull @Override - public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, + public BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filters, + int from, int size) { log.debug( - String.format("Browsing entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", entityName, - path, filters, from, size)); + String.format( + "Browsing entities entityName: %s, path: %s, filters: %s, from: %s, size: %s", + entityName, path, filters, from, size)); return esBrowseDAO.browse(entityName, path, filters, from, size); } @Nonnull @Override - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count) { return esBrowseDAO.browseV2(entityName, path, filter, input, start, count); } @Nonnull @Override public List getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) { - log.debug(String.format("Getting browse paths for entity entityName: %s, urn: %s", entityName, urn)); + log.debug( + String.format("Getting browse paths for entity entityName: %s, urn: %s", entityName, urn)); return esBrowseDAO.getBrowsePaths(entityName, urn); } @Nonnull @Override - public ScrollResult fullTextScroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult fullTextScroll( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Scrolling Structured Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); flags.setFulltext(true); - return esSearchDAO.scroll(entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, - flags); + return esSearchDAO.scroll( + entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); } @Nonnull @Override - public ScrollResult structuredScroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable 
SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags) { - log.debug(String.format( - "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", - entities, input, postFilters, sortCriterion, scrollId, size)); + public ScrollResult structuredScroll( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags) { + log.debug( + String.format( + "Scrolling FullText Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, scrollId: %s, size: %s", + entities, input, postFilters, sortCriterion, scrollId, size)); SearchFlags flags = Optional.ofNullable(searchFlags).orElse(new SearchFlags()); flags.setFulltext(false); - return esSearchDAO.scroll(entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); + return esSearchDAO.scroll( + entities, input, postFilters, sortCriterion, scrollId, keepAlive, size, flags); } public Optional raw(@Nonnull String indexName, @Nullable String jsonQuery) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java index 43431e93622f7..388dcea784cbb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ESIndexBuilder.java @@ -1,11 +1,14 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.google.common.collect.ImmutableMap; - +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.version.GitVersion; +import com.linkedin.util.Pair; +import io.github.resilience4j.retry.Retry; +import io.github.resilience4j.retry.RetryConfig; +import io.github.resilience4j.retry.RetryRegistry; import java.io.IOException; import java.time.Duration; import java.time.Instant; @@ -21,11 +24,6 @@ import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -import com.linkedin.util.Pair; -import io.github.resilience4j.retry.Retry; -import io.github.resilience4j.retry.RetryConfig; -import io.github.resilience4j.retry.RetryRegistry; import javax.annotation.Nullable; import lombok.Getter; import lombok.extern.slf4j.Slf4j; @@ -37,6 +35,7 @@ import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; import org.opensearch.action.admin.indices.delete.DeleteIndexRequest; import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.client.GetAliasesResponse; @@ -54,55 +53,52 @@ import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.reindex.ReindexRequest; -import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import 
org.opensearch.search.SearchHit; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.tasks.TaskInfo; - @Slf4j public class ESIndexBuilder { private final RestHighLevelClient _searchClient; - @Getter - private final int numShards; + @Getter private final int numShards; - @Getter - private final int numReplicas; + @Getter private final int numReplicas; - @Getter - private final int numRetries; + @Getter private final int numRetries; - @Getter - private final int refreshIntervalSeconds; + @Getter private final int refreshIntervalSeconds; - @Getter - private final Map> indexSettingOverrides; + @Getter private final Map> indexSettingOverrides; - @Getter - private final boolean enableIndexSettingsReindex; + @Getter private final boolean enableIndexSettingsReindex; - @Getter - private final boolean enableIndexMappingsReindex; + @Getter private final boolean enableIndexMappingsReindex; - @Getter - private final ElasticSearchConfiguration elasticSearchConfiguration; + @Getter private final ElasticSearchConfiguration elasticSearchConfiguration; - @Getter - private final GitVersion gitVersion; + @Getter private final GitVersion gitVersion; - final private static RequestOptions REQUEST_OPTIONS = RequestOptions.DEFAULT.toBuilder() - .setRequestConfig(RequestConfig.custom() - .setSocketTimeout(180 * 1000).build()).build(); + private static final RequestOptions REQUEST_OPTIONS = + RequestOptions.DEFAULT.toBuilder() + .setRequestConfig(RequestConfig.custom().setSocketTimeout(180 * 1000).build()) + .build(); private final RetryRegistry retryRegistry; - public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numReplicas, int numRetries, - int refreshIntervalSeconds, Map> indexSettingOverrides, - boolean enableIndexSettingsReindex, boolean enableIndexMappingsReindex, - ElasticSearchConfiguration elasticSearchConfiguration, GitVersion gitVersion) { + public ESIndexBuilder( + RestHighLevelClient searchClient, + int numShards, + int numReplicas, + int numRetries, + int refreshIntervalSeconds, + Map> indexSettingOverrides, + boolean enableIndexSettingsReindex, + boolean enableIndexMappingsReindex, + ElasticSearchConfiguration elasticSearchConfiguration, + GitVersion gitVersion) { this._searchClient = searchClient; this.numShards = numShards; this.numReplicas = numReplicas; @@ -114,7 +110,8 @@ public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numRe this.elasticSearchConfiguration = elasticSearchConfiguration; this.gitVersion = gitVersion; - RetryConfig config = RetryConfig.custom() + RetryConfig config = + RetryConfig.custom() .maxAttempts(Math.max(1, numRetries)) .waitDuration(Duration.ofSeconds(10)) .retryOnException(e -> e instanceof OpenSearchException) @@ -125,8 +122,11 @@ public ESIndexBuilder(RestHighLevelClient searchClient, int numShards, int numRe this.retryRegistry = RetryRegistry.of(config); } - public ReindexConfig buildReindexState(String indexName, Map mappings, Map settings) throws IOException { - ReindexConfig.ReindexConfigBuilder builder = ReindexConfig.builder() + public ReindexConfig buildReindexState( + String indexName, Map mappings, Map settings) + throws IOException { + ReindexConfig.ReindexConfigBuilder builder = + ReindexConfig.builder() .name(indexName) .enableIndexSettingsReindex(enableIndexSettingsReindex) .enableIndexMappingsReindex(enableIndexMappingsReindex) @@ -142,7 +142,8 @@ public ReindexConfig 
buildReindexState(String indexName, Map map builder.targetSettings(targetSetting); // Check if index exists - boolean exists = _searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); + boolean exists = + _searchClient.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT); builder.exists(exists); // If index doesn't exist, no reindex @@ -150,7 +151,9 @@ public ReindexConfig buildReindexState(String indexName, Map map return builder.build(); } - Settings currentSettings = _searchClient.indices() + Settings currentSettings = + _searchClient + .indices() .getSettings(new GetSettingsRequest().indices(indexName), RequestOptions.DEFAULT) .getIndexToSettings() .values() @@ -158,7 +161,9 @@ public ReindexConfig buildReindexState(String indexName, Map map .next(); builder.currentSettings(currentSettings); - Map currentMappings = _searchClient.indices() + Map currentMappings = + _searchClient + .indices() .getMapping(new GetMappingsRequest().indices(indexName), RequestOptions.DEFAULT) .mappings() .values() @@ -172,16 +177,19 @@ public ReindexConfig buildReindexState(String indexName, Map map } /** - * Builds index with given name, mappings and settings - * Deprecated: Use the `buildIndex(ReindexConfig indexState) to enforce conventions via ReindexConfig class - * earlier in the process. + * Builds index with given name, mappings and settings. Deprecated: use + * `buildIndex(ReindexConfig indexState)` to enforce conventions via the ReindexConfig class + * earlier in the process. + * + * @param indexName index name * @param mappings ES mappings * @param settings ES settings * @throws IOException ES error */ @Deprecated - public void buildIndex(String indexName, Map mappings, Map settings) throws IOException { + public void buildIndex( + String indexName, Map mappings, Map settings) + throws IOException { buildIndex(buildReindexState(indexName, mappings, settings)); } @@ -210,15 +218,20 @@ public void buildIndex(ReindexConfig indexState) throws IOException { if (indexState.requiresApplySettings()) { UpdateSettingsRequest request = new UpdateSettingsRequest(indexState.name()); - Map indexSettings = ((Map) indexState.targetSettings().get("index")) + Map indexSettings = + ((Map) indexState.targetSettings().get("index")) .entrySet().stream() - .filter(e -> ReindexConfig.SETTINGS_DYNAMIC.contains(e.getKey())) - .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue)); + .filter(e -> ReindexConfig.SETTINGS_DYNAMIC.contains(e.getKey())) + .collect(Collectors.toMap(e -> "index." + e.getKey(), Map.Entry::getValue)); request.settings(indexSettings); - boolean ack = _searchClient.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexState.name(), - ReindexConfig.OBJECT_MAPPER.writeValueAsString(indexSettings), ack); + boolean ack = + _searchClient.indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); + log.info( + "Updated index {} with new settings.
Settings: {}, Acknowledged: {}", + indexState.name(), + ReindexConfig.OBJECT_MAPPER.writeValueAsString(indexSettings), + ack); } } else { try { @@ -231,30 +244,40 @@ public void buildIndex(ReindexConfig indexState) throws IOException { /** * Apply mappings changes if reindex is not required + * * @param indexState the state of the current and target index settings/mappings - * @param suppressError during reindex logic this is not an error, for structured properties it is an error + * @param suppressError during reindex logic this is not an error, for structured properties it is + * an error * @throws IOException communication issues with ES */ public void applyMappings(ReindexConfig indexState, boolean suppressError) throws IOException { if (indexState.isPureMappingsAddition()) { log.info("Updating index {} mappings in place.", indexState.name()); - PutMappingRequest request = new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); + PutMappingRequest request = + new PutMappingRequest(indexState.name()).source(indexState.targetMappings()); _searchClient.indices().putMapping(request, RequestOptions.DEFAULT); log.info("Updated index {} with new mappings", indexState.name()); } else { if (!suppressError) { - log.error("Attempted to apply invalid mappings. Current: {} Target: {}", indexState.currentMappings(), - indexState.targetMappings()); + log.error( + "Attempted to apply invalid mappings. Current: {} Target: {}", + indexState.currentMappings(), + indexState.targetMappings()); } } } - public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options, ReindexConfig config) + public String reindexInPlaceAsync( + String indexAlias, + @Nullable QueryBuilder filterQuery, + BatchWriteOperationsOptions options, + ReindexConfig config) throws Exception { - GetAliasesResponse aliasesResponse = _searchClient.indices().getAlias( - new GetAliasesRequest(indexAlias), RequestOptions.DEFAULT); + GetAliasesResponse aliasesResponse = + _searchClient.indices().getAlias(new GetAliasesRequest(indexAlias), RequestOptions.DEFAULT); if (aliasesResponse.getAliases().isEmpty()) { - throw new IllegalArgumentException(String.format("Input to reindexInPlaceAsync should be an alias. %s is not", indexAlias)); + throw new IllegalArgumentException( + String.format("Input to reindexInPlaceAsync should be an alias. %s is not", indexAlias)); } // Point alias at new index @@ -262,9 +285,12 @@ public String reindexInPlaceAsync(String indexAlias, @Nullable QueryBuilder filt createIndex(nextIndexName, config); renameReindexedIndices(_searchClient, indexAlias, null, nextIndexName, false); - return submitReindex(aliasesResponse.getAliases().keySet().toArray(new String[0]), - nextIndexName, options.getBatchSize(), - TimeValue.timeValueSeconds(options.getTimeoutSeconds()), filterQuery); + return submitReindex( + aliasesResponse.getAliases().keySet().toArray(new String[0]), + nextIndexName, + options.getBatchSize(), + TimeValue.timeValueSeconds(options.getTimeoutSeconds()), + filterQuery); } private static String getNextIndexName(String base, long startTime) { @@ -286,10 +312,14 @@ private void reindex(ReindexConfig indexState) throws Throwable { String parentTaskId; if (previousTaskInfo.isPresent()) { - log.info("Reindex task {} in progress with description {}. Attempting to continue task from breakpoint.", - previousTaskInfo.get().getTaskId(), previousTaskInfo.get().getDescription()); + log.info( + "Reindex task {} in progress with description {}. 
Attempting to continue task from breakpoint.", + previousTaskInfo.get().getTaskId(), + previousTaskInfo.get().getDescription()); parentTaskId = previousTaskInfo.get().getParentTaskId().toString(); - tempIndexName = ESUtils.extractTargetIndex(previousTaskInfo.get().getHeaders().get(ESUtils.OPAQUE_ID_HEADER)); + tempIndexName = + ESUtils.extractTargetIndex( + previousTaskInfo.get().getHeaders().get(ESUtils.OPAQUE_ID_HEADER)); } else { // Create new index createIndex(tempIndexName, indexState); @@ -304,7 +334,11 @@ private void reindex(ReindexConfig indexState) throws Throwable { long documentCountsLastUpdated = System.currentTimeMillis(); while (System.currentTimeMillis() < timeoutAt) { - log.info("Task: {} - Reindexing from {} to {} in progress...", parentTaskId, indexState.name(), tempIndexName); + log.info( + "Task: {} - Reindexing from {} to {} in progress...", + parentTaskId, + indexState.name(), + tempIndexName); Pair tempDocumentsCount = getDocumentCounts(indexState.name(), tempIndexName); if (!tempDocumentsCount.equals(documentCounts)) { @@ -313,18 +347,28 @@ private void reindex(ReindexConfig indexState) throws Throwable { } if (documentCounts.getFirst().equals(documentCounts.getSecond())) { - log.info("Task: {} - Reindexing {} to {} task was successful", parentTaskId, indexState.name(), tempIndexName); + log.info( + "Task: {} - Reindexing {} to {} task was successful", + parentTaskId, + indexState.name(), + tempIndexName); reindexTaskCompleted = true; break; } else { - log.warn("Task: {} - Document counts do not match {} != {}. Complete: {}%", parentTaskId, documentCounts.getFirst(), - documentCounts.getSecond(), 100 * (1.0f * documentCounts.getSecond()) / documentCounts.getFirst()); + log.warn( + "Task: {} - Document counts do not match {} != {}. 
Complete: {}%", + parentTaskId, + documentCounts.getFirst(), + documentCounts.getSecond(), + 100 * (1.0f * documentCounts.getSecond()) / documentCounts.getFirst()); long lastUpdateDelta = System.currentTimeMillis() - documentCountsLastUpdated; if (lastUpdateDelta > (300 * 1000)) { - if (reindexCount <= numRetries) { - log.warn("No change in index count after 5 minutes, re-triggering reindex #{}.", reindexCount); + if (reindexCount <= numRetries) { + log.warn( + "No change in index count after 5 minutes, re-triggering reindex #{}.", + reindexCount); submitReindex(indexState.name(), tempIndexName); reindexCount = reindexCount + 1; documentCountsLastUpdated = System.currentTimeMillis(); // reset timer @@ -341,37 +385,63 @@ private void reindex(ReindexConfig indexState) throws Throwable { if (!reindexTaskCompleted) { if (elasticSearchConfiguration.getBuildIndices().isAllowDocCountMismatch() - && elasticSearchConfiguration.getBuildIndices().isCloneIndices()) { - log.warn("Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}\n" - + "This condition is explicitly ALLOWED, please refer to latest clone if original index is required.", - indexState.name(), documentCounts.getFirst(), documentCounts.getSecond()); + && elasticSearchConfiguration.getBuildIndices().isCloneIndices()) { + log.warn( + "Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}\n" + + "This condition is explicitly ALLOWED, please refer to latest clone if original index is required.", + indexState.name(), + documentCounts.getFirst(), + documentCounts.getSecond()); } else { - log.error("Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", - indexState.name(), documentCounts.getFirst(), documentCounts.getSecond()); - diff(indexState.name(), tempIndexName, Math.max(documentCounts.getFirst(), documentCounts.getSecond())); - throw new RuntimeException(String.format("Reindex from %s to %s failed. Document count %s != %s", indexState.name(), tempIndexName, - documentCounts.getFirst(), documentCounts.getSecond())); + log.error( + "Index: {} - Post-reindex document count is different, source_doc_count: {} reindex_doc_count: {}", + indexState.name(), + documentCounts.getFirst(), + documentCounts.getSecond()); + diff( + indexState.name(), + tempIndexName, + Math.max(documentCounts.getFirst(), documentCounts.getSecond())); + throw new RuntimeException( + String.format( + "Reindex from %s to %s failed. 
Document count %s != %s", + indexState.name(), + tempIndexName, + documentCounts.getFirst(), + documentCounts.getSecond())); } } } catch (Throwable e) { - log.error("Failed to reindex {} to {}: Exception {}", indexState.name(), tempIndexName, e.toString()); - _searchClient.indices().delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT); + log.error( + "Failed to reindex {} to {}: Exception {}", + indexState.name(), + tempIndexName, + e.toString()); + _searchClient + .indices() + .delete(new DeleteIndexRequest().indices(tempIndexName), RequestOptions.DEFAULT); throw e; } log.info("Reindex from {} to {} succeeded", indexState.name(), tempIndexName); - renameReindexedIndices(_searchClient, indexState.name(), indexState.indexPattern(), tempIndexName, true); + renameReindexedIndices( + _searchClient, indexState.name(), indexState.indexPattern(), tempIndexName, true); log.info("Finished setting up {}", indexState.name()); } - public static void renameReindexedIndices(RestHighLevelClient searchClient, String originalName, @Nullable String pattern, String newName, boolean deleteOld) + public static void renameReindexedIndices( + RestHighLevelClient searchClient, + String originalName, + @Nullable String pattern, + String newName, + boolean deleteOld) throws IOException { GetAliasesRequest getAliasesRequest = new GetAliasesRequest(originalName); if (pattern != null) { getAliasesRequest.indices(pattern); } - GetAliasesResponse aliasesResponse = searchClient.indices().getAlias( - getAliasesRequest, RequestOptions.DEFAULT); + GetAliasesResponse aliasesResponse = + searchClient.indices().getAlias(getAliasesRequest, RequestOptions.DEFAULT); // If not aliased, delete the original index final Collection aliasedIndexDelete; @@ -384,23 +454,31 @@ public static void renameReindexedIndices(RestHighLevelClient searchClient, Stri } // Add alias for the new index - AliasActions removeAction = deleteOld ? AliasActions.removeIndex() : AliasActions.remove().alias(originalName); + AliasActions removeAction = + deleteOld ? 
AliasActions.removeIndex() : AliasActions.remove().alias(originalName); removeAction.indices(aliasedIndexDelete.toArray(new String[0])); AliasActions addAction = AliasActions.add().alias(originalName).index(newName); - searchClient.indices() - .updateAliases(new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction), + searchClient + .indices() + .updateAliases( + new IndicesAliasesRequest().addAliasAction(removeAction).addAliasAction(addAction), RequestOptions.DEFAULT); } - private String submitReindex(String[] sourceIndices, String destinationIndex, - int batchSize, @Nullable TimeValue timeout, - @Nullable QueryBuilder sourceFilterQuery) throws IOException { - ReindexRequest reindexRequest = new ReindexRequest() - .setSourceIndices(sourceIndices) - .setDestIndex(destinationIndex) - .setMaxRetries(numRetries) - .setAbortOnVersionConflict(false) - .setSourceBatchSize(batchSize); + private String submitReindex( + String[] sourceIndices, + String destinationIndex, + int batchSize, + @Nullable TimeValue timeout, + @Nullable QueryBuilder sourceFilterQuery) + throws IOException { + ReindexRequest reindexRequest = + new ReindexRequest() + .setSourceIndices(sourceIndices) + .setDestIndex(destinationIndex) + .setMaxRetries(numRetries) + .setAbortOnVersionConflict(false) + .setSourceBatchSize(batchSize); if (timeout != null) { reindexRequest.setTimeout(timeout); } @@ -408,26 +486,34 @@ private String submitReindex(String[] sourceIndices, String destinationIndex, reindexRequest.setSourceQuery(sourceFilterQuery); } - RequestOptions requestOptions = ESUtils.buildReindexTaskRequestOptions(gitVersion.getVersion(), sourceIndices[0], - destinationIndex); - TaskSubmissionResponse reindexTask = _searchClient.submitReindexTask(reindexRequest, requestOptions); + RequestOptions requestOptions = + ESUtils.buildReindexTaskRequestOptions( + gitVersion.getVersion(), sourceIndices[0], destinationIndex); + TaskSubmissionResponse reindexTask = + _searchClient.submitReindexTask(reindexRequest, requestOptions); return reindexTask.getTask(); } private String submitReindex(String sourceIndex, String destinationIndex) throws IOException { - return submitReindex(new String[]{sourceIndex}, destinationIndex, 2500, null, null); + return submitReindex(new String[] {sourceIndex}, destinationIndex, 2500, null, null); } - private Pair getDocumentCounts(String sourceIndex, String destinationIndex) throws Throwable { + private Pair getDocumentCounts(String sourceIndex, String destinationIndex) + throws Throwable { // Check whether reindex succeeded by comparing document count - // There can be some delay between the reindex finishing and count being fully up to date, so try multiple times + // There can be some delay between the reindex finishing and count being fully up to date, so + // try multiple times long originalCount = 0; long reindexedCount = 0; for (int i = 0; i < this.numRetries; i++) { // Check if reindex succeeded by comparing document counts - originalCount = retryRegistry.retry("retrySourceIndexCount") + originalCount = + retryRegistry + .retry("retrySourceIndexCount") .executeCheckedSupplier(() -> getCount(sourceIndex)); - reindexedCount = retryRegistry.retry("retryDestinationIndexCount") + reindexedCount = + retryRegistry + .retry("retryDestinationIndexCount") .executeCheckedSupplier(() -> getCount(destinationIndex)); if (originalCount == reindexedCount) { break; @@ -445,13 +531,20 @@ private Pair getDocumentCounts(String sourceIndex, String destinatio private Optional 
getTaskInfoByHeader(String indexName) throws Throwable { Retry retryWithDefaultConfig = retryRegistry.retry("getTaskInfoByHeader"); - return retryWithDefaultConfig.executeCheckedSupplier(() -> { - ListTasksRequest listTasksRequest = new ListTasksRequest().setDetailed(true); - List taskInfos = _searchClient.tasks().list(listTasksRequest, REQUEST_OPTIONS).getTasks(); - return taskInfos.stream() - .filter(info -> ESUtils.prefixMatch(info.getHeaders().get(ESUtils.OPAQUE_ID_HEADER), gitVersion.getVersion(), - indexName)).findFirst(); - }); + return retryWithDefaultConfig.executeCheckedSupplier( + () -> { + ListTasksRequest listTasksRequest = new ListTasksRequest().setDetailed(true); + List taskInfos = + _searchClient.tasks().list(listTasksRequest, REQUEST_OPTIONS).getTasks(); + return taskInfos.stream() + .filter( + info -> + ESUtils.prefixMatch( + info.getHeaders().get(ESUtils.OPAQUE_ID_HEADER), + gitVersion.getVersion(), + indexName)) + .findFirst(); + }); } private void diff(String indexA, String indexB, long maxDocs) { @@ -470,12 +563,17 @@ private void diff(String indexA, String indexB, long maxDocs) { SearchResponse responseA = _searchClient.search(indexARequest, RequestOptions.DEFAULT); SearchResponse responseB = _searchClient.search(indexBRequest, RequestOptions.DEFAULT); - Set actual = Arrays.stream(responseB.getHits().getHits()) - .map(SearchHit::getId).collect(Collectors.toSet()); + Set actual = + Arrays.stream(responseB.getHits().getHits()) + .map(SearchHit::getId) + .collect(Collectors.toSet()); - log.error("Missing {}", Arrays.stream(responseA.getHits().getHits()) + log.error( + "Missing {}", + Arrays.stream(responseA.getHits().getHits()) .filter(doc -> !actual.contains(doc.getId())) - .map(SearchHit::getSourceAsString).collect(Collectors.toSet())); + .map(SearchHit::getSourceAsString) + .collect(Collectors.toSet())); } catch (IOException e) { throw new RuntimeException(e); } @@ -483,7 +581,10 @@ private void diff(String indexA, String indexB, long maxDocs) { } private long getCount(@Nonnull String indexName) throws IOException { - return _searchClient.count(new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), RequestOptions.DEFAULT) + return _searchClient + .count( + new CountRequest(indexName).query(QueryBuilders.matchAllQuery()), + RequestOptions.DEFAULT) .getCount(); } @@ -496,30 +597,48 @@ private void createIndex(String indexName, ReindexConfig state) throws IOExcepti log.info("Created index {}", indexName); } - public static void cleanIndex(RestHighLevelClient searchClient, ElasticSearchConfiguration esConfig, ReindexConfig indexState) { - log.info("Checking for orphan index pattern {} older than {} {}", indexState.indexPattern(), - esConfig.getBuildIndices().getRetentionValue(), - esConfig.getBuildIndices().getRetentionUnit()); - - getOrphanedIndices(searchClient, esConfig, indexState).forEach(orphanIndex -> { - log.warn("Deleting orphan index {}.", orphanIndex); - try { - searchClient.indices().delete(new DeleteIndexRequest().indices(orphanIndex), RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + public static void cleanIndex( + RestHighLevelClient searchClient, + ElasticSearchConfiguration esConfig, + ReindexConfig indexState) { + log.info( + "Checking for orphan index pattern {} older than {} {}", + indexState.indexPattern(), + esConfig.getBuildIndices().getRetentionValue(), + esConfig.getBuildIndices().getRetentionUnit()); + + getOrphanedIndices(searchClient, esConfig, indexState) + .forEach( + orphanIndex -> 
{ + log.warn("Deleting orphan index {}.", orphanIndex); + try { + searchClient + .indices() + .delete(new DeleteIndexRequest().indices(orphanIndex), RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } - private static List getOrphanedIndices(RestHighLevelClient searchClient, ElasticSearchConfiguration esConfig, - ReindexConfig indexState) { + private static List getOrphanedIndices( + RestHighLevelClient searchClient, + ElasticSearchConfiguration esConfig, + ReindexConfig indexState) { List orphanedIndices = new ArrayList<>(); try { - Date retentionDate = Date.from(Instant.now() - .minus(Duration.of(esConfig.getBuildIndices().getRetentionValue(), - ChronoUnit.valueOf(esConfig.getBuildIndices().getRetentionUnit())))); - - GetIndexResponse response = searchClient.indices().get(new GetIndexRequest(indexState.indexCleanPattern()), RequestOptions.DEFAULT); + Date retentionDate = + Date.from( + Instant.now() + .minus( + Duration.of( + esConfig.getBuildIndices().getRetentionValue(), + ChronoUnit.valueOf(esConfig.getBuildIndices().getRetentionUnit())))); + + GetIndexResponse response = + searchClient + .indices() + .get(new GetIndexRequest(indexState.indexCleanPattern()), RequestOptions.DEFAULT); for (String index : response.getIndices()) { var creationDateStr = response.getSetting(index, "index.creation_date"); @@ -530,7 +649,8 @@ private static List getOrphanedIndices(RestHighLevelClient searchClient, continue; } - if (response.getAliases().containsKey(index) && response.getAliases().get(index).size() == 0) { + if (response.getAliases().containsKey(index) + && response.getAliases().get(index).size() == 0) { log.info("Index {} is orphaned", index); orphanedIndices.add(index); } @@ -539,7 +659,9 @@ private static List getOrphanedIndices(RestHighLevelClient searchClient, if (e.getMessage().contains("index_not_found_exception")) { log.info("No orphaned indices found with pattern {}", indexState.indexCleanPattern()); } else { - log.error("An error occurred when trying to identify orphaned indices. Exception: {}", e.getMessage()); + log.error( + "An error occurred when trying to identify orphaned indices. 
Exception: {}", + e.getMessage()); } } return orphanedIndices; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java index 56cb26b09dc33..4489c661bb2ed 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/EntityIndexBuilders.java @@ -3,50 +3,50 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; - import java.io.IOException; import java.util.List; import java.util.Map; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @RequiredArgsConstructor @Slf4j public class EntityIndexBuilders implements ElasticSearchIndexed { - private final ESIndexBuilder indexBuilder; - private final EntityRegistry entityRegistry; - private final IndexConvention indexConvention; - private final SettingsBuilder settingsBuilder; - - public ESIndexBuilder getIndexBuilder() { - return indexBuilder; + private final ESIndexBuilder indexBuilder; + private final EntityRegistry entityRegistry; + private final IndexConvention indexConvention; + private final SettingsBuilder settingsBuilder; + + public ESIndexBuilder getIndexBuilder() { + return indexBuilder; + } + + @Override + public void reindexAll() { + for (ReindexConfig config : buildReindexConfigs()) { + try { + indexBuilder.buildIndex(config); + } catch (IOException e) { + throw new RuntimeException(e); + } } - - @Override - public void reindexAll() { - for (ReindexConfig config : buildReindexConfigs()) { - try { - indexBuilder.buildIndex(config); - } catch (IOException e) { + } + + @Override + public List buildReindexConfigs() { + Map settings = settingsBuilder.getSettings(); + return entityRegistry.getEntitySpecs().values().stream() + .map( + entitySpec -> { + try { + Map mappings = MappingsBuilder.getMappings(entitySpec); + return indexBuilder.buildReindexState( + indexConvention.getIndexName(entitySpec), mappings, settings); + } catch (IOException e) { throw new RuntimeException(e); - } - } - } - - @Override - public List buildReindexConfigs() { - Map settings = settingsBuilder.getSettings(); - return entityRegistry.getEntitySpecs().values().stream().map(entitySpec -> { - try { - Map mappings = MappingsBuilder.getMappings(entitySpec); - return indexBuilder.buildReindexState(indexConvention.getIndexName(entitySpec), mappings, settings); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList()); - } + } + }) + .collect(Collectors.toList()); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 13a0f57ccea99..f85a0dcb06a07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; + import 
com.google.common.collect.ImmutableMap; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchScoreFieldSpec; @@ -14,20 +16,19 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; - - @Slf4j public class MappingsBuilder { - private static final Map PARTIAL_NGRAM_CONFIG = ImmutableMap.of( + private static final Map PARTIAL_NGRAM_CONFIG = + ImmutableMap.of( TYPE, "search_as_you_type", MAX_SHINGLE_SIZE, "4", DOC_VALUES, "false"); - public static Map getPartialNgramConfigWithOverrides(Map overrides) { + public static Map getPartialNgramConfigWithOverrides( + Map overrides) { return Stream.concat(PARTIAL_NGRAM_CONFIG.entrySet().stream(), overrides.entrySet().stream()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } public static final Map KEYWORD_TYPE_MAP = ImmutableMap.of(TYPE, KEYWORD); @@ -45,16 +46,19 @@ public static Map getPartialNgramConfigWithOverrides(Map getMappings(@Nonnull final EntitySpec entitySpec) { Map mappings = new HashMap<>(); - entitySpec.getSearchableFieldSpecs() + entitySpec + .getSearchableFieldSpecs() .forEach(searchableFieldSpec -> mappings.putAll(getMappingsForField(searchableFieldSpec))); - entitySpec.getSearchScoreFieldSpecs() - .forEach(searchScoreFieldSpec -> mappings.putAll(getMappingsForSearchScoreField(searchScoreFieldSpec))); + entitySpec + .getSearchScoreFieldSpecs() + .forEach( + searchScoreFieldSpec -> + mappings.putAll(getMappingsForSearchScoreField(searchScoreFieldSpec))); // Fixed fields mappings.put("urn", getMappingsForUrn()); @@ -65,64 +69,70 @@ public static Map getMappings(@Nonnull final EntitySpec entitySp private static Map getMappingsForUrn() { Map subFields = new HashMap<>(); - subFields.put(DELIMITED, ImmutableMap.of( + subFields.put( + DELIMITED, + ImmutableMap.of( TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, URN_ANALYZER, SEARCH_ANALYZER, URN_SEARCH_ANALYZER, - SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER) - ); - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - ImmutableMap.of( - ANALYZER, PARTIAL_URN_COMPONENT - ) - )); + SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER)); + subFields.put( + NGRAM, + getPartialNgramConfigWithOverrides(ImmutableMap.of(ANALYZER, PARTIAL_URN_COMPONENT))); return ImmutableMap.builder() - .put(TYPE, ESUtils.KEYWORD_FIELD_TYPE) - .put(FIELDS, subFields) - .build(); + .put(TYPE, ESUtils.KEYWORD_FIELD_TYPE) + .put(FIELDS, subFields) + .build(); } private static Map getMappingsForRunId() { return ImmutableMap.builder().put(TYPE, ESUtils.KEYWORD_FIELD_TYPE).build(); } - private static Map getMappingsForField(@Nonnull final SearchableFieldSpec searchableFieldSpec) { + private static Map getMappingsForField( + @Nonnull final SearchableFieldSpec searchableFieldSpec) { FieldType fieldType = searchableFieldSpec.getSearchableAnnotation().getFieldType(); Map mappings = new HashMap<>(); Map mappingForField = new HashMap<>(); if (fieldType == FieldType.KEYWORD) { mappingForField.putAll(getMappingsForKeyword()); - } else if (fieldType == FieldType.TEXT || fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) { + } else if (fieldType == FieldType.TEXT + || fieldType == FieldType.TEXT_PARTIAL + || fieldType == FieldType.WORD_GRAM) { mappingForField.putAll(getMappingsForSearchText(fieldType)); } else if (fieldType == FieldType.BROWSE_PATH) { mappingForField.put(TYPE, 
ESUtils.TEXT_FIELD_TYPE); - mappingForField.put(FIELDS, - ImmutableMap.of(LENGTH, ImmutableMap.of( - TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, - ANALYZER, SLASH_PATTERN_ANALYZER))); + mappingForField.put( + FIELDS, + ImmutableMap.of( + LENGTH, + ImmutableMap.of( + TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, ANALYZER, SLASH_PATTERN_ANALYZER))); mappingForField.put(ANALYZER, BROWSE_PATH_HIERARCHY_ANALYZER); mappingForField.put(FIELDDATA, true); } else if (fieldType == FieldType.BROWSE_PATH_V2) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); - mappingForField.put(FIELDS, - ImmutableMap.of(LENGTH, ImmutableMap.of( - TYPE, ESUtils.TOKEN_COUNT_FIELD_TYPE, - ANALYZER, UNIT_SEPARATOR_PATTERN_ANALYZER))); + mappingForField.put( + FIELDS, + ImmutableMap.of( + LENGTH, + ImmutableMap.of( + TYPE, + ESUtils.TOKEN_COUNT_FIELD_TYPE, + ANALYZER, + UNIT_SEPARATOR_PATTERN_ANALYZER))); mappingForField.put(ANALYZER, BROWSE_PATH_V2_HIERARCHY_ANALYZER); mappingForField.put(FIELDDATA, true); - } else if (fieldType == FieldType.URN || fieldType == FieldType.URN_PARTIAL) { + } else if (fieldType == FieldType.URN || fieldType == FieldType.URN_PARTIAL) { mappingForField.put(TYPE, ESUtils.TEXT_FIELD_TYPE); mappingForField.put(ANALYZER, URN_ANALYZER); mappingForField.put(SEARCH_ANALYZER, URN_SEARCH_ANALYZER); mappingForField.put(SEARCH_QUOTE_ANALYZER, CUSTOM_QUOTE_ANALYZER); Map subFields = new HashMap<>(); if (fieldType == FieldType.URN_PARTIAL) { - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - Map.of( - ANALYZER, PARTIAL_URN_COMPONENT - ) - )); + subFields.put( + NGRAM, getPartialNgramConfigWithOverrides(Map.of(ANALYZER, PARTIAL_URN_COMPONENT))); } subFields.put(KEYWORD, KEYWORD_TYPE_MAP); mappingForField.put(FIELDS, subFields); @@ -141,12 +151,17 @@ private static Map getMappingsForField(@Nonnull final Searchable } mappings.put(searchableFieldSpec.getSearchableAnnotation().getFieldName(), mappingForField); - searchableFieldSpec.getSearchableAnnotation() + searchableFieldSpec + .getSearchableAnnotation() .getHasValuesFieldName() - .ifPresent(fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.BOOLEAN_FIELD_TYPE))); - searchableFieldSpec.getSearchableAnnotation() + .ifPresent( + fieldName -> + mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.BOOLEAN_FIELD_TYPE))); + searchableFieldSpec + .getSearchableAnnotation() .getNumValuesFieldName() - .ifPresent(fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.LONG_FIELD_TYPE))); + .ifPresent( + fieldName -> mappings.put(fieldName, ImmutableMap.of(TYPE, ESUtils.LONG_FIELD_TYPE))); mappings.putAll(getMappingsForFieldNameAliases(searchableFieldSpec)); return mappings; @@ -167,26 +182,25 @@ private static Map getMappingsForSearchText(FieldType fieldType) mappingForField.put(NORMALIZER, KEYWORD_NORMALIZER); Map subFields = new HashMap<>(); if (fieldType == FieldType.TEXT_PARTIAL || fieldType == FieldType.WORD_GRAM) { - subFields.put(NGRAM, getPartialNgramConfigWithOverrides( - ImmutableMap.of( - ANALYZER, PARTIAL_ANALYZER - ) - )); + subFields.put( + NGRAM, getPartialNgramConfigWithOverrides(ImmutableMap.of(ANALYZER, PARTIAL_ANALYZER))); if (fieldType == FieldType.WORD_GRAM) { - for (Map.Entry entry : Map.of( - WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER, - WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER, - WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER).entrySet()) { + for (Map.Entry entry : + Map.of( + WORD_GRAMS_LENGTH_2, WORD_GRAM_2_ANALYZER, + WORD_GRAMS_LENGTH_3, WORD_GRAM_3_ANALYZER, + WORD_GRAMS_LENGTH_4, WORD_GRAM_4_ANALYZER) + .entrySet()) 
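+        // Illustrative example (not part of this change; assumes the shingle filters defined
+        // below in SettingsBuilder with output_unigrams=false): a three-token value such as
+        // "logging events backup" is indexed by the length-2 field as the shingles
+        // ["logging events", "events backup"], by the length-3 field as
+        // ["logging events backup"], while the length-4 field emits nothing for it.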
{ String fieldName = entry.getKey(); String analyzerName = entry.getValue(); - subFields.put(fieldName, ImmutableMap.of( - TYPE, ESUtils.TEXT_FIELD_TYPE, - ANALYZER, analyzerName - )); + subFields.put( + fieldName, ImmutableMap.of(TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, analyzerName)); } } } - subFields.put(DELIMITED, ImmutableMap.of( + subFields.put( + DELIMITED, + ImmutableMap.of( TYPE, ESUtils.TEXT_FIELD_TYPE, ANALYZER, TEXT_ANALYZER, SEARCH_ANALYZER, TEXT_SEARCH_ANALYZER, @@ -199,19 +213,23 @@ private static Map getMappingsForSearchText(FieldType fieldType) private static Map getMappingsForSearchScoreField( @Nonnull final SearchScoreFieldSpec searchScoreFieldSpec) { - return ImmutableMap.of(searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(), + return ImmutableMap.of( + searchScoreFieldSpec.getSearchScoreAnnotation().getFieldName(), ImmutableMap.of(TYPE, ESUtils.DOUBLE_FIELD_TYPE)); } - private static Map getMappingsForFieldNameAliases(@Nonnull final SearchableFieldSpec searchableFieldSpec) { + private static Map getMappingsForFieldNameAliases( + @Nonnull final SearchableFieldSpec searchableFieldSpec) { Map mappings = new HashMap<>(); - List fieldNameAliases = searchableFieldSpec.getSearchableAnnotation().getFieldNameAliases(); - fieldNameAliases.forEach(alias -> { - Map aliasMappings = new HashMap<>(); - aliasMappings.put(TYPE, ALIAS); - aliasMappings.put(PATH, searchableFieldSpec.getSearchableAnnotation().getFieldName()); - mappings.put(alias, aliasMappings); - }); + List fieldNameAliases = + searchableFieldSpec.getSearchableAnnotation().getFieldNameAliases(); + fieldNameAliases.forEach( + alias -> { + Map aliasMappings = new HashMap<>(); + aliasMappings.put(TYPE, ALIAS); + aliasMappings.put(PATH, searchableFieldSpec.getSearchableAnnotation().getFieldName()); + mappings.put(alias, aliasMappings); + }); return mappings; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java index 8b8a48f5d9cda..e3155c9f943cc 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/ReindexConfig.java @@ -1,256 +1,298 @@ package com.linkedin.metadata.search.elasticsearch.indexbuilder; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; -import lombok.Builder; -import lombok.Getter; -import lombok.experimental.Accessors; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.common.settings.Settings; - import java.util.List; import java.util.Map; import java.util.Objects; import java.util.TreeMap; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static com.linkedin.metadata.Constants.*; - +import lombok.Builder; +import lombok.Getter; +import lombok.experimental.Accessors; +import lombok.extern.slf4j.Slf4j; +import org.opensearch.common.settings.Settings; @Slf4j @Builder @Getter @Accessors(fluent = true) public class ReindexConfig { - public final static ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - static { - int maxSize = 
Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - /* - Most index settings are default values and populated by Elastic. This list is an include list to determine which - settings we care about when a difference is present. - */ - public static final List SETTINGS_DYNAMIC = ImmutableList.of("number_of_replicas", "refresh_interval"); - // These setting require reindex - public static final List SETTINGS_STATIC = ImmutableList.of("number_of_shards"); - public static final List SETTINGS = Stream.concat( - SETTINGS_DYNAMIC.stream(), SETTINGS_STATIC.stream()).collect(Collectors.toList()); + public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - final private String name; - final private boolean exists; - final private Settings currentSettings; - final private Map targetSettings; - final private Map currentMappings; - final private Map targetMappings; - final private boolean enableIndexMappingsReindex; - final private boolean enableIndexSettingsReindex; - final private String version; + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } - /* Calculated */ - final private boolean requiresReindex; - final private boolean requiresApplySettings; - final private boolean requiresApplyMappings; - final private boolean isPureMappingsAddition; - final private boolean isSettingsReindex; + /* + Most index settings are default values and populated by Elastic. This list is an include list to determine which + settings we care about when a difference is present. 
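+
+  For example (illustrative): a changed "number_of_replicas" or "refresh_interval" value can be
+  applied to the live index via an UpdateSettingsRequest, whereas "number_of_shards" is fixed at
+  index creation time and can only change by reindexing into a newly created index.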
+ */ + public static final List SETTINGS_DYNAMIC = + ImmutableList.of("number_of_replicas", "refresh_interval"); + // These setting require reindex + public static final List SETTINGS_STATIC = ImmutableList.of("number_of_shards"); + public static final List SETTINGS = + Stream.concat(SETTINGS_DYNAMIC.stream(), SETTINGS_STATIC.stream()) + .collect(Collectors.toList()); - public static ReindexConfigBuilder builder() { - return new CalculatedBuilder(); - } + private final String name; + private final boolean exists; + private final Settings currentSettings; + private final Map targetSettings; + private final Map currentMappings; + private final Map targetMappings; + private final boolean enableIndexMappingsReindex; + private final boolean enableIndexSettingsReindex; + private final String version; - public static class ReindexConfigBuilder { - // hide calculated fields - private ReindexConfigBuilder requiresReindex(boolean ignored) { - return this; - } - private ReindexConfigBuilder requiresApplySettings(boolean ignored) { - return this; - } - private ReindexConfigBuilder requiresApplyMappings(boolean ignored) { - return this; - } - private ReindexConfigBuilder isPureMappingsAddition(boolean ignored) { - return this; - } - private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { - return this; - } + /* Calculated */ + private final boolean requiresReindex; + private final boolean requiresApplySettings; + private final boolean requiresApplyMappings; + private final boolean isPureMappingsAddition; + private final boolean isSettingsReindex; - // ensure sorted - public ReindexConfigBuilder currentMappings(Map currentMappings) { - this.currentMappings = sortMap(currentMappings); - return this; - } - public ReindexConfigBuilder targetMappings(Map targetMappings) { - this.targetMappings = sortMap(targetMappings); - return this; - } + public static ReindexConfigBuilder builder() { + return new CalculatedBuilder(); + } - private static TreeMap sortMap(Map input) { - return input.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> { - if (e.getValue() instanceof Map) { - return sortMap((Map) e.getValue()); - } else { - return String.valueOf(e.getValue()); - } - }, - (oldValue, newValue) -> newValue, TreeMap::new)); - } + public static class ReindexConfigBuilder { + // hide calculated fields + private ReindexConfigBuilder requiresReindex(boolean ignored) { + return this; } - /** - * Implement calculated fields - */ - public String indexPattern() { - return name + "*"; + private ReindexConfigBuilder requiresApplySettings(boolean ignored) { + return this; } - public String indexCleanPattern() { - return name + "_*"; + private ReindexConfigBuilder requiresApplyMappings(boolean ignored) { + return this; } - private static class CalculatedBuilder extends ReindexConfigBuilder { - @Override - public ReindexConfig build() { - if (super.exists) { - /* Consider mapping changes */ - MapDifference mappingsDiff = Maps.difference( - getOrDefault(super.currentMappings, List.of("properties")), - getOrDefault(super.targetMappings, List.of("properties"))); - super.requiresApplyMappings = !mappingsDiff.entriesDiffering().isEmpty() - || !mappingsDiff.entriesOnlyOnRight().isEmpty(); - super.isPureMappingsAddition = super.requiresApplyMappings - && mappingsDiff.entriesDiffering().isEmpty() - && !mappingsDiff.entriesOnlyOnRight().isEmpty(); + private ReindexConfigBuilder isPureMappingsAddition(boolean ignored) { + return this; + } - if (super.requiresApplyMappings && 
super.isPureMappingsAddition) { - log.info("Index: {} - New fields have been added to index. Adding: {}", - super.name, mappingsDiff.entriesOnlyOnRight()); - } else if (super.requiresApplyMappings) { - log.info("Index: {} - There's diff between new mappings (left) and old mappings (right): {}", - super.name, mappingsDiff.entriesDiffering()); - } + private ReindexConfigBuilder isSettingsReindexRequired(boolean ignored) { + return this; + } - /* Consider analysis and settings changes */ - super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual(); - super.isSettingsReindex = isSettingsReindexRequired(); + // ensure sorted + public ReindexConfigBuilder currentMappings(Map currentMappings) { + this.currentMappings = sortMap(currentMappings); + return this; + } - /* Determine reindexing required - some settings and mappings do not require reindex, analysis always does */ - if (super.requiresApplyMappings && !super.isPureMappingsAddition) { - if (super.enableIndexMappingsReindex) { - super.requiresReindex = true; - } else { - log.warn("Index: {} - There's diff between new mappings, however reindexing is DISABLED.", super.name); - } - } - if (super.isSettingsReindex) { - try { - if (!isAnalysisEqual()) { - log.info("Index: {} - There's an update to `analysis` settings that requires reindexing. Target: {} Current: {}", - super.name, OBJECT_MAPPER.writeValueAsString(super.targetSettings), super.currentSettings); - } - if (!isSettingsEqual()) { - log.info("Index: {} - There's an update to settings that requires reindexing. Target: {} Current: {}", - super.name, OBJECT_MAPPER.writeValueAsString(super.targetSettings), super.currentSettings); - } - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - if (super.enableIndexSettingsReindex) { - super.requiresReindex = true; + public ReindexConfigBuilder targetMappings(Map targetMappings) { + this.targetMappings = sortMap(targetMappings); + return this; + } + + private static TreeMap sortMap(Map input) { + return input.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + e -> { + if (e.getValue() instanceof Map) { + return sortMap((Map) e.getValue()); } else { - log.warn("Index: {} - There's an update to settings that requires reindexing, however reindexing is DISABLED", super.name); + return String.valueOf(e.getValue()); } - } - } - return super.build(); - } + }, + (oldValue, newValue) -> newValue, + TreeMap::new)); + } + } - private static TreeMap getOrDefault(Map map, List path) { - if (map == null) { - return new TreeMap<>(); - } + /** Implement calculated fields */ + public String indexPattern() { + return name + "*"; + } - TreeMap item = (TreeMap) map.getOrDefault(path.get(0), new TreeMap()); - if (path.size() == 1) { - return item; - } else { - return getOrDefault(item, path.subList(1, path.size())); - } + public String indexCleanPattern() { + return name + "_*"; + } + + private static class CalculatedBuilder extends ReindexConfigBuilder { + @Override + public ReindexConfig build() { + if (super.exists) { + /* Consider mapping changes */ + MapDifference mappingsDiff = + Maps.difference( + getOrDefault(super.currentMappings, List.of("properties")), + getOrDefault(super.targetMappings, List.of("properties"))); + super.requiresApplyMappings = + !mappingsDiff.entriesDiffering().isEmpty() + || !mappingsDiff.entriesOnlyOnRight().isEmpty(); + super.isPureMappingsAddition = + super.requiresApplyMappings + && mappingsDiff.entriesDiffering().isEmpty() + && !mappingsDiff.entriesOnlyOnRight().isEmpty(); 
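+        // Worked example (illustrative): if the target mappings only add a field, e.g. a new
+        // boolean property, entriesDiffering() is empty and entriesOnlyOnRight() is non-empty,
+        // so the change is a pure mappings addition and can be applied in place with a
+        // PutMappingRequest; if an existing field changes type, entriesDiffering() is non-empty
+        // and a full reindex is required (subject to enableIndexMappingsReindex).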
+
+        if (super.requiresApplyMappings && super.isPureMappingsAddition) {
+          log.info(
+              "Index: {} - New fields have been added to index. Adding: {}",
+              super.name,
+              mappingsDiff.entriesOnlyOnRight());
+        } else if (super.requiresApplyMappings) {
+          log.info(
+              "Index: {} - There's a diff between the new mappings (left) and the old mappings (right): {}",
+              super.name,
+              mappingsDiff.entriesDiffering());
         }
 
-        private boolean isAnalysisEqual() {
-            if (super.targetSettings == null || !super.targetSettings.containsKey("index")) {
-                return true;
+        /* Consider analysis and settings changes */
+        super.requiresApplySettings = !isSettingsEqual() || !isAnalysisEqual();
+        super.isSettingsReindex = isSettingsReindexRequired();
+
+        /* Determine reindexing required - some settings and mappings do not require reindex, analysis always does */
+        if (super.requiresApplyMappings && !super.isPureMappingsAddition) {
+          if (super.enableIndexMappingsReindex) {
+            super.requiresReindex = true;
+          } else {
+            log.warn(
+                "Index: {} - There's a diff between the new and current mappings; however, reindexing is DISABLED.",
+                super.name);
+          }
+        }
+        if (super.isSettingsReindex) {
+          try {
+            if (!isAnalysisEqual()) {
+              log.info(
+                  "Index: {} - There's an update to `analysis` settings that requires reindexing. Target: {} Current: {}",
+                  super.name,
+                  OBJECT_MAPPER.writeValueAsString(super.targetSettings),
+                  super.currentSettings);
             }
-            Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index");
-            if (!indexSettings.containsKey("analysis")) {
-                return true;
+            if (!isSettingsEqual()) {
+              log.info(
+                  "Index: {} - There's an update to settings that requires reindexing. Target: {} Current: {}",
+                  super.name,
+                  OBJECT_MAPPER.writeValueAsString(super.targetSettings),
+                  super.currentSettings);
             }
-            // Compare analysis section
-            Map<String, Object> newAnalysis = (Map<String, Object>) indexSettings.get("analysis");
-            Settings oldAnalysis = super.currentSettings.getByPrefix("index.analysis.");
-            return equalsGroup(newAnalysis, oldAnalysis);
+          } catch (JsonProcessingException e) {
+            throw new RuntimeException(e);
+          }
+          if (super.enableIndexSettingsReindex) {
+            super.requiresReindex = true;
+          } else {
+            log.warn(
+                "Index: {} - There's an update to settings that requires reindexing; however, reindexing is DISABLED",
+                super.name);
+          }
         }
+      }
+      return super.build();
+    }
 
-        private boolean isSettingsEqual() {
-            if (super.targetSettings == null || !super.targetSettings.containsKey("index")) {
-                return true;
-            }
-            Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index");
-            return SETTINGS.stream()
-                .allMatch(settingKey -> Objects.equals(indexSettings.get(settingKey).toString(),
-                    super.currentSettings.get("index." + settingKey)));
-        }
+    private static TreeMap<String, Object> getOrDefault(
+        Map<String, Object> map, List<String> path) {
+      if (map == null) {
+        return new TreeMap<>();
+      }
 
-        private boolean isSettingsReindexRequired() {
-            if (super.targetSettings == null || !super.targetSettings.containsKey("index")) {
-                return false;
-            }
-            Map<String, Object> indexSettings = (Map<String, Object>) super.targetSettings.get("index");
+      TreeMap<String, Object> item =
+          (TreeMap<String, Object>) map.getOrDefault(path.get(0), new TreeMap());
+      if (path.size() == 1) {
+        return item;
+      } else {
+        return getOrDefault(item, path.subList(1, path.size()));
+      }
+    }
 
-            if (SETTINGS_STATIC.stream().anyMatch(settingKey ->
-                !Objects.equals(indexSettings.get(settingKey).toString(), super.currentSettings.get("index." 
+ settingKey)))) { - return true; - } + private boolean isAnalysisEqual() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return true; + } + Map indexSettings = (Map) super.targetSettings.get("index"); + if (!indexSettings.containsKey("analysis")) { + return true; + } + // Compare analysis section + Map newAnalysis = (Map) indexSettings.get("analysis"); + Settings oldAnalysis = super.currentSettings.getByPrefix("index.analysis."); + return equalsGroup(newAnalysis, oldAnalysis); + } - return indexSettings.containsKey("analysis") - && !equalsGroup((Map) indexSettings.get("analysis"), - super.currentSettings.getByPrefix("index.analysis.")); - } + private boolean isSettingsEqual() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return true; + } + Map indexSettings = (Map) super.targetSettings.get("index"); + return SETTINGS.stream() + .allMatch( + settingKey -> + Objects.equals( + indexSettings.get(settingKey).toString(), + super.currentSettings.get("index." + settingKey))); } - private static boolean equalsGroup(Map newSettings, Settings oldSettings) { - if (!newSettings.keySet().equals(oldSettings.names())) { - return false; - } + private boolean isSettingsReindexRequired() { + if (super.targetSettings == null || !super.targetSettings.containsKey("index")) { + return false; + } + Map indexSettings = (Map) super.targetSettings.get("index"); - for (String key : newSettings.keySet()) { - // Skip urn stop filter, as adding new entities will cause this filter to change - // No need to reindex every time a new entity is added - if (key.equals("urn_stop_filter")) { - continue; - } - if (newSettings.get(key) instanceof Map) { - if (!equalsGroup((Map) newSettings.get(key), oldSettings.getByPrefix(key + "."))) { - return false; - } - } else if (newSettings.get(key) instanceof List) { - if (!newSettings.get(key).equals(oldSettings.getAsList(key))) { - return false; - } - } else { - if (!newSettings.get(key).toString().equals(oldSettings.get(key))) { - return false; - } - } - } + if (SETTINGS_STATIC.stream() + .anyMatch( + settingKey -> + !Objects.equals( + indexSettings.get(settingKey).toString(), + super.currentSettings.get("index." 
+ settingKey)))) { return true; + } + + return indexSettings.containsKey("analysis") + && !equalsGroup( + (Map) indexSettings.get("analysis"), + super.currentSettings.getByPrefix("index.analysis.")); + } + } + + private static boolean equalsGroup(Map newSettings, Settings oldSettings) { + if (!newSettings.keySet().equals(oldSettings.names())) { + return false; + } + + for (String key : newSettings.keySet()) { + // Skip urn stop filter, as adding new entities will cause this filter to change + // No need to reindex every time a new entity is added + if (key.equals("urn_stop_filter")) { + continue; + } + if (newSettings.get(key) instanceof Map) { + if (!equalsGroup( + (Map) newSettings.get(key), oldSettings.getByPrefix(key + "."))) { + return false; + } + } else if (newSettings.get(key) instanceof List) { + if (!newSettings.get(key).equals(oldSettings.getAsList(key))) { + return false; + } + } else { + if (!newSettings.get(key).toString().equals(oldSettings.get(key))) { + return false; + } + } } + return true; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java index e180c8296b48d..d1eedbbce0495 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/SettingsBuilder.java @@ -2,22 +2,18 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import java.util.List; -import org.apache.commons.io.FilenameUtils; -import org.apache.commons.lang3.StringUtils; -import org.springframework.core.io.Resource; -import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.lang3.StringUtils; +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; - -/** - * Builder for generating settings for elasticsearch indices - */ +/** Builder for generating settings for elasticsearch indices */ public class SettingsBuilder { // ElasticSearch Property Map Keys @@ -42,7 +38,7 @@ public class SettingsBuilder { public static final String REPLACEMENT = "replacement"; public static final String PRESERVE_ORIGINAL = "preserve_original"; public static final String SEARCH_ANALYZER = "search_analyzer"; - public static final String SEARCH_QUOTE_ANALYZER = "search_quote_analyzer"; + public static final String SEARCH_QUOTE_ANALYZER = "search_quote_analyzer"; public static final String CUSTOM_QUOTE_ANALYZER = "quote_analyzer"; public static final String SPLIT_ON_NUMERICS = "split_on_numerics"; public static final String SPLIT_ON_CASE_CHANGE = "split_on_case_change"; @@ -98,9 +94,10 @@ public class SettingsBuilder { public static final String TRIM = "trim"; // MultiFilters - public static final String MULTIFILTER_GRAPH_1 = String.join(",", LOWERCASE, STICKY_DELIMITER_GRAPH); - public static final String MULTIFILTER_GRAPH_2 = String.join(",", LOWERCASE, ALPHANUM_SPACE_ONLY, - DEFAULT_SYN_GRAPH); + public static final String MULTIFILTER_GRAPH_1 = + String.join(",", LOWERCASE, STICKY_DELIMITER_GRAPH); + public static final String 
MULTIFILTER_GRAPH_2 = + String.join(",", LOWERCASE, ALPHANUM_SPACE_ONLY, DEFAULT_SYN_GRAPH); public static final String MULTIFILTER_1 = String.join(",", MULTIFILTER_GRAPH_1, FLATTEN_GRAPH); public static final String MULTIFILTER_2 = String.join(",", MULTIFILTER_GRAPH_2, FLATTEN_GRAPH); @@ -117,20 +114,15 @@ public class SettingsBuilder { public static final String UNIT_SEPARATOR_TOKENIZER = "unit_separator_tokenizer"; public static final String WORD_GRAM_TOKENIZER = "word_gram_tokenizer"; // Do not remove the space, needed for multi-term synonyms - public static final List ALPHANUM_SPACE_PATTERNS = ImmutableList.of( - "([a-z0-9 _-]{2,})", - "([a-z0-9 ]{2,})", - "\\\"([^\\\"]*)\\\"" - ); + public static final List ALPHANUM_SPACE_PATTERNS = + ImmutableList.of("([a-z0-9 _-]{2,})", "([a-z0-9 ]{2,})", "\\\"([^\\\"]*)\\\""); public static final List DATAHUB_STOP_WORDS_LIST = ImmutableList.of("urn", "li"); - public static final List WORD_DELIMITER_TYPE_TABLE = ImmutableList.of( - ": => SUBWORD_DELIM", - "_ => ALPHANUM", - "- => ALPHA" - ); - public static final List INDEX_TOKEN_FILTERS = ImmutableList.of( + public static final List WORD_DELIMITER_TYPE_TABLE = + ImmutableList.of(": => SUBWORD_DELIM", "_ => ALPHANUM", "- => ALPHA"); + public static final List INDEX_TOKEN_FILTERS = + ImmutableList.of( ASCII_FOLDING, MULTIFILTER, TRIM, @@ -143,7 +135,8 @@ public class SettingsBuilder { UNIQUE, MIN_LENGTH); - public static final List SEARCH_TOKEN_FILTERS = ImmutableList.of( + public static final List SEARCH_TOKEN_FILTERS = + ImmutableList.of( ASCII_FOLDING, MULTIFILTER_GRAPH, TRIM, @@ -156,25 +149,15 @@ public class SettingsBuilder { UNIQUE, MIN_LENGTH); - public static final List QUOTED_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - LOWERCASE, - REMOVE_QUOTES, - DATAHUB_STOP_WORDS, - STOP, - MIN_LENGTH); + public static final List QUOTED_TOKEN_FILTERS = + ImmutableList.of( + ASCII_FOLDING, LOWERCASE, REMOVE_QUOTES, DATAHUB_STOP_WORDS, STOP, MIN_LENGTH); - public static final List PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - AUTOCOMPLETE_CUSTOM_DELIMITER, - LOWERCASE); + public static final List PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS = + ImmutableList.of(ASCII_FOLDING, AUTOCOMPLETE_CUSTOM_DELIMITER, LOWERCASE); - public static final List WORD_GRAM_TOKEN_FILTERS = ImmutableList.of( - ASCII_FOLDING, - LOWERCASE, - TRIM, - REMOVE_QUOTES - ); + public static final List WORD_GRAM_TOKEN_FILTERS = + ImmutableList.of(ASCII_FOLDING, LOWERCASE, TRIM, REMOVE_QUOTES); public final Map settings; @@ -193,7 +176,9 @@ public Map getSettings() { private static Map buildSettings(String mainTokenizer) throws IOException { ImmutableMap.Builder settings = ImmutableMap.builder(); settings.put(MAX_NGRAM_DIFF, 17); - settings.put(ANALYSIS, ImmutableMap.builder() + settings.put( + ANALYSIS, + ImmutableMap.builder() .put(FILTER, buildFilters()) .put(TOKENIZER, buildTokenizers()) .put(NORMALIZER, buildNormalizers()) @@ -203,12 +188,15 @@ private static Map buildSettings(String mainTokenizer) throws IO } private static Map buildFilters() throws IOException { - PathMatchingResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver(); + PathMatchingResourcePatternResolver resourceResolver = + new PathMatchingResourcePatternResolver(); ImmutableMap.Builder filters = ImmutableMap.builder(); // Filter to split string into words - filters.put(AUTOCOMPLETE_CUSTOM_DELIMITER, ImmutableMap.builder() + filters.put( + AUTOCOMPLETE_CUSTOM_DELIMITER, + ImmutableMap.builder() .put(TYPE, 
WORD_DELIMITER) .put(SPLIT_ON_NUMERICS, false) .put(SPLIT_ON_CASE_CHANGE, false) @@ -216,7 +204,9 @@ private static Map buildFilters() throws IOException { .put(TYPE_TABLE, WORD_DELIMITER_TYPE_TABLE) .build()); - filters.put(STICKY_DELIMITER_GRAPH, ImmutableMap.builder() + filters.put( + STICKY_DELIMITER_GRAPH, + ImmutableMap.builder() .put(TYPE, WORD_DELIMITER_GRAPH) .put(SPLIT_ON_NUMERICS, false) .put(SPLIT_ON_CASE_CHANGE, false) @@ -225,22 +215,30 @@ private static Map buildFilters() throws IOException { .put(TYPE_TABLE, WORD_DELIMITER_TYPE_TABLE) .build()); - filters.put(DATAHUB_STOP_WORDS, ImmutableMap.builder() + filters.put( + DATAHUB_STOP_WORDS, + ImmutableMap.builder() .put(TYPE, STOP) .put(IGNORE_CASE, "true") .put(STOPWORDS, DATAHUB_STOP_WORDS_LIST) .build()); - filters.put(MIN_LENGTH, ImmutableMap.builder() - .put(TYPE, "length") - .put("min", "3") - .build()); + filters.put( + MIN_LENGTH, + ImmutableMap.builder().put(TYPE, "length").put("min", "3").build()); - Resource stemOverride = resourceResolver.getResource("classpath:elasticsearch/stem_override.txt"); - try (BufferedReader reader = new BufferedReader(new InputStreamReader(stemOverride.getInputStream()))) { - filters.put(STEM_OVERRIDE, ImmutableMap.builder() + Resource stemOverride = + resourceResolver.getResource("classpath:elasticsearch/stem_override.txt"); + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(stemOverride.getInputStream()))) { + filters.put( + STEM_OVERRIDE, + ImmutableMap.builder() .put(TYPE, "stemmer_override") - .put("rules", reader.lines() + .put( + "rules", + reader + .lines() .map(String::trim) .map(String::toLowerCase) .filter(line -> !line.isEmpty() && !line.startsWith("#")) @@ -248,42 +246,50 @@ private static Map buildFilters() throws IOException { .build()); } - filters.put(ALPHANUM_SPACE_ONLY, ImmutableMap.builder() + filters.put( + ALPHANUM_SPACE_ONLY, + ImmutableMap.builder() .put(TYPE, "pattern_capture") .put(PATTERNS, ALPHANUM_SPACE_PATTERNS) .build()); - filters.put(REMOVE_QUOTES, ImmutableMap.builder() + filters.put( + REMOVE_QUOTES, + ImmutableMap.builder() .put(TYPE, "pattern_replace") .put(PATTERN, "['\"]") .put(REPLACEMENT, "") .build()); // Index Time - filters.put(MULTIFILTER, ImmutableMap.builder() + filters.put( + MULTIFILTER, + ImmutableMap.builder() .put(TYPE, "multiplexer") - .put(FILTERS, ImmutableList.of( - MULTIFILTER_1, - MULTIFILTER_2 - )) + .put(FILTERS, ImmutableList.of(MULTIFILTER_1, MULTIFILTER_2)) .build()); // Search Time - filters.put(MULTIFILTER_GRAPH, ImmutableMap.builder() + filters.put( + MULTIFILTER_GRAPH, + ImmutableMap.builder() .put(TYPE, "multiplexer") - .put(FILTERS, ImmutableList.of( - MULTIFILTER_GRAPH_1, - MULTIFILTER_GRAPH_2 - )) + .put(FILTERS, ImmutableList.of(MULTIFILTER_GRAPH_1, MULTIFILTER_GRAPH_2)) .build()); Resource[] synonyms = resourceResolver.getResources("classpath:elasticsearch/synonyms/*.txt"); - for (Resource syn: synonyms) { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(syn.getInputStream()))) { - filters.put(String.format("%s_syn_graph", FilenameUtils.getBaseName(syn.getFilename())), ImmutableMap.builder() + for (Resource syn : synonyms) { + try (BufferedReader reader = + new BufferedReader(new InputStreamReader(syn.getInputStream()))) { + filters.put( + String.format("%s_syn_graph", FilenameUtils.getBaseName(syn.getFilename())), + ImmutableMap.builder() .put(TYPE, "synonym_graph") .put(LENIENT, "false") - .put(SYNONYMS, reader.lines() + .put( + SYNONYMS, + reader + .lines() 
.map(String::trim) .map(String::toLowerCase) .filter(line -> !line.isEmpty() && !line.startsWith("#")) @@ -291,15 +297,18 @@ private static Map buildFilters() throws IOException { .build()); } - for (Map.Entry entry : Map.of(WORD_GRAM_2_FILTER, 2, WORD_GRAM_3_FILTER, 3, WORD_GRAM_4_FILTER, 4).entrySet()) { + for (Map.Entry entry : + Map.of(WORD_GRAM_2_FILTER, 2, WORD_GRAM_3_FILTER, 3, WORD_GRAM_4_FILTER, 4).entrySet()) { String filterName = entry.getKey(); Integer gramSize = entry.getValue(); - filters.put(filterName, ImmutableMap.builder() - .put(TYPE, SHINGLE) - .put("min_shingle_size", gramSize) - .put("max_shingle_size", gramSize) - .put("output_unigrams", false) - .build()); + filters.put( + filterName, + ImmutableMap.builder() + .put(TYPE, SHINGLE) + .put("min_shingle_size", gramSize) + .put("max_shingle_size", gramSize) + .put("output_unigrams", false) + .build()); } } @@ -309,20 +318,16 @@ private static Map buildFilters() throws IOException { private static Map buildTokenizers() { ImmutableMap.Builder tokenizers = ImmutableMap.builder(); // Tokenize by slashes - tokenizers.put(SLASH_TOKENIZER, - ImmutableMap.builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[/]") - .build()); + tokenizers.put( + SLASH_TOKENIZER, + ImmutableMap.builder().put(TYPE, PATTERN).put(PATTERN, "[/]").build()); + tokenizers.put( + UNIT_SEPARATOR_TOKENIZER, + ImmutableMap.builder().put(TYPE, PATTERN).put(PATTERN, "[␟]").build()); - tokenizers.put(UNIT_SEPARATOR_TOKENIZER, - ImmutableMap.builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[␟]") - .build()); - - tokenizers.put(UNIT_SEPARATOR_PATH_TOKENIZER, + tokenizers.put( + UNIT_SEPARATOR_PATH_TOKENIZER, ImmutableMap.builder() .put(TYPE, PATH_HIERARCHY_TOKENIZER) .put(DELIMITER, "␟") @@ -331,16 +336,15 @@ private static Map buildTokenizers() { // Tokenize by most special chars // Do NOT tokenize by whitespace to keep multi-word synonyms in the same token // The split by whitespace is done later in the token filters phase - tokenizers.put(MAIN_TOKENIZER, - ImmutableMap.builder() - .put(TYPE, PATTERN) - .put(PATTERN, "[(),./:]") - .build()); + tokenizers.put( + MAIN_TOKENIZER, + ImmutableMap.builder().put(TYPE, PATTERN).put(PATTERN, "[(),./:]").build()); // Tokenize by whitespace and most special chars for wordgrams // only split on - when not preceded by a whitespace to preserve exclusion functionality // i.e. "logging-events-bkcp" and "logging-events -bckp" should be handled differently - tokenizers.put(WORD_GRAM_TOKENIZER, + tokenizers.put( + WORD_GRAM_TOKENIZER, ImmutableMap.builder() .put(TYPE, PATTERN) .put(PATTERN, "[(),./:\\s_]|(?<=\\S)(-)") @@ -353,8 +357,11 @@ private static Map buildTokenizers() { private static Map buildNormalizers() { ImmutableMap.Builder normalizers = ImmutableMap.builder(); // Analyzer for partial matching (i.e. 
autocomplete) - Prefix matching of each token - normalizers.put(KEYWORD_NORMALIZER, - ImmutableMap.builder().put(FILTER, ImmutableList.of(LOWERCASE, ASCII_FOLDING)).build()); + normalizers.put( + KEYWORD_NORMALIZER, + ImmutableMap.builder() + .put(FILTER, ImmutableList.of(LOWERCASE, ASCII_FOLDING)) + .build()); return normalizers.build(); } @@ -364,90 +371,119 @@ private static Map buildAnalyzers(String mainTokenizer) { ImmutableMap.Builder analyzers = ImmutableMap.builder(); // Analyzer for splitting by slashes (used to get depth of browsePath) - analyzers.put(SLASH_PATTERN_ANALYZER, ImmutableMap.builder() + analyzers.put( + SLASH_PATTERN_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, SLASH_TOKENIZER) .put(FILTER, ImmutableList.of(LOWERCASE)) .build()); // Analyzer for splitting by unit-separator (used to get depth of browsePathV2) - analyzers.put(UNIT_SEPARATOR_PATTERN_ANALYZER, ImmutableMap.builder() + analyzers.put( + UNIT_SEPARATOR_PATTERN_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, UNIT_SEPARATOR_TOKENIZER) .put(FILTER, ImmutableList.of(LOWERCASE)) .build()); // Analyzer for matching browse path - analyzers.put(BROWSE_PATH_HIERARCHY_ANALYZER, ImmutableMap.builder() - .put(TOKENIZER, PATH_HIERARCHY_TOKENIZER) - .build()); + analyzers.put( + BROWSE_PATH_HIERARCHY_ANALYZER, + ImmutableMap.builder().put(TOKENIZER, PATH_HIERARCHY_TOKENIZER).build()); // Analyzer for matching browse path v2 - analyzers.put(BROWSE_PATH_V2_HIERARCHY_ANALYZER, ImmutableMap.builder() + analyzers.put( + BROWSE_PATH_V2_HIERARCHY_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, UNIT_SEPARATOR_PATH_TOKENIZER) .build()); // Analyzer for case-insensitive exact matching - Only used when building queries - analyzers.put(KEYWORD_LOWERCASE_ANALYZER, ImmutableMap.builder() + analyzers.put( + KEYWORD_LOWERCASE_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, KEYWORD_TOKENIZER) .put(FILTER, ImmutableList.of("trim", LOWERCASE, ASCII_FOLDING, SNOWBALL)) .build()); // Analyzer for quotes words - analyzers.put(CUSTOM_QUOTE_ANALYZER, ImmutableMap.builder() + analyzers.put( + CUSTOM_QUOTE_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, KEYWORD_TOKENIZER) .put(FILTER, QUOTED_TOKEN_FILTERS) .build()); // Analyzer for text tokenized into words (split by spaces, periods, and slashes) - analyzers.put(TEXT_ANALYZER, ImmutableMap.builder() + analyzers.put( + TEXT_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? mainTokenizer : MAIN_TOKENIZER) .put(FILTER, INDEX_TOKEN_FILTERS) .build()); - analyzers.put(TEXT_SEARCH_ANALYZER, ImmutableMap.builder() + analyzers.put( + TEXT_SEARCH_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? 
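// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: the slash and unit-separator
// analyzers above exist mainly so browse-path depth can be derived from the
// number of tokens. The equivalent counting in plain Java, using the same
// "[/]" pattern as SLASH_TOKENIZER:
import java.util.regex.Pattern;

class BrowseDepthSketch {
  public static void main(String[] args) {
    long depth =
        Pattern.compile("[/]")
            .splitAsStream("/prod/kafka/SampleKafkaDataset")
            .filter(token -> !token.isEmpty()) // the leading '/' yields an empty token
            .count();
    System.out.println(depth); // 3
  }
}
// --------------------------------------------------------------------------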
mainTokenizer : MAIN_TOKENIZER) .put(FILTER, SEARCH_TOKEN_FILTERS) .build()); // Analyzer for getting urn components - analyzers.put(URN_ANALYZER, ImmutableMap.builder() + analyzers.put( + URN_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, INDEX_TOKEN_FILTERS) .build()); - analyzers.put(URN_SEARCH_ANALYZER, ImmutableMap.builder() + analyzers.put( + URN_SEARCH_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, SEARCH_TOKEN_FILTERS) .build()); // Support word grams - for (Map.Entry entry : Map.of( - WORD_GRAM_2_ANALYZER, WORD_GRAM_2_FILTER, - WORD_GRAM_3_ANALYZER, WORD_GRAM_3_FILTER, - WORD_GRAM_4_ANALYZER, WORD_GRAM_4_FILTER).entrySet()) { + for (Map.Entry entry : + Map.of( + WORD_GRAM_2_ANALYZER, WORD_GRAM_2_FILTER, + WORD_GRAM_3_ANALYZER, WORD_GRAM_3_FILTER, + WORD_GRAM_4_ANALYZER, WORD_GRAM_4_FILTER) + .entrySet()) { String analyzerName = entry.getKey(); String filterName = entry.getValue(); - analyzers.put(analyzerName, ImmutableMap.builder() - .put(TOKENIZER, WORD_GRAM_TOKENIZER) - .put(FILTER, ImmutableList.builder() - .addAll(WORD_GRAM_TOKEN_FILTERS) - .add(filterName).build()) - .build()); + analyzers.put( + analyzerName, + ImmutableMap.builder() + .put(TOKENIZER, WORD_GRAM_TOKENIZER) + .put( + FILTER, + ImmutableList.builder() + .addAll(WORD_GRAM_TOKEN_FILTERS) + .add(filterName) + .build()) + .build()); } - // For special analysis, the substitution can be read from the configuration (chinese tokenizer: ik_smart / smartCN) + // For special analysis, the substitution can be read from the configuration (chinese tokenizer: + // ik_smart / smartCN) // Analyzer for partial matching (i.e. autocomplete) - Prefix matching of each token - analyzers.put(PARTIAL_ANALYZER, ImmutableMap.builder() + analyzers.put( + PARTIAL_ANALYZER, + ImmutableMap.builder() .put(TOKENIZER, StringUtils.isNotBlank(mainTokenizer) ? 
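// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: a shingle filter configured with
// min_shingle_size == max_shingle_size == N and output_unigrams == false
// emits exactly the N-token windows over the token stream, which is what the
// word-gram filters wired in above produce. The same windowing in plain Java:
import java.util.ArrayList;
import java.util.List;

class ShingleSketch {
  static List<String> shingles(List<String> tokens, int n) {
    List<String> out = new ArrayList<>();
    for (int i = 0; i + n <= tokens.size(); i++) {
      out.add(String.join(" ", tokens.subList(i, i + n)));
    }
    return out;
  }

  public static void main(String[] args) {
    List<String> tokens = List.of("logging", "events", "bkcp");
    System.out.println(shingles(tokens, 2)); // [logging events, events bkcp]
    System.out.println(shingles(tokens, 3)); // [logging events bkcp]
  }
}
// --------------------------------------------------------------------------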
mainTokenizer : MAIN_TOKENIZER) .put(FILTER, PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS) .build()); // Analyzer for partial matching urn components - analyzers.put(PARTIAL_URN_COMPONENT, ImmutableMap.builder() + analyzers.put( + PARTIAL_URN_COMPONENT, + ImmutableMap.builder() .put(TOKENIZER, MAIN_TOKENIZER) .put(FILTER, PARTIAL_AUTOCOMPLETE_TOKEN_FILTERS) .build()); - return analyzers.build(); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java index 5fd0a80d23c50..5ea60b24a577a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESBrowseDAO.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query; +import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; + import com.codahale.metrics.Timer; import com.datahub.util.exception.ESQueryException; import com.google.common.annotations.VisibleForTesting; @@ -54,9 +56,6 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.utils.SearchUtil.filterSoftDeletedByDefault; - - @Slf4j @RequiredArgsConstructor public class ESBrowseDAO { @@ -64,10 +63,8 @@ public class ESBrowseDAO { private final EntityRegistry entityRegistry; private final RestHighLevelClient client; private final IndexConvention indexConvention; - @Nonnull - private final SearchConfiguration searchConfiguration; - @Nullable - private final CustomSearchConfiguration customSearchConfiguration; + @Nonnull private final SearchConfiguration searchConfiguration; + @Nullable private final CustomSearchConfiguration customSearchConfiguration; private static final String BROWSE_PATH = "browsePaths"; private static final String BROWSE_PATH_DEPTH = "browsePaths.length"; @@ -107,19 +104,26 @@ private class BrowseGroupsResultV2 { * @return a {@link BrowseResult} that contains a list of groups/entities */ @Nonnull - public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filters, int from, + public BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filters, + int from, int size) { final Map requestMap = SearchUtils.getRequestMap(filters); try { - final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); + final String indexName = + indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { groupsResponse = - client.search(constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); + client.search( + constructGroupsSearchRequest(indexName, path, requestMap), RequestOptions.DEFAULT); } - final BrowseGroupsResult browseGroupsResult = extractGroupsResponse(groupsResponse, path, from, size); + final BrowseGroupsResult browseGroupsResult = + extractGroupsResponse(groupsResponse, path, from, size); final int numGroups = browseGroupsResult.getTotalGroups(); // Based on the number of groups returned, compute the from and size to query for entities @@ -131,14 +135,19 @@ public BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nu final SearchResponse entitiesResponse; try (Timer.Context ignored = 
MetricUtils.timer(this.getClass(), "esEntitiesSearch").time()) { entitiesResponse = - client.search(constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), + client.search( + constructEntitiesSearchRequest(indexName, path, requestMap, entityFrom, entitySize), RequestOptions.DEFAULT); } final int numEntities = (int) entitiesResponse.getHits().getTotalHits().value; - final List browseResultEntityList = extractEntitiesResponse(entitiesResponse, path); - - return new BrowseResult().setMetadata( - new BrowseResultMetadata().setTotalNumEntities(browseGroupsResult.getTotalNumEntities()).setPath(path)) + final List browseResultEntityList = + extractEntitiesResponse(entitiesResponse, path); + + return new BrowseResult() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) .setEntities(new BrowseResultEntityArray(browseResultEntityList)) .setGroups(new BrowseResultGroupArray(browseGroupsResult.getGroups())) .setNumEntities(numEntities) @@ -176,8 +185,8 @@ private AggregationBuilder buildAggregations(@Nonnull String path) { * @return {@link SearchRequest} */ @Nonnull - protected SearchRequest constructGroupsSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map requestMap) { + protected SearchRequest constructGroupsSearchRequest( + @Nonnull String indexName, @Nonnull String path, @Nonnull Map requestMap) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); @@ -196,8 +205,8 @@ protected SearchRequest constructGroupsSearchRequest(@Nonnull String indexName, * @return {@link QueryBuilder} */ @Nonnull - private QueryBuilder buildQueryString(@Nonnull String path, @Nonnull Map requestMap, - boolean isGroupQuery) { + private QueryBuilder buildQueryString( + @Nonnull String path, @Nonnull Map requestMap, boolean isGroupQuery) { final int browseDepthVal = getPathDepth(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); @@ -229,13 +238,17 @@ private QueryBuilder buildQueryString(@Nonnull String path, @Nonnull Map requestMap, int from, int size) { + SearchRequest constructEntitiesSearchRequest( + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map requestMap, + int from, + int size) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.from(from); searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(new String[]{BROWSE_PATH, URN}, null); + searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); searchSourceBuilder.query(buildQueryString(path, requestMap, false)); searchRequest.source(searchSourceBuilder); @@ -254,8 +267,13 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull */ @VisibleForTesting @Nonnull - SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull String path, - @Nonnull Map requestMap, @Nullable Object[] sort, @Nullable String pitId, @Nonnull String keepAlive, + SearchRequest constructEntitiesSearchRequest( + @Nonnull String indexName, + @Nonnull String path, + @Nonnull Map requestMap, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, int size) { final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder 
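// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: the groups request built above is a
// pure aggregation query - size(0) suppresses hits, a term filter pins the
// browse depth, and only the terms aggregation comes back. A reduced sketch
// with the OpenSearch builders this class already uses; the aggregation name,
// size, and index name below are assumptions for illustration:
import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.aggregations.AggregationBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

class GroupsRequestSketch {
  public static void main(String[] args) {
    SearchSourceBuilder source = new SearchSourceBuilder();
    source.size(0); // groups come from the aggregation, not from hits
    source.query(QueryBuilders.termQuery("browsePaths.length", 2));
    source.aggregation(AggregationBuilders.terms("groups").field("browsePaths").size(100));
    SearchRequest request = new SearchRequest("datasetindex_v2").source(source);
    System.out.println(request.source()); // the JSON body that would be sent
  }
}
// --------------------------------------------------------------------------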
searchSourceBuilder = new SearchSourceBuilder(); @@ -263,7 +281,7 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); searchSourceBuilder.size(size); - searchSourceBuilder.fetchSource(new String[]{BROWSE_PATH, URN}, null); + searchSourceBuilder.fetchSource(new String[] {BROWSE_PATH, URN}, null); searchSourceBuilder.sort(URN, SortOrder.ASC); searchSourceBuilder.query(buildQueryString(path, requestMap, false)); searchRequest.source(searchSourceBuilder); @@ -278,19 +296,24 @@ SearchRequest constructEntitiesSearchRequest(@Nonnull String indexName, @Nonnull * @return {@link BrowseResultMetadata} */ @Nonnull - private BrowseGroupsResult extractGroupsResponse(@Nonnull SearchResponse groupsResponse, @Nonnull String path, - int from, int size) { + private BrowseGroupsResult extractGroupsResponse( + @Nonnull SearchResponse groupsResponse, @Nonnull String path, int from, int size) { final ParsedTerms groups = groupsResponse.getAggregations().get(GROUP_AGG); - final List groupsAgg = groups.getBuckets() - .stream() - .map(group -> new BrowseResultGroup().setName(getSimpleName(group.getKeyAsString())) - .setCount(group.getDocCount())) - .collect(Collectors.toList()); + final List groupsAgg = + groups.getBuckets().stream() + .map( + group -> + new BrowseResultGroup() + .setName(getSimpleName(group.getKeyAsString())) + .setCount(group.getDocCount())) + .collect(Collectors.toList()); // Get the groups that are in the from to from + size range - final List paginatedGroups = groupsAgg.size() <= from ? Collections.emptyList() - : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); - return new BrowseGroupsResult(paginatedGroups, groupsAgg.size(), - (int) groupsResponse.getHits().getTotalHits().value); + final List paginatedGroups = + groupsAgg.size() <= from + ? 
Collections.emptyList() + : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); + return new BrowseGroupsResult( + paginatedGroups, groupsAgg.size(), (int) groupsResponse.getHits().getTotalHits().value); } /** @@ -301,18 +324,22 @@ private BrowseGroupsResult extractGroupsResponse(@Nonnull SearchResponse groupsR */ @VisibleForTesting @Nonnull - List extractEntitiesResponse(@Nonnull SearchResponse entitiesResponse, - @Nonnull String currentPath) { + List extractEntitiesResponse( + @Nonnull SearchResponse entitiesResponse, @Nonnull String currentPath) { final List entityMetadataArray = new ArrayList<>(); - Arrays.stream(entitiesResponse.getHits().getHits()).forEach(hit -> { - try { - final List allPaths = (List) hit.getSourceAsMap().get(BROWSE_PATH); - entityMetadataArray.add(new BrowseResultEntity().setName((String) hit.getSourceAsMap().get(URN)) - .setUrn(Urn.createFromString((String) hit.getSourceAsMap().get(URN)))); - } catch (URISyntaxException e) { - log.error("URN is not valid: " + e.toString()); - } - }); + Arrays.stream(entitiesResponse.getHits().getHits()) + .forEach( + hit -> { + try { + final List allPaths = (List) hit.getSourceAsMap().get(BROWSE_PATH); + entityMetadataArray.add( + new BrowseResultEntity() + .setName((String) hit.getSourceAsMap().get(URN)) + .setUrn(Urn.createFromString((String) hit.getSourceAsMap().get(URN)))); + } catch (URISyntaxException e) { + log.error("URN is not valid: " + e.toString()); + } + }); return entityMetadataArray; } @@ -344,7 +371,8 @@ private static int getPathDepth(@Nonnull String path) { public List getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(new SearchSourceBuilder().query(QueryBuilders.termQuery(URN, urn.toString()))); + searchRequest.source( + new SearchSourceBuilder().query(QueryBuilders.termQuery(URN, urn.toString()))); final SearchHit[] searchHits; try { searchHits = client.search(searchRequest, RequestOptions.DEFAULT).getHits().getHits(); @@ -363,20 +391,32 @@ public List getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn) return (List) sourceMap.get(BROWSE_PATH); } - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count) { try { final SearchResponse groupsResponse; try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "esGroupSearch").time()) { final String finalInput = input.isEmpty() ? 
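// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: extractGroupsResponse pages through
// the aggregated groups in memory with the guarded subList just above. The
// same clamping logic in isolation, covering both edge cases it protects
// against (offset past the end, page running over the end):
import java.util.Collections;
import java.util.List;

class GroupPagingSketch {
  static List<String> page(List<String> groups, int from, int size) {
    return groups.size() <= from
        ? Collections.emptyList()
        : groups.subList(from, Math.min(from + size, groups.size()));
  }

  public static void main(String[] args) {
    List<String> groups = List.of("prod", "dev", "staging");
    System.out.println(page(groups, 0, 2)); // [prod, dev]
    System.out.println(page(groups, 2, 2)); // [staging] - clamped, no out-of-bounds
    System.out.println(page(groups, 5, 2)); // [] - offset past the end
  }
}
// --------------------------------------------------------------------------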
"*" : input; groupsResponse = - client.search(constructGroupsSearchRequestV2(entityName, path, filter, finalInput), RequestOptions.DEFAULT); + client.search( + constructGroupsSearchRequestV2(entityName, path, filter, finalInput), + RequestOptions.DEFAULT); } - final BrowseGroupsResultV2 browseGroupsResult = extractGroupsResponseV2(groupsResponse, path, start, count); + final BrowseGroupsResultV2 browseGroupsResult = + extractGroupsResponseV2(groupsResponse, path, start, count); final int numGroups = browseGroupsResult.getTotalGroups(); - return new BrowseResultV2().setMetadata( - new BrowseResultMetadata().setTotalNumEntities(browseGroupsResult.getTotalNumEntities()).setPath(path)) + return new BrowseResultV2() + .setMetadata( + new BrowseResultMetadata() + .setTotalNumEntities(browseGroupsResult.getTotalNumEntities()) + .setPath(path)) .setGroups(new BrowseResultGroupV2Array(browseGroupsResult.getGroups())) .setNumGroups(numGroups) .setFrom(start) @@ -388,12 +428,21 @@ public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, } @Nonnull - private SearchRequest constructGroupsSearchRequestV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input) { + private SearchRequest constructGroupsSearchRequestV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); final SearchRequest searchRequest = new SearchRequest(indexName); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(0); - searchSourceBuilder.query(buildQueryStringV2(entityName, path, SearchUtil.transformFilterForEntities(filter, indexConvention), input)); + searchSourceBuilder.query( + buildQueryStringV2( + entityName, + path, + SearchUtil.transformFilterForEntities(filter, indexConvention), + input)); searchSourceBuilder.aggregation(buildAggregationsV2(path)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -412,21 +461,24 @@ private String getSimpleNameV2(@Nonnull String path) { return path.substring(path.lastIndexOf(BROWSE_V2_DELIMITER) + 1); } - private static int getPathDepthV2(@Nonnull String path) { return StringUtils.countMatches(path, BROWSE_V2_DELIMITER); } @Nonnull - private QueryBuilder buildQueryStringV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input) { + private QueryBuilder buildQueryStringV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input) { final int browseDepthVal = getPathDepthV2(path); final BoolQueryBuilder queryBuilder = QueryBuilders.boolQuery(); EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - QueryBuilder query = SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getQuery(input, false); + QueryBuilder query = + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + .getQuery(input, false); queryBuilder.must(query); filterSoftDeletedByDefault(filter, queryBuilder); @@ -467,19 +519,19 @@ private AggregationBuilder buildAggregationsV2(@Nonnull String path) { * @return {@link BrowseResultMetadata} */ @Nonnull - private BrowseGroupsResultV2 extractGroupsResponseV2(@Nonnull SearchResponse groupsResponse, @Nonnull String path, - int from, int size) { + private BrowseGroupsResultV2 extractGroupsResponseV2( + @Nonnull 
SearchResponse groupsResponse, @Nonnull String path, int from, int size) { final ParsedTerms groups = groupsResponse.getAggregations().get(GROUP_AGG); - final List groupsAgg = groups.getBuckets() - .stream() - .map(this::mapBrowseResultGroupV2) - .collect(Collectors.toList()); + final List groupsAgg = + groups.getBuckets().stream().map(this::mapBrowseResultGroupV2).collect(Collectors.toList()); // Get the groups that are in the from to from + size range - final List paginatedGroups = groupsAgg.size() <= from ? Collections.emptyList() - : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); - return new BrowseGroupsResultV2(paginatedGroups, groupsAgg.size(), - (int) groupsResponse.getHits().getTotalHits().value); + final List paginatedGroups = + groupsAgg.size() <= from + ? Collections.emptyList() + : groupsAgg.subList(from, Math.min(from + size, groupsAgg.size())); + return new BrowseGroupsResultV2( + paginatedGroups, groupsAgg.size(), (int) groupsResponse.getHits().getTotalHits().value); } private boolean hasSubGroups(Terms.Bucket group) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 960a5b38826b1..0718448a6453e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -1,12 +1,16 @@ package com.linkedin.metadata.search.elasticsearch.query; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.codahale.metrics.Timer; -import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.datahub.util.exception.ESQueryException; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.annotations.VisibleForTesting; import com.linkedin.data.template.LongMap; +import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.AutoCompleteResult; @@ -45,24 +49,18 @@ import org.opensearch.client.core.CountRequest; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.search.SearchModule; import org.opensearch.search.builder.SearchSourceBuilder; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - - -/** - * A search DAO for Elasticsearch backend. - */ +/** A search DAO for Elasticsearch backend. 
*/ @Slf4j @RequiredArgsConstructor public class ESSearchDAO { private static final NamedXContentRegistry X_CONTENT_REGISTRY; + static { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); X_CONTENT_REGISTRY = new NamedXContentRegistry(searchModule.getNamedXContents()); @@ -73,15 +71,14 @@ public class ESSearchDAO { private final IndexConvention indexConvention; private final boolean pointInTimeCreationEnabled; private final String elasticSearchImplementation; - @Nonnull - private final SearchConfiguration searchConfiguration; - @Nullable - private final CustomSearchConfiguration customSearchConfiguration; + @Nonnull private final SearchConfiguration searchConfiguration; + @Nullable private final CustomSearchConfiguration customSearchConfiguration; public long docCount(@Nonnull String entityName) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); CountRequest countRequest = - new CountRequest(indexConvention.getIndexName(entitySpec)).query(SearchRequestHandler.getFilterQuery(null)); + new CountRequest(indexConvention.getIndexName(entitySpec)) + .query(SearchRequestHandler.getFilterQuery(null)); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "docCount").time()) { return client.count(countRequest, RequestOptions.DEFAULT).getCount(); } catch (IOException e) { @@ -92,15 +89,21 @@ public long docCount(@Nonnull String entityName) { @Nonnull @WithSpan - private SearchResult executeAndExtract(@Nonnull List entitySpec, @Nonnull SearchRequest searchRequest, - @Nullable Filter filter, int from, int size) { + private SearchResult executeAndExtract( + @Nonnull List entitySpec, + @Nonnull SearchRequest searchRequest, + @Nullable Filter filter, + int from, + int size) { long id = System.currentTimeMillis(); - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "executeAndExtract_search").time()) { log.debug("Executing request {}: {}", id, searchRequest); final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return transformIndexIntoEntityName(SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + return transformIndexIntoEntityName( + SearchRequestHandler.getBuilder( + entitySpec, searchConfiguration, customSearchConfiguration) .extractResult(searchResponse, filter, from, size)); } catch (Exception e) { log.error("Search query failed", e); @@ -116,33 +119,47 @@ private String transformIndexToken(String name, int entityTypeIdx) { } String[] tokens = name.split(AGGREGATION_SEPARATOR_CHAR); if (entityTypeIdx < tokens.length) { - tokens[entityTypeIdx] = indexConvention.getEntityName(tokens[entityTypeIdx]).orElse(tokens[entityTypeIdx]); + tokens[entityTypeIdx] = + indexConvention.getEntityName(tokens[entityTypeIdx]).orElse(tokens[entityTypeIdx]); } return String.join(AGGREGATION_SEPARATOR_CHAR, tokens); } - private AggregationMetadata transformAggregationMetadata(@Nonnull AggregationMetadata aggMeta, int entityTypeIdx) { + private AggregationMetadata transformAggregationMetadata( + @Nonnull AggregationMetadata aggMeta, int entityTypeIdx) { if (entityTypeIdx >= 0) { - aggMeta.setAggregations(new LongMap( - aggMeta.getAggregations().entrySet().stream().collect( - Collectors.toMap(entry -> transformIndexToken(entry.getKey(), entityTypeIdx), Map.Entry::getValue)))); + 
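// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: transformIndexToken above rewrites a
// single token of a separator-joined aggregation key from an index name back
// to an entity name. A self-contained version, with a hard-coded map standing
// in for indexConvention.getEntityName and an assumed separator character:
import java.util.Map;
import java.util.Optional;

class IndexTokenSketch {
  static final String SEP = "␞"; // stand-in for AGGREGATION_SEPARATOR_CHAR
  static final Map<String, String> INDEX_TO_ENTITY = Map.of("datasetindex_v2", "dataset");

  static String transform(String name, int entityTypeIdx) {
    String[] tokens = name.split(SEP);
    if (entityTypeIdx < tokens.length) {
      tokens[entityTypeIdx] =
          Optional.ofNullable(INDEX_TO_ENTITY.get(tokens[entityTypeIdx]))
              .orElse(tokens[entityTypeIdx]);
    }
    return String.join(SEP, tokens);
  }

  public static void main(String[] args) {
    System.out.println(transform("datasetindex_v2" + SEP + "PROD", 0)); // dataset␞PROD
  }
}
// --------------------------------------------------------------------------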
aggMeta.setAggregations( + new LongMap( + aggMeta.getAggregations().entrySet().stream() + .collect( + Collectors.toMap( + entry -> transformIndexToken(entry.getKey(), entityTypeIdx), + Map.Entry::getValue)))); aggMeta.setFilterValues( new FilterValueArray( - aggMeta.getFilterValues().stream().map( - filterValue -> filterValue.setValue(transformIndexToken(filterValue.getValue(), entityTypeIdx))) - .collect(Collectors.toList()) - )); - + aggMeta.getFilterValues().stream() + .map( + filterValue -> + filterValue.setValue( + transformIndexToken(filterValue.getValue(), entityTypeIdx))) + .collect(Collectors.toList()))); } return aggMeta; } @VisibleForTesting public SearchResult transformIndexIntoEntityName(SearchResult result) { - return result.setMetadata(result.getMetadata().setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); + return result.setMetadata( + result + .getMetadata() + .setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); } + private ScrollResult transformIndexIntoEntityName(ScrollResult result) { - return result.setMetadata(result.getMetadata().setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); + return result.setMetadata( + result + .getMetadata() + .setAggregations(transformIndexIntoEntityName(result.getMetadata().getAggregations()))); } private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadataArray aggArray) { @@ -157,15 +174,22 @@ private AggregationMetadataArray transformIndexIntoEntityName(AggregationMetadat @Nonnull @WithSpan - private ScrollResult executeAndExtract(@Nonnull List entitySpecs, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, - @Nullable String scrollId, @Nullable String keepAlive, int size) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { + private ScrollResult executeAndExtract( + @Nonnull List entitySpecs, + @Nonnull SearchRequest searchRequest, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int size) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "executeAndExtract_scroll").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well - return transformIndexIntoEntityName(SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .extractScrollResult(searchResponse, - filter, scrollId, keepAlive, size, supportsPointInTime())); + return transformIndexIntoEntityName( + SearchRequestHandler.getBuilder( + entitySpecs, searchConfiguration, customSearchConfiguration) + .extractScrollResult( + searchResponse, filter, scrollId, keepAlive, size, supportsPointInTime())); } catch (Exception e) { log.error("Search query failed: {}", searchRequest, e); throw new ESQueryException("Search query failed:", e); @@ -173,32 +197,42 @@ private ScrollResult executeAndExtract(@Nonnull List entitySpecs, @N } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
* * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags Structured or full text search modes, plus other misc options * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public SearchResult search(@Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List facets) { + public SearchResult search( + @Nonnull List entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List facets) { final String finalInput = input.isEmpty() ? "*" : input; Timer.Context searchRequestTimer = MetricUtils.timer(this.getClass(), "searchRequest").time(); - List entitySpecs = entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); + List entitySpecs = + entityNames.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); Filter transformedFilters = transformFilterForEntities(postFilters, indexConvention); // Step 1: construct the query - final SearchRequest searchRequest = SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getSearchRequest(finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); - searchRequest.indices(entityNames.stream() - .map(indexConvention::getEntityIndexName) - .toArray(String[]::new)); + final SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getSearchRequest( + finalInput, transformedFilters, sortCriterion, from, size, searchFlags, facets); + searchRequest.indices( + entityNames.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new)); searchRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well return executeAndExtract(entitySpecs, searchRequest, transformedFilters, from, size); @@ -207,21 +241,26 @@ public SearchResult search(@Nonnull List entityNames, @Nonnull String in /** * Gets a list of documents after applying the input filters. 
* - * @param filters the request map with fields and values to be applied as filters to the search query + * @param filters the request map with fields and values to be applied as filters to the search + * query * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return - * @return a {@link SearchResult} that contains a list of filtered documents and related search result metadata + * @return a {@link SearchResult} that contains a list of filtered documents and related search + * result metadata */ @Nonnull - public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, - @Nullable SortCriterion sortCriterion, int from, int size) { + public SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); Filter transformedFilters = transformFilterForEntities(filters, indexConvention); final SearchRequest searchRequest = - SearchRequestHandler - .getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) - .getFilterRequest(transformedFilters, sortCriterion, from, size); + SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, customSearchConfiguration) + .getFilterRequest(transformedFilters, sortCriterion, from, size); searchRequest.indices(indexConvention.getIndexName(entitySpec)); return executeAndExtract(List.of(entitySpec), searchRequest, transformedFilters, from, size); @@ -230,7 +269,8 @@ public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, /** * Returns a list of suggestions given type ahead query. * - *
<p>The advanced auto complete can take filters and provides suggestions based on filtered context.
+ * <p>
The advanced auto complete can take filters and provides suggestions based on filtered + * context. * * @param query the type ahead query text * @param field the field name for the auto complete @@ -239,12 +279,18 @@ public SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, * @return A list of suggestions as string */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit) { + public AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit) { try { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); AutocompleteRequestHandler builder = AutocompleteRequestHandler.getBuilder(entitySpec); - SearchRequest req = builder.getSearchRequest(query, field, transformFilterForEntities(requestParams, indexConvention), limit); + SearchRequest req = + builder.getSearchRequest( + query, field, transformFilterForEntities(requestParams, indexConvention), limit); req.indices(indexConvention.getIndexName(entitySpec)); SearchResponse searchResponse = client.search(req, RequestOptions.DEFAULT); return builder.extractResult(searchResponse, query); @@ -264,18 +310,27 @@ public AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull Stri * @return */ @Nonnull - public Map aggregateByValue(@Nullable List entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit) { - final SearchRequest searchRequest = SearchRequestHandler.getAggregationRequest(field, transformFilterForEntities(requestParams, indexConvention), limit); + public Map aggregateByValue( + @Nullable List entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit) { + final SearchRequest searchRequest = + SearchRequestHandler.getAggregationRequest( + field, transformFilterForEntities(requestParams, indexConvention), limit); if (entityNames == null) { String indexName = indexConvention.getAllEntityIndicesPattern(); searchRequest.indices(indexName); } else { - Stream stream = entityNames.stream().map(entityRegistry::getEntitySpec).map(indexConvention::getIndexName); + Stream stream = + entityNames.stream() + .map(entityRegistry::getEntitySpec) + .map(indexConvention::getIndexName); searchRequest.indices(stream.toArray(String[]::new)); } - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "aggregateByValue_search").time()) { final SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); // extract results, validated against document model as well return SearchRequestHandler.extractTermAggregations(searchResponse, field); @@ -286,28 +341,35 @@ public Map aggregateByValue(@Nullable List entityNames, @N } /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
* * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll Id to convert to a PIT ID and Sort array to pass to ElasticSearch * @param keepAlive string representation of the time to keep a point in time alive * @param size the number of search hits to return - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public ScrollResult scroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, SearchFlags searchFlags) { + public ScrollResult scroll( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + SearchFlags searchFlags) { final String finalInput = input.isEmpty() ? "*" : input; - String[] indexArray = entities.stream() - .map(indexConvention::getEntityIndexName) - .toArray(String[]::new); + String[] indexArray = + entities.stream().map(indexConvention::getEntityIndexName).toArray(String[]::new); Timer.Context scrollRequestTimer = MetricUtils.timer(this.getClass(), "scrollRequest").time(); - List entitySpecs = entities.stream() - .map(entityRegistry::getEntitySpec) - .collect(Collectors.toList()); + List entitySpecs = + entities.stream().map(entityRegistry::getEntitySpec).collect(Collectors.toList()); String pitId = null; Object[] sort = null; if (scrollId != null) { @@ -326,39 +388,55 @@ public ScrollResult scroll(@Nonnull List entities, @Nonnull String input Filter transformedFilters = transformFilterForEntities(postFilters, indexConvention); // Step 1: construct the query - final SearchRequest searchRequest = SearchRequestHandler - .getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) - .getSearchRequest(finalInput, transformedFilters, sortCriterion, sort, pitId, keepAlive, size, searchFlags); + final SearchRequest searchRequest = + SearchRequestHandler.getBuilder(entitySpecs, searchConfiguration, customSearchConfiguration) + .getSearchRequest( + finalInput, + transformedFilters, + sortCriterion, + sort, + pitId, + keepAlive, + size, + searchFlags); - // PIT specifies indices in creation so it doesn't support specifying indices on the request, so we only specify if not using PIT + // PIT specifies indices in creation so it doesn't support specifying indices on the request, so + // we only specify if not using PIT if (!supportsPointInTime()) { searchRequest.indices(indexArray); } scrollRequestTimer.stop(); // Step 2: execute the query and extract results, validated against document model as well - return executeAndExtract(entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); + return executeAndExtract( + entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); } public Optional raw(@Nonnull String indexName, @Nullable String jsonQuery) { - return Optional.ofNullable(jsonQuery).map(json -> { - try { - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, 
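// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: the scroll ids handled here are not
// opaque server handles - SearchAfterWrapper (further down in this patch)
// base64-encodes a small JSON object carrying the PIT id and the search_after
// sort values. A round trip of that encoding; the field names and values are
// assumptions for illustration:
import com.fasterxml.jackson.databind.ObjectMapper;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.List;
import java.util.Map;

class ScrollIdSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    Map<String, Object> wrapper =
        Map.of("sort", List.of("urn:li:dataset:abc"), "pitId", "fake-pit-id");
    String scrollId = Base64.getEncoder().encodeToString(mapper.writeValueAsBytes(wrapper));

    Map<?, ?> decoded =
        mapper.readValue(
            Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), Map.class);
    System.out.println(decoded.get("pitId")); // fake-pit-id
  }
}
// --------------------------------------------------------------------------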
- LoggingDeprecationHandler.INSTANCE, json); - SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); + return Optional.ofNullable(jsonQuery) + .map( + json -> { + try { + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, json); + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(parser); - SearchRequest searchRequest = new SearchRequest(indexConvention.getIndexName(indexName)); - searchRequest.source(searchSourceBuilder); + SearchRequest searchRequest = + new SearchRequest(indexConvention.getIndexName(indexName)); + searchRequest.source(searchSourceBuilder); - return client.search(searchRequest, RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + return client.search(searchRequest, RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } private boolean supportsPointInTime() { - return pointInTimeCreationEnabled && ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH.equalsIgnoreCase(elasticSearchImplementation); + return pointInTimeCreationEnabled + && ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH.equalsIgnoreCase(elasticSearchImplementation); } private String createPointInTime(String[] indexArray, String keepAlive) { @@ -367,8 +445,8 @@ private String createPointInTime(String[] indexArray, String keepAlive) { request.addParameter("keep_alive", keepAlive); try { Response response = client.getLowLevelClient().performRequest(request); - Map mappedResponse = OBJECT_MAPPER.readValue(response.getEntity().getContent(), - new TypeReference<>() { }); + Map mappedResponse = + OBJECT_MAPPER.readValue(response.getEntity().getContent(), new TypeReference<>() {}); return (String) mappedResponse.get("id"); } catch (IOException e) { log.error("Failed to generate PointInTime Identifier.", e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java index e2bdea84eda0e..7a8056c0b59d1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AggregationQueryBuilder.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.models.annotation.SearchableAnnotation; import com.linkedin.metadata.search.utils.ESUtils; @@ -14,9 +16,6 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregationBuilders; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class AggregationQueryBuilder { @@ -32,43 +31,51 @@ public AggregationQueryBuilder( this._allFacetFields = getAllFacetFields(annotations); } - /** - * Get the set of default aggregations, across all facets. - */ + /** Get the set of default aggregations, across all facets. */ public List getAggregations() { return getAggregations(null); } /** - * Get aggregations for a search request for the given facets provided, and if none are provided, then get aggregations for all. 
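// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: createPointInTime above reads the
// low-level REST response as a generic map and extracts the "id" field. The
// same Jackson pattern in isolation, with a literal JSON body standing in for
// response.getEntity().getContent() and a made-up PIT id:
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

class PitResponseSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    String body = "{\"id\": \"46ToAwEPZGF0YXNldA\"}";
    Map<String, Object> parsed = mapper.readValue(body, new TypeReference<>() {});
    System.out.println((String) parsed.get("id")); // 46ToAwEPZGF0YXNldA
  }
}
// --------------------------------------------------------------------------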
+ * Get aggregations for a search request for the given facets provided, and if none are provided, + * then get aggregations for all. */ public List getAggregations(@Nullable List facets) { final Set facetsToAggregate; if (facets != null) { - facets.stream().filter(f -> !isValidAggregate(f)).forEach(facet -> { - log.warn(String.format("Requested facet for search filter aggregations that isn't part of the default filters. Provided: %s; Available: %s", facet, - _defaultFacetFields)); - }); - facetsToAggregate = facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); + facets.stream() + .filter(f -> !isValidAggregate(f)) + .forEach( + facet -> { + log.warn( + String.format( + "Requested facet for search filter aggregations that isn't part of the default filters. Provided: %s; Available: %s", + facet, _defaultFacetFields)); + }); + facetsToAggregate = + facets.stream().filter(this::isValidAggregate).collect(Collectors.toSet()); } else { facetsToAggregate = _defaultFacetFields; } - return facetsToAggregate.stream().map(this::facetToAggregationBuilder).collect(Collectors.toList()); + return facetsToAggregate.stream() + .map(this::facetToAggregationBuilder) + .collect(Collectors.toList()); } - private Set getDefaultFacetFields(final List annotations) { - Set facets = annotations.stream() - .flatMap(annotation -> getDefaultFacetFieldsFromAnnotation(annotation).stream()) - .collect(Collectors.toSet()); + Set facets = + annotations.stream() + .flatMap(annotation -> getDefaultFacetFieldsFromAnnotation(annotation).stream()) + .collect(Collectors.toSet()); facets.add(INDEX_VIRTUAL_FIELD); return facets; } private Set getAllFacetFields(final List annotations) { - Set facets = annotations.stream() - .flatMap(annotation -> getAllFacetFieldsFromAnnotation(annotation).stream()) - .collect(Collectors.toSet()); + Set facets = + annotations.stream() + .flatMap(annotation -> getAllFacetFieldsFromAnnotation(annotation).stream()) + .collect(Collectors.toSet()); facets.add(INDEX_VIRTUAL_FIELD); return facets; } @@ -129,4 +136,4 @@ List getAllFacetFieldsFromAnnotation(final SearchableAnnotation annotati } return facetsFromAnnotation; } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index bba3a9fa4232d..cdcdae2f3d311 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; @@ -34,33 +36,32 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder; -import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; - - @Slf4j public class AutocompleteRequestHandler { private final List _defaultAutocompleteFields; - private static final Map AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = - new ConcurrentHashMap<>(); + private static final Map + 
AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); public AutocompleteRequestHandler(@Nonnull EntitySpec entitySpec) { - _defaultAutocompleteFields = Stream.concat(entitySpec.getSearchableFieldSpecs() - .stream() - .map(SearchableFieldSpec::getSearchableAnnotation) - .filter(SearchableAnnotation::isEnableAutocomplete) - .map(SearchableAnnotation::getFieldName), - Stream.of("urn")) - .collect(Collectors.toList()); + _defaultAutocompleteFields = + Stream.concat( + entitySpec.getSearchableFieldSpecs().stream() + .map(SearchableFieldSpec::getSearchableAnnotation) + .filter(SearchableAnnotation::isEnableAutocomplete) + .map(SearchableAnnotation::getFieldName), + Stream.of("urn")) + .collect(Collectors.toList()); } public static AutocompleteRequestHandler getBuilder(@Nonnull EntitySpec entitySpec) { - return AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME.computeIfAbsent(entitySpec, - k -> new AutocompleteRequestHandler(entitySpec)); + return AUTOCOMPLETE_QUERY_BUILDER_BY_ENTITY_NAME.computeIfAbsent( + entitySpec, k -> new AutocompleteRequestHandler(entitySpec)); } - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable String field, @Nullable Filter filter, int limit) { + public SearchRequest getSearchRequest( + @Nonnull String input, @Nullable String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.size(limit); @@ -78,25 +79,27 @@ private QueryBuilder getQuery(@Nonnull String query, @Nullable String field) { public static QueryBuilder getQuery(List autocompleteFields, @Nonnull String query) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); // Search for exact matches with higher boost and ngram matches - MultiMatchQueryBuilder autocompleteQueryBuilder = QueryBuilders.multiMatchQuery(query) - .type(MultiMatchQueryBuilder.Type.BOOL_PREFIX); - - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - autocompleteFields.forEach(fieldName -> { - if ("urn".equals(fieldName)) { - autocompleteQueryBuilder.field(fieldName + ".ngram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._2gram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._3gram", urnBoost); - autocompleteQueryBuilder.field(fieldName + ".ngram._4gram", urnBoost); - } else { - autocompleteQueryBuilder.field(fieldName + ".ngram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._2gram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._3gram"); - autocompleteQueryBuilder.field(fieldName + ".ngram._4gram"); - } - - finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query)); - }); + MultiMatchQueryBuilder autocompleteQueryBuilder = + QueryBuilders.multiMatchQuery(query).type(MultiMatchQueryBuilder.Type.BOOL_PREFIX); + + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + autocompleteFields.forEach( + fieldName -> { + if ("urn".equals(fieldName)) { + autocompleteQueryBuilder.field(fieldName + ".ngram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._2gram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._3gram", urnBoost); + autocompleteQueryBuilder.field(fieldName + ".ngram._4gram", urnBoost); + } else { + autocompleteQueryBuilder.field(fieldName + ".ngram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._2gram"); + autocompleteQueryBuilder.field(fieldName + 
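// --------------------------------------------------------------------------
// Editor's aside, not part of the patch: for a non-urn field the autocomplete
// handler fans one field name out into its four ngram subfields on a
// BOOL_PREFIX multi-match and adds a match_phrase_prefix on the delimited
// subfield. A standalone version for a single assumed field name:
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.MultiMatchQueryBuilder;
import org.opensearch.index.query.QueryBuilders;

class AutocompleteQuerySketch {
  public static void main(String[] args) {
    String query = "pet_prof";
    String fieldName = "name";

    MultiMatchQueryBuilder prefixQuery =
        QueryBuilders.multiMatchQuery(query).type(MultiMatchQueryBuilder.Type.BOOL_PREFIX);
    prefixQuery.field(fieldName + ".ngram");
    prefixQuery.field(fieldName + ".ngram._2gram");
    prefixQuery.field(fieldName + ".ngram._3gram");
    prefixQuery.field(fieldName + ".ngram._4gram");

    BoolQueryBuilder finalQuery = QueryBuilders.boolQuery();
    finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query));
    finalQuery.should(prefixQuery);

    System.out.println(finalQuery); // the query rendered as JSON
  }
}
// --------------------------------------------------------------------------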
".ngram._3gram"); + autocompleteQueryBuilder.field(fieldName + ".ngram._4gram"); + } + + finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(fieldName + ".delimited", query)); + }); finalQuery.should(autocompleteQueryBuilder); @@ -111,11 +114,14 @@ private HighlightBuilder getHighlights(@Nullable String field) { highlightBuilder.preTags(""); highlightBuilder.postTags(""); // Check for each field name and any subfields - getAutocompleteFields(field).forEach(fieldName -> highlightBuilder - .field(fieldName) - .field(fieldName + ".*") - .field(fieldName + ".ngram") - .field(fieldName + ".delimited")); + getAutocompleteFields(field) + .forEach( + fieldName -> + highlightBuilder + .field(fieldName) + .field(fieldName + ".*") + .field(fieldName + ".ngram") + .field(fieldName + ".delimited")); return highlightBuilder; } @@ -126,19 +132,20 @@ private List getAutocompleteFields(@Nullable String field) { return _defaultAutocompleteFields; } - public AutoCompleteResult extractResult(@Nonnull SearchResponse searchResponse, @Nonnull String input) { + public AutoCompleteResult extractResult( + @Nonnull SearchResponse searchResponse, @Nonnull String input) { Set results = new LinkedHashSet<>(); Set entityResults = new HashSet<>(); for (SearchHit hit : searchResponse.getHits()) { - Optional matchedFieldValue = hit.getHighlightFields() - .entrySet() - .stream() - .findFirst() - .map(entry -> entry.getValue().getFragments()[0].string()); + Optional matchedFieldValue = + hit.getHighlightFields().entrySet().stream() + .findFirst() + .map(entry -> entry.getValue().getFragments()[0].string()); Optional matchedUrn = Optional.ofNullable((String) hit.getSourceAsMap().get("urn")); try { if (matchedUrn.isPresent()) { - entityResults.add(new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); + entityResults.add( + new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); } } catch (URISyntaxException e) { throw new RuntimeException(String.format("Failed to create urn %s", matchedUrn.get()), e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java index 55a3474fd9f35..478d633fe3c55 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/CustomizedQueryHandler.java @@ -2,42 +2,43 @@ import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nullable; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.regex.Pattern; import java.util.stream.Collectors; - +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; @Slf4j @Builder(builderMethodName = "hiddenBuilder") @Getter public class CustomizedQueryHandler { - private CustomSearchConfiguration customSearchConfiguration; - @Builder.Default - private List> queryConfigurations = List.of(); + private CustomSearchConfiguration customSearchConfiguration; - public Optional lookupQueryConfig(String query) { - return queryConfigurations.stream() - .filter(e -> e.getKey().matcher(query).matches()) - 
.map(Map.Entry::getValue) - .findFirst(); - } + @Builder.Default + private List> queryConfigurations = List.of(); + + public Optional lookupQueryConfig(String query) { + return queryConfigurations.stream() + .filter(e -> e.getKey().matcher(query).matches()) + .map(Map.Entry::getValue) + .findFirst(); + } - public static CustomizedQueryHandlerBuilder builder(@Nullable CustomSearchConfiguration customSearchConfiguration) { - CustomizedQueryHandlerBuilder builder = hiddenBuilder() - .customSearchConfiguration(customSearchConfiguration); + public static CustomizedQueryHandlerBuilder builder( + @Nullable CustomSearchConfiguration customSearchConfiguration) { + CustomizedQueryHandlerBuilder builder = + hiddenBuilder().customSearchConfiguration(customSearchConfiguration); - if (customSearchConfiguration != null) { - builder.queryConfigurations(customSearchConfiguration.getQueryConfigurations().stream() - .map(cfg -> Map.entry(Pattern.compile(cfg.getQueryRegex()), cfg)) - .collect(Collectors.toList())); - } - return builder; + if (customSearchConfiguration != null) { + builder.queryConfigurations( + customSearchConfiguration.getQueryConfigurations().stream() + .map(cfg -> Map.entry(Pattern.compile(cfg.getQueryRegex()), cfg)) + .collect(Collectors.toList())); } + return builder; + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java index 79c00fc7cdd20..3a7e72deed2fe 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/PITAwareSearchRequest.java @@ -3,7 +3,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; - public class PITAwareSearchRequest extends SearchRequest { private IndicesOptions indicesOptions; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java index 3e4f3427e7658..1fe4a74968e42 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchAfterWrapper.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; + import java.io.IOException; import java.io.Serializable; import java.nio.charset.StandardCharsets; @@ -8,10 +10,6 @@ import lombok.Data; import lombok.NoArgsConstructor; - -import static com.linkedin.metadata.models.registry.template.util.TemplateUtil.*; - - @Data @AllArgsConstructor @NoArgsConstructor @@ -22,7 +20,9 @@ public class SearchAfterWrapper implements Serializable { public static SearchAfterWrapper fromScrollId(String scrollId) { try { - return OBJECT_MAPPER.readValue(Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), SearchAfterWrapper.class); + return OBJECT_MAPPER.readValue( + Base64.getDecoder().decode(scrollId.getBytes(StandardCharsets.UTF_8)), + SearchAfterWrapper.class); } catch (IOException e) { throw new IllegalStateException("Invalid scroll Id cannot be mapped: " + scrollId, e); } diff --git 
a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java index a75ed40ffca52..7709ff16f7940 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchFieldConfig.java @@ -1,175 +1,162 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; + import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation; +import java.util.Set; +import javax.annotation.Nonnull; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.experimental.Accessors; -import javax.annotation.Nonnull; - -import java.util.Set; - -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; - - @Builder @Getter @Accessors(fluent = true) @EqualsAndHashCode public class SearchFieldConfig { - public static final float DEFAULT_BOOST = 1.0f; - - public static final Set KEYWORD_FIELDS = Set.of("urn", "runId", "_index"); - public static final Set PATH_HIERARCHY_FIELDS = Set.of("browsePathV2"); - - // These should not be used directly since there is a specific - // order in which these rules need to be evaluated for exceptions to - // the rules. - private static final Set TYPES_WITH_DELIMITED_SUBFIELD = - Set.of( - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.WORD_GRAM - // NOT URN_PARTIAL (urn field is special) - ); - // NOT comprehensive - private static final Set TYPES_WITH_KEYWORD_SUBFIELD = - Set.of( - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.URN_PARTIAL - ); - private static final Set TYPES_WITH_BROWSE_PATH = - Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH - ); - private static final Set TYPES_WITH_BROWSE_PATH_V2 = - Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH_V2 - ); - private static final Set TYPES_WITH_BASE_KEYWORD = - Set.of( - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.WORD_GRAM, - // not analyzed - SearchableAnnotation.FieldType.BOOLEAN, - SearchableAnnotation.FieldType.COUNT, - SearchableAnnotation.FieldType.DATETIME, - SearchableAnnotation.FieldType.OBJECT - ); - // NOT true for `urn` - public static final Set TYPES_WITH_URN_TEXT = - Set.of( - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.URN_PARTIAL - ); - - public static final Set TYPES_WITH_WORD_GRAM = - Set.of( - SearchableAnnotation.FieldType.WORD_GRAM - ); - - @Nonnull - private final String fieldName; - @Nonnull - private final String shortName; - @Builder.Default - private final Float boost = DEFAULT_BOOST; - private final String analyzer; - private boolean hasKeywordSubfield; - private boolean hasDelimitedSubfield; - private boolean hasWordGramSubfields; - private boolean isQueryByDefault; - private boolean isDelimitedSubfield; - private boolean isKeywordSubfield; - private boolean isWordGramSubfield; - - public static SearchFieldConfig detectSubFieldType(@Nonnull SearchableFieldSpec fieldSpec) { - final SearchableAnnotation 
searchableAnnotation = fieldSpec.getSearchableAnnotation(); - final String fieldName = searchableAnnotation.getFieldName(); - final float boost = (float) searchableAnnotation.getBoostScore(); - final SearchableAnnotation.FieldType fieldType = searchableAnnotation.getFieldType(); - return detectSubFieldType(fieldName, boost, fieldType, searchableAnnotation.isQueryByDefault()); - } - - public static SearchFieldConfig detectSubFieldType(String fieldName, - SearchableAnnotation.FieldType fieldType, - boolean isQueryByDefault) { - return detectSubFieldType(fieldName, DEFAULT_BOOST, fieldType, isQueryByDefault); + public static final float DEFAULT_BOOST = 1.0f; + + public static final Set KEYWORD_FIELDS = Set.of("urn", "runId", "_index"); + public static final Set PATH_HIERARCHY_FIELDS = Set.of("browsePathV2"); + + // These should not be used directly since there is a specific + // order in which these rules need to be evaluated for exceptions to + // the rules. + private static final Set TYPES_WITH_DELIMITED_SUBFIELD = + Set.of( + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.WORD_GRAM + // NOT URN_PARTIAL (urn field is special) + ); + // NOT comprehensive + private static final Set TYPES_WITH_KEYWORD_SUBFIELD = + Set.of( + SearchableAnnotation.FieldType.URN, + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.URN_PARTIAL); + private static final Set TYPES_WITH_BROWSE_PATH = + Set.of(SearchableAnnotation.FieldType.BROWSE_PATH); + private static final Set TYPES_WITH_BROWSE_PATH_V2 = + Set.of(SearchableAnnotation.FieldType.BROWSE_PATH_V2); + private static final Set TYPES_WITH_BASE_KEYWORD = + Set.of( + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.WORD_GRAM, + // not analyzed + SearchableAnnotation.FieldType.BOOLEAN, + SearchableAnnotation.FieldType.COUNT, + SearchableAnnotation.FieldType.DATETIME, + SearchableAnnotation.FieldType.OBJECT); + // NOT true for `urn` + public static final Set TYPES_WITH_URN_TEXT = + Set.of(SearchableAnnotation.FieldType.URN, SearchableAnnotation.FieldType.URN_PARTIAL); + + public static final Set TYPES_WITH_WORD_GRAM = + Set.of(SearchableAnnotation.FieldType.WORD_GRAM); + + @Nonnull private final String fieldName; + @Nonnull private final String shortName; + @Builder.Default private final Float boost = DEFAULT_BOOST; + private final String analyzer; + private boolean hasKeywordSubfield; + private boolean hasDelimitedSubfield; + private boolean hasWordGramSubfields; + private boolean isQueryByDefault; + private boolean isDelimitedSubfield; + private boolean isKeywordSubfield; + private boolean isWordGramSubfield; + + public static SearchFieldConfig detectSubFieldType(@Nonnull SearchableFieldSpec fieldSpec) { + final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); + final String fieldName = searchableAnnotation.getFieldName(); + final float boost = (float) searchableAnnotation.getBoostScore(); + final SearchableAnnotation.FieldType fieldType = searchableAnnotation.getFieldType(); + return detectSubFieldType(fieldName, boost, fieldType, searchableAnnotation.isQueryByDefault()); + } + + public static SearchFieldConfig detectSubFieldType( + String fieldName, SearchableAnnotation.FieldType fieldType, boolean isQueryByDefault) { + return detectSubFieldType(fieldName, DEFAULT_BOOST, fieldType, isQueryByDefault); + } + + public static 
SearchFieldConfig detectSubFieldType( + String fieldName, + float boost, + SearchableAnnotation.FieldType fieldType, + boolean isQueryByDefault) { + return SearchFieldConfig.builder() + .fieldName(fieldName) + .boost(boost) + .analyzer(getAnalyzer(fieldName, fieldType)) + .hasKeywordSubfield(hasKeywordSubfield(fieldName, fieldType)) + .hasDelimitedSubfield(hasDelimitedSubfield(fieldName, fieldType)) + .hasWordGramSubfields(hasWordGramSubfields(fieldName, fieldType)) + .isQueryByDefault(isQueryByDefault) + .build(); + } + + public boolean isKeyword() { + return KEYWORD_ANALYZER.equals(analyzer()) || isKeyword(fieldName()); + } + + private static boolean hasDelimitedSubfield( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !fieldName.contains(".") + && ("urn".equals(fieldName) || TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)); + } + + private static boolean hasWordGramSubfields( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !fieldName.contains(".") && (TYPES_WITH_WORD_GRAM.contains(fieldType)); + } + + private static boolean hasKeywordSubfield( + String fieldName, SearchableAnnotation.FieldType fieldType) { + return !"urn".equals(fieldName) + && !fieldName.contains(".") + && (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType) // if delimited then also has keyword + || TYPES_WITH_KEYWORD_SUBFIELD.contains(fieldType)); + } + + private static boolean isKeyword(String fieldName) { + return fieldName.endsWith(".keyword") || KEYWORD_FIELDS.contains(fieldName); + } + + private static String getAnalyzer(String fieldName, SearchableAnnotation.FieldType fieldType) { + // order is important + if (TYPES_WITH_BROWSE_PATH.contains(fieldType)) { + return BROWSE_PATH_HIERARCHY_ANALYZER; + } else if (TYPES_WITH_BROWSE_PATH_V2.contains(fieldType)) { + return BROWSE_PATH_V2_HIERARCHY_ANALYZER; + // sub fields + } else if (isKeyword(fieldName)) { + return KEYWORD_ANALYZER; + } else if (fieldName.endsWith(".delimited")) { + return TEXT_SEARCH_ANALYZER; + // non-subfield cases below + } else if (TYPES_WITH_BASE_KEYWORD.contains(fieldType)) { + return KEYWORD_ANALYZER; + } else if (TYPES_WITH_URN_TEXT.contains(fieldType)) { + return URN_SEARCH_ANALYZER; + } else { + throw new IllegalStateException( + String.format("Unknown analyzer for fieldName: %s, fieldType: %s", fieldName, fieldType)); } - - public static SearchFieldConfig detectSubFieldType(String fieldName, - float boost, - SearchableAnnotation.FieldType fieldType, - boolean isQueryByDefault) { - return SearchFieldConfig.builder() - .fieldName(fieldName) - .boost(boost) - .analyzer(getAnalyzer(fieldName, fieldType)) - .hasKeywordSubfield(hasKeywordSubfield(fieldName, fieldType)) - .hasDelimitedSubfield(hasDelimitedSubfield(fieldName, fieldType)) - .hasWordGramSubfields(hasWordGramSubfields(fieldName, fieldType)) - .isQueryByDefault(isQueryByDefault) - .build(); - } - - public boolean isKeyword() { - return KEYWORD_ANALYZER.equals(analyzer()) || isKeyword(fieldName()); - } - - private static boolean hasDelimitedSubfield(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !fieldName.contains(".") - && ("urn".equals(fieldName) || TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType)); - } - - private static boolean hasWordGramSubfields(String fieldName, SearchableAnnotation.FieldType fieldType) { - return !fieldName.contains(".") - && (TYPES_WITH_WORD_GRAM.contains(fieldType)); - } - private static boolean hasKeywordSubfield(String fieldName, SearchableAnnotation.FieldType fieldType) { - 
return !"urn".equals(fieldName) - && !fieldName.contains(".") - && (TYPES_WITH_DELIMITED_SUBFIELD.contains(fieldType) // if delimited then also has keyword - || TYPES_WITH_KEYWORD_SUBFIELD.contains(fieldType)); - } - private static boolean isKeyword(String fieldName) { - return fieldName.endsWith(".keyword") - || KEYWORD_FIELDS.contains(fieldName); - } - - private static String getAnalyzer(String fieldName, SearchableAnnotation.FieldType fieldType) { - // order is important - if (TYPES_WITH_BROWSE_PATH.contains(fieldType)) { - return BROWSE_PATH_HIERARCHY_ANALYZER; - } else if (TYPES_WITH_BROWSE_PATH_V2.contains(fieldType)) { - return BROWSE_PATH_V2_HIERARCHY_ANALYZER; - // sub fields - } else if (isKeyword(fieldName)) { - return KEYWORD_ANALYZER; - } else if (fieldName.endsWith(".delimited")) { - return TEXT_SEARCH_ANALYZER; - // non-subfield cases below - } else if (TYPES_WITH_BASE_KEYWORD.contains(fieldType)) { - return KEYWORD_ANALYZER; - } else if (TYPES_WITH_URN_TEXT.contains(fieldType)) { - return URN_SEARCH_ANALYZER; - } else { - throw new IllegalStateException(String.format("Unknown analyzer for fieldName: %s, fieldType: %s", fieldName, fieldType)); - } - } - - public static class SearchFieldConfigBuilder { - public SearchFieldConfigBuilder fieldName(@Nonnull String fieldName) { - this.fieldName = fieldName; - isDelimitedSubfield(fieldName.endsWith(".delimited")); - isKeywordSubfield(fieldName.endsWith(".keyword")); - isWordGramSubfield(fieldName.contains("wordGrams")); - shortName(fieldName.split("[.]")[0]); - return this; - } + } + + public static class SearchFieldConfigBuilder { + public SearchFieldConfigBuilder fieldName(@Nonnull String fieldName) { + this.fieldName = fieldName; + isDelimitedSubfield(fieldName.endsWith(".delimited")); + isKeywordSubfield(fieldName.endsWith(".keyword")); + isWordGramSubfield(fieldName.contains("wordGrams")); + shortName(fieldName.split("[.]")[0]); + return this; } + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java index ce88f31449c35..7ddccb0d56724 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java @@ -1,6 +1,14 @@ package com.linkedin.metadata.search.elasticsearch.query.request; +import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.*; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.StreamReadConstraints; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.config.search.ExactMatchConfiguration; import com.linkedin.metadata.config.search.PartialConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; @@ -8,16 +16,12 @@ import com.linkedin.metadata.config.search.custom.BoolQueryConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import 
com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.StreamReadConstraints; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.linkedin.metadata.Constants; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchScoreFieldSpec; import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchScoreAnnotation; import com.linkedin.metadata.models.annotation.SearchableAnnotation; - +import com.linkedin.metadata.search.utils.ESUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -32,16 +36,14 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import com.linkedin.metadata.search.utils.ESUtils; import lombok.extern.slf4j.Slf4j; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FieldValueFactorFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.Operator; @@ -54,20 +56,25 @@ import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.opensearch.search.SearchModule; -import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; -import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.*; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.*; - - @Slf4j public class SearchQueryBuilder { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { OBJECT_MAPPER.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH, Constants.MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault( + Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH, + Constants.MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final NamedXContentRegistry X_CONTENT_REGISTRY; + static { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); X_CONTENT_REGISTRY = new NamedXContentRegistry(searchModule.getNamedXContents()); @@ -80,49 +87,63 @@ public class SearchQueryBuilder { private final CustomizedQueryHandler customizedQueryHandler; - public SearchQueryBuilder(@Nonnull SearchConfiguration searchConfiguration, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public SearchQueryBuilder( + @Nonnull SearchConfiguration searchConfiguration, + @Nullable CustomSearchConfiguration customSearchConfiguration) { this.exactMatchConfiguration = searchConfiguration.getExactMatch(); this.partialConfiguration = searchConfiguration.getPartial(); this.wordGramConfiguration = searchConfiguration.getWordGram(); this.customizedQueryHandler = 
CustomizedQueryHandler.builder(customSearchConfiguration).build(); } - public QueryBuilder buildQuery(@Nonnull List entitySpecs, @Nonnull String query, boolean fulltext) { - QueryConfiguration customQueryConfig = customizedQueryHandler.lookupQueryConfig(query).orElse(null); + public QueryBuilder buildQuery( + @Nonnull List entitySpecs, @Nonnull String query, boolean fulltext) { + QueryConfiguration customQueryConfig = + customizedQueryHandler.lookupQueryConfig(query).orElse(null); - final QueryBuilder queryBuilder = buildInternalQuery(customQueryConfig, entitySpecs, query, fulltext); + final QueryBuilder queryBuilder = + buildInternalQuery(customQueryConfig, entitySpecs, query, fulltext); return buildScoreFunctions(customQueryConfig, entitySpecs, queryBuilder); } /** * Constructs the search query. + * * @param customQueryConfig custom configuration * @param entitySpecs entities being searched * @param query search string * @param fulltext use fulltext queries * @return query builder */ - private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQueryConfig, @Nonnull List entitySpecs, - @Nonnull String query, boolean fulltext) { + private QueryBuilder buildInternalQuery( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List entitySpecs, + @Nonnull String query, + boolean fulltext) { final String sanitizedQuery = query.replaceFirst("^:+", ""); - final BoolQueryBuilder finalQuery = Optional.ofNullable(customQueryConfig) + final BoolQueryBuilder finalQuery = + Optional.ofNullable(customQueryConfig) .flatMap(cqc -> boolQueryBuilder(cqc, sanitizedQuery)) .orElse(QueryBuilders.boolQuery()); if (fulltext && !query.startsWith(STRUCTURED_QUERY_PREFIX)) { getSimpleQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); - getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); + getPrefixAndExactMatchQuery(customQueryConfig, entitySpecs, sanitizedQuery) + .ifPresent(finalQuery::should); } else { - final String withoutQueryPrefix = query.startsWith(STRUCTURED_QUERY_PREFIX) ? query.substring(STRUCTURED_QUERY_PREFIX.length()) : query; + final String withoutQueryPrefix = + query.startsWith(STRUCTURED_QUERY_PREFIX) + ? query.substring(STRUCTURED_QUERY_PREFIX.length()) + : query; QueryStringQueryBuilder queryBuilder = QueryBuilders.queryStringQuery(withoutQueryPrefix); queryBuilder.defaultOperator(Operator.AND); - getStandardFields(entitySpecs).forEach(entitySpec -> - queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); + getStandardFields(entitySpecs) + .forEach(entitySpec -> queryBuilder.field(entitySpec.fieldName(), entitySpec.boost())); finalQuery.should(queryBuilder); if (exactMatchConfiguration.isEnableStructured()) { - getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix).ifPresent(finalQuery::should); + getPrefixAndExactMatchQuery(null, entitySpecs, withoutQueryPrefix) + .ifPresent(finalQuery::should); } } @@ -130,7 +151,9 @@ private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQuery } /** - * Gets searchable fields from all entities in the input collection. De-duplicates fields across entities. + * Gets searchable fields from all entities in the input collection. De-duplicates fields across + * entities. + * * @param entitySpecs: Entity specs to extract searchable fields from * @return A set of SearchFieldConfigs containing the searchable fields from the input entities. 
*/ @@ -138,31 +161,42 @@ private QueryBuilder buildInternalQuery(@Nullable QueryConfiguration customQuery public Set getStandardFields(@Nonnull Collection entitySpecs) { Set fields = new HashSet<>(); // Always present - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - - fields.add(SearchFieldConfig.detectSubFieldType("urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); - fields.add(SearchFieldConfig.detectSubFieldType("urn.delimited", urnBoost * partialConfiguration.getUrnFactor(), - SearchableAnnotation.FieldType.URN, true)); + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn.delimited", + urnBoost * partialConfiguration.getUrnFactor(), + SearchableAnnotation.FieldType.URN, + true)); entitySpecs.stream() .map(this::getFieldsFromEntitySpec) .flatMap(Set::stream) - .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)).forEach((key, value) -> - fields.add( - new SearchFieldConfig( - key, - value.get(0).shortName(), - (float) value.stream().mapToDouble(SearchFieldConfig::boost).average().getAsDouble(), - value.get(0).analyzer(), - value.stream().anyMatch(SearchFieldConfig::hasKeywordSubfield), - value.stream().anyMatch(SearchFieldConfig::hasDelimitedSubfield), - value.stream().anyMatch(SearchFieldConfig::hasWordGramSubfields), - true, - value.stream().anyMatch(SearchFieldConfig::isDelimitedSubfield), - value.stream().anyMatch(SearchFieldConfig::isKeywordSubfield), - value.stream().anyMatch(SearchFieldConfig::isWordGramSubfield) - )) - ); + .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)) + .forEach( + (key, value) -> + fields.add( + new SearchFieldConfig( + key, + value.get(0).shortName(), + (float) + value.stream() + .mapToDouble(SearchFieldConfig::boost) + .average() + .getAsDouble(), + value.get(0).analyzer(), + value.stream().anyMatch(SearchFieldConfig::hasKeywordSubfield), + value.stream().anyMatch(SearchFieldConfig::hasDelimitedSubfield), + value.stream().anyMatch(SearchFieldConfig::hasWordGramSubfields), + true, + value.stream().anyMatch(SearchFieldConfig::isDelimitedSubfield), + value.stream().anyMatch(SearchFieldConfig::isKeywordSubfield), + value.stream().anyMatch(SearchFieldConfig::isWordGramSubfield)))); return fields; } @@ -182,38 +216,44 @@ public Set getFieldsFromEntitySpec(EntitySpec entitySpec) { if (SearchFieldConfig.detectSubFieldType(fieldSpec).hasDelimitedSubfield()) { final SearchableAnnotation searchableAnnotation = fieldSpec.getSearchableAnnotation(); - fields.add(SearchFieldConfig.detectSubFieldType(searchFieldConfig.fieldName() + ".delimited", - searchFieldConfig.boost() * partialConfiguration.getFactor(), - searchableAnnotation.getFieldType(), searchableAnnotation.isQueryByDefault())); + fields.add( + SearchFieldConfig.detectSubFieldType( + searchFieldConfig.fieldName() + ".delimited", + searchFieldConfig.boost() * partialConfiguration.getFactor(), + searchableAnnotation.getFieldType(), + searchableAnnotation.isQueryByDefault())); if (SearchFieldConfig.detectSubFieldType(fieldSpec).hasWordGramSubfields()) { - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams2") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getTwoGramFactor()) - .analyzer(WORD_GRAM_2_ANALYZER) - .hasKeywordSubfield(true) - 
.hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams3") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getThreeGramFactor()) - .analyzer(WORD_GRAM_3_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); - fields.add(SearchFieldConfig.builder() - .fieldName(searchFieldConfig.fieldName() + ".wordGrams4") - .boost(searchFieldConfig.boost() * wordGramConfiguration.getFourGramFactor()) - .analyzer(WORD_GRAM_4_ANALYZER) - .hasKeywordSubfield(true) - .hasDelimitedSubfield(true) - .hasWordGramSubfields(true) - .isQueryByDefault(true) - .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams2") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getTwoGramFactor()) + .analyzer(WORD_GRAM_2_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams3") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getThreeGramFactor()) + .analyzer(WORD_GRAM_3_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); + fields.add( + SearchFieldConfig.builder() + .fieldName(searchFieldConfig.fieldName() + ".wordGrams4") + .boost(searchFieldConfig.boost() * wordGramConfiguration.getFourGramFactor()) + .analyzer(WORD_GRAM_4_ANALYZER) + .hasKeywordSubfield(true) + .hasDelimitedSubfield(true) + .hasWordGramSubfields(true) + .isQueryByDefault(true) + .build()); } } } @@ -224,11 +264,18 @@ private Set getStandardFields(@Nonnull EntitySpec entitySpec) Set fields = new HashSet<>(); // Always present - final float urnBoost = Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); - - fields.add(SearchFieldConfig.detectSubFieldType("urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); - fields.add(SearchFieldConfig.detectSubFieldType("urn.delimited", urnBoost * partialConfiguration.getUrnFactor(), - SearchableAnnotation.FieldType.URN, true)); + final float urnBoost = + Float.parseFloat((String) PRIMARY_URN_SEARCH_PROPERTIES.get("boostScore")); + + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn", urnBoost, SearchableAnnotation.FieldType.URN, true)); + fields.add( + SearchFieldConfig.detectSubFieldType( + "urn.delimited", + urnBoost * partialConfiguration.getUrnFactor(), + SearchableAnnotation.FieldType.URN, + true)); fields.addAll(getFieldsFromEntitySpec(entitySpec)); @@ -242,9 +289,11 @@ private static String unquote(String query) { private static boolean isQuoted(String query) { return Stream.of("\"", "'").anyMatch(query::contains); } - private Optional getSimpleQuery(@Nullable QueryConfiguration customQueryConfig, - List entitySpecs, - String sanitizedQuery) { + + private Optional getSimpleQuery( + @Nullable QueryConfiguration customQueryConfig, + List entitySpecs, + String sanitizedQuery) { Optional result = Optional.empty(); final boolean executeSimpleQuery; @@ -263,25 +312,34 @@ private Optional getSimpleQuery(@Nullable QueryConfiguration custo BoolQueryBuilder simplePerField = QueryBuilders.boolQuery(); // Simple query string does not use per field analyzers // Group the fields by analyzer - Map> analyzerGroup = 
entitySpecs.stream() + Map> analyzerGroup = + entitySpecs.stream() .map(this::getStandardFields) .flatMap(Set::stream) .filter(SearchFieldConfig::isQueryByDefault) .collect(Collectors.groupingBy(SearchFieldConfig::analyzer)); - analyzerGroup.keySet().stream().sorted().filter(str -> !str.contains("word_gram")).forEach(analyzer -> { - List fieldConfigs = analyzerGroup.get(analyzer); - SimpleQueryStringBuilder simpleBuilder = QueryBuilders.simpleQueryStringQuery(sanitizedQuery); - simpleBuilder.analyzer(analyzer); - simpleBuilder.defaultOperator(Operator.AND); - Map> fieldAnalyzers = fieldConfigs.stream().collect(Collectors.groupingBy(SearchFieldConfig::fieldName)); - // De-duplicate fields across different indices - for (Map.Entry> fieldAnalyzer : fieldAnalyzers.entrySet()) { - SearchFieldConfig cfg = fieldAnalyzer.getValue().get(0); - simpleBuilder.field(cfg.fieldName(), cfg.boost()); - } - simplePerField.should(simpleBuilder); - }); + analyzerGroup.keySet().stream() + .sorted() + .filter(str -> !str.contains("word_gram")) + .forEach( + analyzer -> { + List fieldConfigs = analyzerGroup.get(analyzer); + SimpleQueryStringBuilder simpleBuilder = + QueryBuilders.simpleQueryStringQuery(sanitizedQuery); + simpleBuilder.analyzer(analyzer); + simpleBuilder.defaultOperator(Operator.AND); + Map> fieldAnalyzers = + fieldConfigs.stream() + .collect(Collectors.groupingBy(SearchFieldConfig::fieldName)); + // De-duplicate fields across different indices + for (Map.Entry> fieldAnalyzer : + fieldAnalyzers.entrySet()) { + SearchFieldConfig cfg = fieldAnalyzer.getValue().get(0); + simpleBuilder.field(cfg.fieldName(), cfg.boost()); + } + simplePerField.should(simpleBuilder); + }); result = Optional.of(simplePerField); } @@ -289,99 +347,133 @@ private Optional getSimpleQuery(@Nullable QueryConfiguration custo return result; } - private Optional getPrefixAndExactMatchQuery(@Nullable QueryConfiguration customQueryConfig, - @Nonnull List entitySpecs, - String query) { + private Optional getPrefixAndExactMatchQuery( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List entitySpecs, + String query) { - final boolean isPrefixQuery = customQueryConfig == null ? exactMatchConfiguration.isWithPrefix() : customQueryConfig.isPrefixMatchQuery(); + final boolean isPrefixQuery = + customQueryConfig == null + ? 
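// A rough sketch of the grouping step in getSimpleQuery above, with invented field names and
// an assumed local collection of query-by-default fields: simple_query_string cannot apply
// per-field analyzers, so fields sharing an analyzer are folded into one clause and the
// per-analyzer clauses are OR-ed together via should().
Map<String, List<SearchFieldConfig>> byAnalyzer =
    queryByDefaultFields.stream().collect(Collectors.groupingBy(SearchFieldConfig::analyzer));
// e.g. {"keyword" -> [name, id], "word_delimited" -> [name.delimited, ...]} -- one
// simpleQueryStringQuery per key, each listing only that group's fields and boosts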
exactMatchConfiguration.isWithPrefix() + : customQueryConfig.isPrefixMatchQuery(); final boolean isExactQuery = customQueryConfig == null || customQueryConfig.isExactMatchQuery(); - BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); + BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); String unquotedQuery = unquote(query); - getStandardFields(entitySpecs).forEach(searchFieldConfig -> { - if (searchFieldConfig.isDelimitedSubfield() && isPrefixQuery) { - finalQuery.should(QueryBuilders.matchPhrasePrefixQuery(searchFieldConfig.fieldName(), query) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getPrefixFactor() - * exactMatchConfiguration.getCaseSensitivityFactor()) - .queryName(searchFieldConfig.shortName())); // less than exact - } - - if (searchFieldConfig.isKeyword() && isExactQuery) { - // It is important to use the subfield .keyword (it uses a different normalizer) - // The non-.keyword field removes case information - - // Exact match case-sensitive - finalQuery.should( - QueryBuilders.termQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), unquotedQuery) - .caseInsensitive(false) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor()) - .queryName(searchFieldConfig.shortName())); - - // Exact match case-insensitive - finalQuery.should( - QueryBuilders.termQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), unquotedQuery) - .caseInsensitive(true) - .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor() - * exactMatchConfiguration.getCaseSensitivityFactor()) - .queryName(searchFieldConfig.fieldName())); - } - - if (searchFieldConfig.isWordGramSubfield() && isPrefixQuery) { - finalQuery.should( - QueryBuilders.matchPhraseQuery(ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), - unquotedQuery) - .boost(searchFieldConfig.boost() * getWordGramFactor(searchFieldConfig.fieldName())) - .queryName(searchFieldConfig.shortName())); - } - }); + getStandardFields(entitySpecs) + .forEach( + searchFieldConfig -> { + if (searchFieldConfig.isDelimitedSubfield() && isPrefixQuery) { + finalQuery.should( + QueryBuilders.matchPhrasePrefixQuery(searchFieldConfig.fieldName(), query) + .boost( + searchFieldConfig.boost() + * exactMatchConfiguration.getPrefixFactor() + * exactMatchConfiguration.getCaseSensitivityFactor()) + .queryName(searchFieldConfig.shortName())); // less than exact + } + + if (searchFieldConfig.isKeyword() && isExactQuery) { + // It is important to use the subfield .keyword (it uses a different normalizer) + // The non-.keyword field removes case information + + // Exact match case-sensitive + finalQuery.should( + QueryBuilders.termQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .caseInsensitive(false) + .boost(searchFieldConfig.boost() * exactMatchConfiguration.getExactFactor()) + .queryName(searchFieldConfig.shortName())); + + // Exact match case-insensitive + finalQuery.should( + QueryBuilders.termQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .caseInsensitive(true) + .boost( + searchFieldConfig.boost() + * exactMatchConfiguration.getExactFactor() + * exactMatchConfiguration.getCaseSensitivityFactor()) + .queryName(searchFieldConfig.fieldName())); + } + + if (searchFieldConfig.isWordGramSubfield() && isPrefixQuery) { + finalQuery.should( + QueryBuilders.matchPhraseQuery( + ESUtils.toKeywordField(searchFieldConfig.fieldName(), false), + unquotedQuery) + .boost( + searchFieldConfig.boost() + * 
getWordGramFactor(searchFieldConfig.fieldName())) + .queryName(searchFieldConfig.shortName())); + } + }); return finalQuery.should().size() > 0 ? Optional.of(finalQuery) : Optional.empty(); } - private FunctionScoreQueryBuilder buildScoreFunctions(@Nullable QueryConfiguration customQueryConfig, - @Nonnull List entitySpecs, - @Nonnull QueryBuilder queryBuilder) { + private FunctionScoreQueryBuilder buildScoreFunctions( + @Nullable QueryConfiguration customQueryConfig, + @Nonnull List entitySpecs, + @Nonnull QueryBuilder queryBuilder) { if (customQueryConfig != null) { // Prefer configuration function scoring over annotation scoring return functionScoreQueryBuilder(customQueryConfig, queryBuilder); } else { - return QueryBuilders.functionScoreQuery(queryBuilder, buildAnnotationScoreFunctions(entitySpecs)) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions - .boostMode(CombineFunction.MULTIPLY); // Multiply score function with the score from query; + return QueryBuilders.functionScoreQuery( + queryBuilder, buildAnnotationScoreFunctions(entitySpecs)) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) // Average score functions + .boostMode( + CombineFunction.MULTIPLY); // Multiply score function with the score from query; } } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotationScoreFunctions(@Nonnull List entitySpecs) { + private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotationScoreFunctions( + @Nonnull List entitySpecs) { List finalScoreFunctions = new ArrayList<>(); // Add a default weight of 1.0 to make sure the score function is larger than 1 finalScoreFunctions.add( - new FunctionScoreQueryBuilder.FilterFunctionBuilder(ScoreFunctionBuilders.weightFactorFunction(1.0f))); - - Map annotations = entitySpecs.stream() - .map(EntitySpec::getSearchableFieldSpecs) - .flatMap(List::stream) - .map(SearchableFieldSpec::getSearchableAnnotation) - .collect(Collectors.toMap(SearchableAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> annotation1)); + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1.0f))); + + Map annotations = + entitySpecs.stream() + .map(EntitySpec::getSearchableFieldSpecs) + .flatMap(List::stream) + .map(SearchableFieldSpec::getSearchableAnnotation) + .collect( + Collectors.toMap( + SearchableAnnotation::getFieldName, + annotation -> annotation, + (annotation1, annotation2) -> annotation1)); for (Map.Entry annotationEntry : annotations.entrySet()) { SearchableAnnotation annotation = annotationEntry.getValue(); - annotation - .getWeightsPerFieldValue() - .entrySet() - .stream() - .map(entry -> buildWeightFactorFunction(annotation.getFieldName(), entry.getKey(), - entry.getValue())).forEach(finalScoreFunctions::add); + annotation.getWeightsPerFieldValue().entrySet().stream() + .map( + entry -> + buildWeightFactorFunction( + annotation.getFieldName(), entry.getKey(), entry.getValue())) + .forEach(finalScoreFunctions::add); } - Map searchScoreAnnotationMap = entitySpecs.stream() - .map(EntitySpec::getSearchScoreFieldSpecs) - .flatMap(List::stream) - .map(SearchScoreFieldSpec::getSearchScoreAnnotation) - .collect(Collectors.toMap(SearchScoreAnnotation::getFieldName, annotation -> annotation, (annotation1, annotation2) -> annotation1)); - for (Map.Entry searchScoreAnnotationEntry : searchScoreAnnotationMap.entrySet()) { + Map searchScoreAnnotationMap = + entitySpecs.stream() + .map(EntitySpec::getSearchScoreFieldSpecs) + 
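// A small sketch of the filter/function pair returned by the buildWeightFactorFunction
// helper invoked above, with an invented field and value: documents whose "materialized"
// field equals true contribute a 0.5 weight function, which is averaged with the other
// score functions and then multiplied into the query score (ScoreMode.AVG + MULTIPLY).
FunctionScoreQueryBuilder.FilterFunctionBuilder weighted =
    new FunctionScoreQueryBuilder.FilterFunctionBuilder(
        QueryBuilders.termQuery("materialized", true),
        ScoreFunctionBuilders.weightFactorFunction(0.5f));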
.flatMap(List::stream) + .map(SearchScoreFieldSpec::getSearchScoreAnnotation) + .collect( + Collectors.toMap( + SearchScoreAnnotation::getFieldName, + annotation -> annotation, + (annotation1, annotation2) -> annotation1)); + for (Map.Entry searchScoreAnnotationEntry : + searchScoreAnnotationMap.entrySet()) { SearchScoreAnnotation annotation = searchScoreAnnotationEntry.getValue(); finalScoreFunctions.add(buildScoreFunctionFromSearchScoreAnnotation(annotation)); } @@ -389,14 +481,15 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder[] buildAnnotation return finalScoreFunctions.toArray(new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]); } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction(@Nonnull String fieldName, - @Nonnull Object fieldValue, double weight) { - return new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery(fieldName, fieldValue), + private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildWeightFactorFunction( + @Nonnull String fieldName, @Nonnull Object fieldValue, double weight) { + return new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery(fieldName, fieldValue), ScoreFunctionBuilders.weightFactorFunction((float) weight)); } - private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctionFromSearchScoreAnnotation( - @Nonnull SearchScoreAnnotation annotation) { + private static FunctionScoreQueryBuilder.FilterFunctionBuilder + buildScoreFunctionFromSearchScoreAnnotation(@Nonnull SearchScoreAnnotation annotation) { FieldValueFactorFunctionBuilder scoreFunction = ScoreFunctionBuilders.fieldValueFactorFunction(annotation.getFieldName()); scoreFunction.factor((float) annotation.getWeight()); @@ -405,7 +498,8 @@ private static FunctionScoreQueryBuilder.FilterFunctionBuilder buildScoreFunctio return new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction); } - private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotation.Modifier modifier) { + private static FieldValueFactorFunction.Modifier mapModifier( + SearchScoreAnnotation.Modifier modifier) { switch (modifier) { case LOG: return FieldValueFactorFunction.Modifier.LOG1P; @@ -422,33 +516,43 @@ private static FieldValueFactorFunction.Modifier mapModifier(SearchScoreAnnotati } } - public FunctionScoreQueryBuilder functionScoreQueryBuilder(QueryConfiguration customQueryConfiguration, - QueryBuilder queryBuilder) { + public FunctionScoreQueryBuilder functionScoreQueryBuilder( + QueryConfiguration customQueryConfiguration, QueryBuilder queryBuilder) { return toFunctionScoreQueryBuilder(queryBuilder, customQueryConfiguration.getFunctionScore()); } - public Optional boolQueryBuilder(QueryConfiguration customQueryConfiguration, String query) { + public Optional boolQueryBuilder( + QueryConfiguration customQueryConfiguration, String query) { if (customQueryConfiguration.getBoolQuery() != null) { - log.debug("Using custom query configuration queryRegex: {}", customQueryConfiguration.getQueryRegex()); + log.debug( + "Using custom query configuration queryRegex: {}", + customQueryConfiguration.getQueryRegex()); } - return Optional.ofNullable(customQueryConfiguration.getBoolQuery()).map(bq -> toBoolQueryBuilder(query, bq)); + return Optional.ofNullable(customQueryConfiguration.getBoolQuery()) + .map(bq -> toBoolQueryBuilder(query, bq)); } private BoolQueryBuilder toBoolQueryBuilder(String query, BoolQueryConfiguration boolQuery) { try { - String jsonFragment = 
OBJECT_MAPPER.writeValueAsString(boolQuery) - .replace("\"{{query_string}}\"", OBJECT_MAPPER.writeValueAsString(query)) - .replace("\"{{unquoted_query_string}}\"", OBJECT_MAPPER.writeValueAsString(unquote(query))); - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, jsonFragment); + String jsonFragment = + OBJECT_MAPPER + .writeValueAsString(boolQuery) + .replace("\"{{query_string}}\"", OBJECT_MAPPER.writeValueAsString(query)) + .replace( + "\"{{unquoted_query_string}}\"", + OBJECT_MAPPER.writeValueAsString(unquote(query))); + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, jsonFragment); return BoolQueryBuilder.fromXContent(parser); } catch (IOException e) { throw new RuntimeException(e); } } - private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder(QueryBuilder queryBuilder, - Map params) { + private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder( + QueryBuilder queryBuilder, Map params) { try { HashMap body = new HashMap<>(params); if (!body.isEmpty()) { @@ -457,11 +561,11 @@ private FunctionScoreQueryBuilder toFunctionScoreQueryBuilder(QueryBuilder query body.put("query", OBJECT_MAPPER.readValue(queryBuilder.toString(), Map.class)); - String jsonFragment = OBJECT_MAPPER.writeValueAsString(Map.of( - "function_score", body - )); - XContentParser parser = XContentType.JSON.xContent().createParser(X_CONTENT_REGISTRY, - LoggingDeprecationHandler.INSTANCE, jsonFragment); + String jsonFragment = OBJECT_MAPPER.writeValueAsString(Map.of("function_score", body)); + XContentParser parser = + XContentType.JSON + .xContent() + .createParser(X_CONTENT_REGISTRY, LoggingDeprecationHandler.INSTANCE, jsonFragment); return (FunctionScoreQueryBuilder) FunctionScoreQueryBuilder.parseInnerQueryBuilder(parser); } catch (IOException e) { throw new RuntimeException(e); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index 0df6afd49c373..80da30229b74c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -1,13 +1,18 @@ package com.linkedin.metadata.search.elasticsearch.query.request; -import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; +import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.DoubleMap; import com.linkedin.data.template.LongMap; +import com.linkedin.metadata.config.search.SearchConfiguration; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.SearchableFieldSpec; import 
com.linkedin.metadata.models.annotation.SearchableAnnotation; @@ -51,7 +56,6 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; import org.opensearch.action.search.SearchRequest; @@ -72,25 +76,21 @@ import org.opensearch.search.fetch.subphase.highlight.HighlightField; import org.opensearch.search.suggest.term.TermSuggestion; -import static com.linkedin.metadata.search.utils.ESUtils.NAME_SUGGESTION; -import static com.linkedin.metadata.search.utils.ESUtils.toFacetField; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; -import static com.linkedin.metadata.utils.SearchUtil.*; - - @Slf4j public class SearchRequestHandler { - private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = new SearchFlags() + private static final SearchFlags DEFAULT_SERVICE_SEARCH_FLAGS = + new SearchFlags() .setFulltext(false) .setMaxAggValues(20) .setSkipCache(false) .setSkipAggregates(false) .setSkipHighlighting(false); - private static final Map, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = new ConcurrentHashMap<>(); + private static final Map, SearchRequestHandler> REQUEST_HANDLER_BY_ENTITY_NAME = + new ConcurrentHashMap<>(); private static final String REMOVED = "removed"; private static final String URN_FILTER = "urn"; - private static final String[] FIELDS_TO_FETCH = new String[]{"urn", "usageCountLast30Days"}; - private static final String[] URN_FIELD = new String[]{"urn"}; + private static final String[] FIELDS_TO_FETCH = new String[] {"urn", "usageCountLast30Days"}; + private static final String[] URN_FIELD = new String[] {"urn"}; private final List _entitySpecs; private final Set _defaultQueryFieldNames; @@ -100,19 +100,24 @@ public class SearchRequestHandler { private final SearchQueryBuilder _searchQueryBuilder; private final AggregationQueryBuilder _aggregationQueryBuilder; - private SearchRequestHandler(@Nonnull EntitySpec entitySpec, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + private SearchRequestHandler( + @Nonnull EntitySpec entitySpec, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { this(ImmutableList.of(entitySpec), configs, customSearchConfiguration); } - private SearchRequestHandler(@Nonnull List entitySpecs, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + private SearchRequestHandler( + @Nonnull List entitySpecs, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { _entitySpecs = entitySpecs; List annotations = getSearchableAnnotations(); _defaultQueryFieldNames = getDefaultQueryFieldNames(annotations); - _filtersToDisplayName = annotations.stream() - .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); + _filtersToDisplayName = + annotations.stream() + .flatMap(annotation -> getFacetFieldDisplayNameFromAnnotation(annotation).stream()) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond, mapMerger())); _filtersToDisplayName.put(INDEX_VIRTUAL_FIELD, "Type"); _highlights = getHighlights(); _searchQueryBuilder = new SearchQueryBuilder(configs, customSearchConfiguration); @@ -120,16 +125,22 @@ private SearchRequestHandler(@Nonnull List entitySpecs, @Nonnull Sea _configs = 
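// A hedged sketch of the memoisation behind REQUEST_HANDLER_BY_ENTITY_NAME above: the
// getBuilder factories that follow key the cache by an immutable copy of the entity-spec
// list, so two calls with equal spec lists share one handler. entitySpecA and configs are
// placeholders for values available to the caller.
SearchRequestHandler h1 = SearchRequestHandler.getBuilder(List.of(entitySpecA), configs, null);
SearchRequestHandler h2 = SearchRequestHandler.getBuilder(List.of(entitySpecA), configs, null);
// h1 == h2: computeIfAbsent returns the cached instance for an equal key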
configs; } - public static SearchRequestHandler getBuilder(@Nonnull EntitySpec entitySpec, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public static SearchRequestHandler getBuilder( + @Nonnull EntitySpec entitySpec, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { return REQUEST_HANDLER_BY_ENTITY_NAME.computeIfAbsent( - ImmutableList.of(entitySpec), k -> new SearchRequestHandler(entitySpec, configs, customSearchConfiguration)); + ImmutableList.of(entitySpec), + k -> new SearchRequestHandler(entitySpec, configs, customSearchConfiguration)); } - public static SearchRequestHandler getBuilder(@Nonnull List entitySpecs, @Nonnull SearchConfiguration configs, - @Nullable CustomSearchConfiguration customSearchConfiguration) { + public static SearchRequestHandler getBuilder( + @Nonnull List entitySpecs, + @Nonnull SearchConfiguration configs, + @Nullable CustomSearchConfiguration customSearchConfiguration) { return REQUEST_HANDLER_BY_ENTITY_NAME.computeIfAbsent( - ImmutableList.copyOf(entitySpecs), k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); + ImmutableList.copyOf(entitySpecs), + k -> new SearchRequestHandler(entitySpecs, configs, customSearchConfiguration)); } private List getSearchableAnnotations() { @@ -142,21 +153,22 @@ private List getSearchableAnnotations() { @VisibleForTesting private Set getDefaultQueryFieldNames(List annotations) { - return Stream.concat(annotations.stream() - .filter(SearchableAnnotation::isQueryByDefault) - .map(SearchableAnnotation::getFieldName), + return Stream.concat( + annotations.stream() + .filter(SearchableAnnotation::isQueryByDefault) + .map(SearchableAnnotation::getFieldName), Stream.of("urn")) - .collect(Collectors.toSet()); + .collect(Collectors.toSet()); } // If values are not equal, throw error private BinaryOperator mapMerger() { return (s1, s2) -> { - if (!StringUtils.equals(s1, s2)) { - throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); - } - return s1; - }; + if (!StringUtils.equals(s1, s2)) { + throw new IllegalStateException(String.format("Unable to merge values %s and %s", s1, s2)); + } + return s1; + }; } public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { @@ -168,7 +180,8 @@ public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { /** * Constructs the search query based on the query request. * - *
<p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or later + *
<p>
TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or + * later * * @param input the search input text * @param filter the search filter @@ -180,10 +193,16 @@ public static BoolQueryBuilder getFilterQuery(@Nullable Filter filter) { */ @Nonnull @WithSpan - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int from, int size, - @Nullable SearchFlags searchFlags, @Nullable List facets) { - SearchFlags finalSearchFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + public SearchRequest getSearchRequest( + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List facets) { + SearchFlags finalSearchFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -193,7 +212,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi searchSourceBuilder.fetchSource("urn", null); BoolQueryBuilder filterQuery = getFilterQuery(filter); - searchSourceBuilder.query(QueryBuilders.boolQuery() + searchSourceBuilder.query( + QueryBuilders.boolQuery() .must(getQuery(input, finalSearchFlags.isFulltext())) .filter(filterQuery)); if (!finalSearchFlags.isSkipAggregates()) { @@ -217,7 +237,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi /** * Constructs the search query based on the query request. * - *
<p>TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or later + *
<p>
TODO: This part will be replaced by searchTemplateAPI when the elastic is upgraded to 6.4 or + * later * * @param input the search input text * @param filter the search filter @@ -227,11 +248,18 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi */ @Nonnull @WithSpan - public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable Object[] sort, @Nullable String pitId, @Nullable String keepAlive, - int size, SearchFlags searchFlags) { + public SearchRequest getSearchRequest( + @Nonnull String input, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable Object[] sort, + @Nullable String pitId, + @Nullable String keepAlive, + int size, + SearchFlags searchFlags) { SearchRequest searchRequest = new PITAwareSearchRequest(); - SearchFlags finalSearchFlags = applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); + SearchFlags finalSearchFlags = + applyDefaultSearchFlags(searchFlags, input, DEFAULT_SERVICE_SEARCH_FLAGS); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); ESUtils.setSearchAfter(searchSourceBuilder, sort, pitId, keepAlive); @@ -240,7 +268,10 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi searchSourceBuilder.fetchSource("urn", null); BoolQueryBuilder filterQuery = getFilterQuery(filter); - searchSourceBuilder.query(QueryBuilders.boolQuery().must(getQuery(input, finalSearchFlags.isFulltext())).filter(filterQuery)); + searchSourceBuilder.query( + QueryBuilders.boolQuery() + .must(getQuery(input, finalSearchFlags.isFulltext())) + .filter(filterQuery)); if (!finalSearchFlags.isSkipAggregates()) { _aggregationQueryBuilder.getAggregations().forEach(searchSourceBuilder::aggregation); } @@ -256,8 +287,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi } /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to - * search results. + * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion + * to be applied to search results. * * @param filters {@link Filter} list of conditions with fields and values * @param sortCriterion {@link SortCriterion} to be applied to the search results @@ -266,8 +297,8 @@ public SearchRequest getSearchRequest(@Nonnull String input, @Nullable Filter fi * @return {@link SearchRequest} that contains the filtered query */ @Nonnull - public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, - int size) { + public SearchRequest getFilterRequest( + @Nullable Filter filters, @Nullable SortCriterion sortCriterion, int from, int size) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filters); @@ -281,10 +312,10 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr } /** - * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to - * search results. + * Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion + * to be applied to search results. * - * TODO: Used in batch ingestion from ingestion scheduler + *
<p>
TODO: Used in batch ingestion from ingestion scheduler * * @param filters {@link Filter} list of conditions with fields and values * @param sortCriterion {@link SortCriterion} to be applied to the search results @@ -295,8 +326,13 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr * @return {@link SearchRequest} that contains the filtered query */ @Nonnull - public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCriterion sortCriterion, @Nullable Object[] sort, - @Nullable String pitId, @Nonnull String keepAlive, int size) { + public SearchRequest getFilterRequest( + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + int size) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filters); @@ -320,14 +356,16 @@ public SearchRequest getFilterRequest(@Nullable Filter filters, @Nullable SortCr * @return {@link SearchRequest} that contains the aggregation query */ @Nonnull - public static SearchRequest getAggregationRequest(@Nonnull String field, @Nullable Filter filter, int limit) { + public static SearchRequest getAggregationRequest( + @Nonnull String field, @Nullable Filter filter, int limit) { SearchRequest searchRequest = new SearchRequest(); BoolQueryBuilder filterQuery = getFilterQuery(filter); final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(filterQuery); searchSourceBuilder.size(0); - searchSourceBuilder.aggregation(AggregationBuilders.terms(field).field(ESUtils.toKeywordField(field, false)).size(limit)); + searchSourceBuilder.aggregation( + AggregationBuilders.terms(field).field(ESUtils.toKeywordField(field, false)).size(limit)); searchRequest.source(searchSourceBuilder); return searchRequest; @@ -347,19 +385,22 @@ public HighlightBuilder getHighlights() { // Check for each field name and any subfields _defaultQueryFieldNames.stream() - .flatMap(fieldName -> Stream.of(fieldName, fieldName + ".*")).distinct() - .forEach(highlightBuilder::field); + .flatMap(fieldName -> Stream.of(fieldName, fieldName + ".*")) + .distinct() + .forEach(highlightBuilder::field); return highlightBuilder; } @WithSpan - public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { + public SearchResult extractResult( + @Nonnull SearchResponse searchResponse, Filter filter, int from, int size) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; List resultList = getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); - return new SearchResult().setEntities(new SearchEntityArray(resultList)) + return new SearchResult() + .setEntities(new SearchEntityArray(resultList)) .setMetadata(searchResultMetadata) .setFrom(from) .setPageSize(size) @@ -367,8 +408,13 @@ public SearchResult extractResult(@Nonnull SearchResponse searchResponse, Filter } @WithSpan - public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Filter filter, @Nullable String scrollId, - @Nullable String keepAlive, int size, boolean supportsPointInTime) { + public ScrollResult extractScrollResult( + @Nonnull SearchResponse searchResponse, + Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + boolean supportsPointInTime) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; List resultList = 
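// An illustrative call to getAggregationRequest above, with an invented facet field. The
// source sets size(0), so only the terms aggregation comes back, and the aggregation runs
// against the keyword subfield, assuming ESUtils.toKeywordField maps "platform" to
// "platform.keyword" here.
SearchRequest aggRequest = SearchRequestHandler.getAggregationRequest("platform", null, 100);
// body ~ {"size": 0, "aggs": {"platform": {"terms": {"field": "platform.keyword", "size": 100}}}}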
getResults(searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(searchResponse, filter); @@ -379,15 +425,21 @@ public ScrollResult extractScrollResult(@Nonnull SearchResponse searchResponse, Object[] sort = searchHits[searchHits.length - 1].getSortValues(); long expirationTimeMs = 0L; if (keepAlive != null && supportsPointInTime) { - expirationTimeMs = TimeValue.parseTimeValue(keepAlive, "expirationTime").getMillis() + System.currentTimeMillis(); + expirationTimeMs = + TimeValue.parseTimeValue(keepAlive, "expirationTime").getMillis() + + System.currentTimeMillis(); } - nextScrollId = new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs).toScrollId(); + nextScrollId = + new SearchAfterWrapper(sort, searchResponse.pointInTimeId(), expirationTimeMs) + .toScrollId(); } - ScrollResult scrollResult = new ScrollResult().setEntities(new SearchEntityArray(resultList)) - .setMetadata(searchResultMetadata) - .setPageSize(size) - .setNumEntities(totalCount); + ScrollResult scrollResult = + new ScrollResult() + .setEntities(new SearchEntityArray(resultList)) + .setMetadata(searchResultMetadata) + .setPageSize(size) + .setNumEntities(totalCount); if (nextScrollId != null) { scrollResult.setScrollId(nextScrollId); @@ -418,17 +470,20 @@ private List extractMatchedFields(@Nonnull SearchHit hit) { if (!highlightedFieldNamesAndValues.containsKey(queryName)) { if (hit.getFields().containsKey(queryName)) { for (Object fieldValue : hit.getFields().get(queryName).getValues()) { - highlightedFieldNamesAndValues.computeIfAbsent(queryName, k -> new HashSet<>()).add(fieldValue.toString()); + highlightedFieldNamesAndValues + .computeIfAbsent(queryName, k -> new HashSet<>()) + .add(fieldValue.toString()); } } else { highlightedFieldNamesAndValues.put(queryName, Set.of("")); } } } - return highlightedFieldNamesAndValues.entrySet() - .stream() + return highlightedFieldNamesAndValues.entrySet().stream() .flatMap( - entry -> entry.getValue().stream().map(value -> new MatchedField().setName(entry.getKey()).setValue(value))) + entry -> + entry.getValue().stream() + .map(value -> new MatchedField().setName(entry.getKey()).setValue(value))) .collect(Collectors.toList()); } @@ -438,11 +493,13 @@ private Optional getFieldName(String matchedField) { } private Map extractFeatures(@Nonnull SearchHit searchHit) { - return ImmutableMap.of(Features.Name.SEARCH_BACKEND_SCORE.toString(), (double) searchHit.getScore()); + return ImmutableMap.of( + Features.Name.SEARCH_BACKEND_SCORE.toString(), (double) searchHit.getScore()); } private SearchEntity getResult(@Nonnull SearchHit hit) { - return new SearchEntity().setEntity(getUrnFromSearchHit(hit)) + return new SearchEntity() + .setEntity(getUrnFromSearchHit(hit)) .setMatchedFields(new MatchedFieldArray(extractMatchedFields(hit))) .setScore(hit.getScore()) .setFeatures(new DoubleMap(extractFeatures(hit))); @@ -456,7 +513,9 @@ private SearchEntity getResult(@Nonnull SearchHit hit) { */ @Nonnull private List getResults(@Nonnull SearchResponse searchResponse) { - return Arrays.stream(searchResponse.getHits().getHits()).map(this::getResult).collect(Collectors.toList()); + return Arrays.stream(searchResponse.getHits().getHits()) + .map(this::getResult) + .collect(Collectors.toList()); } @Nonnull @@ -473,15 +532,17 @@ private Urn getUrnFromSearchHit(@Nonnull SearchHit hit) { * * @param searchResponse the raw {@link SearchResponse} as obtained from the search engine * @param filter the provided Filter to use with Elasticsearch - * - * 
@return {@link SearchResultMetadata} with aggregation and list of urns obtained from {@link SearchResponse} + * @return {@link SearchResultMetadata} with aggregation and list of urns obtained from {@link + * SearchResponse} */ @Nonnull - private SearchResultMetadata extractSearchResultMetadata(@Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + private SearchResultMetadata extractSearchResultMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { final SearchResultMetadata searchResultMetadata = new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - final List aggregationMetadataList = extractAggregationMetadata(searchResponse, filter); + final List aggregationMetadataList = + extractAggregationMetadata(searchResponse, filter); searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); final List searchSuggestions = extractSearchSuggestions(searchResponse); @@ -494,34 +555,43 @@ private String computeDisplayName(String name) { if (_filtersToDisplayName.containsKey(name)) { return _filtersToDisplayName.get(name); } else if (name.contains(AGGREGATION_SEPARATOR_CHAR)) { - return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)).map(_filtersToDisplayName::get).collect( - Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); + return Arrays.stream(name.split(AGGREGATION_SEPARATOR_CHAR)) + .map(_filtersToDisplayName::get) + .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } return name; } - private List extractAggregationMetadata(@Nonnull SearchResponse searchResponse, @Nullable Filter filter) { + private List extractAggregationMetadata( + @Nonnull SearchResponse searchResponse, @Nullable Filter filter) { final List aggregationMetadataList = new ArrayList<>(); if (searchResponse.getAggregations() == null) { return addFiltersToAggregationMetadata(aggregationMetadataList, filter); } - for (Map.Entry entry : searchResponse.getAggregations().getAsMap().entrySet()) { - final Map oneTermAggResult = extractTermAggregations((ParsedTerms) entry.getValue(), entry.getKey().equals("_entityType")); + for (Map.Entry entry : + searchResponse.getAggregations().getAsMap().entrySet()) { + final Map oneTermAggResult = + extractTermAggregations( + (ParsedTerms) entry.getValue(), entry.getKey().equals("_entityType")); if (oneTermAggResult.isEmpty()) { continue; } - final AggregationMetadata aggregationMetadata = new AggregationMetadata().setName(entry.getKey()) - .setDisplayName(computeDisplayName(entry.getKey())) - .setAggregations(new LongMap(oneTermAggResult)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); + final AggregationMetadata aggregationMetadata = + new AggregationMetadata() + .setName(entry.getKey()) + .setDisplayName(computeDisplayName(entry.getKey())) + .setAggregations(new LongMap(oneTermAggResult)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(oneTermAggResult, Collections.emptySet()))); aggregationMetadataList.add(aggregationMetadata); } return addFiltersToAggregationMetadata(aggregationMetadataList, filter); - } + } @WithSpan - public static Map extractTermAggregations(@Nonnull SearchResponse searchResponse, - @Nonnull String aggregationName) { + public static Map extractTermAggregations( + @Nonnull SearchResponse searchResponse, @Nonnull String aggregationName) { if (searchResponse.getAggregations() == null) { return Collections.emptyMap(); } @@ -530,7 +600,8 @@ public static Map 
extractTermAggregations(@Nonnull SearchResponse if (aggregation == null) { return Collections.emptyMap(); } - return extractTermAggregations((ParsedTerms) aggregation, aggregationName.equals("_entityType")); + return extractTermAggregations( + (ParsedTerms) aggregation, aggregationName.equals("_entityType")); } private List extractSearchSuggestions(@Nonnull SearchResponse searchResponse) { @@ -538,13 +609,18 @@ private List extractSearchSuggestions(@Nonnull SearchResponse if (searchResponse.getSuggest() != null) { TermSuggestion termSuggestion = searchResponse.getSuggest().getSuggestion(NAME_SUGGESTION); if (termSuggestion != null && termSuggestion.getEntries().size() > 0) { - termSuggestion.getEntries().get(0).getOptions().forEach(suggestOption -> { - SearchSuggestion searchSuggestion = new SearchSuggestion(); - searchSuggestion.setText(String.valueOf(suggestOption.getText())); - searchSuggestion.setFrequency(suggestOption.getFreq()); - searchSuggestion.setScore(suggestOption.getScore()); - searchSuggestions.add(searchSuggestion); - }); + termSuggestion + .getEntries() + .get(0) + .getOptions() + .forEach( + suggestOption -> { + SearchSuggestion searchSuggestion = new SearchSuggestion(); + searchSuggestion.setText(String.valueOf(suggestOption.getText())); + searchSuggestion.setFrequency(suggestOption.getFreq()); + searchSuggestion.setScore(suggestOption.getScore()); + searchSuggestions.add(searchSuggestion); + }); } } return searchSuggestions; @@ -552,6 +628,7 @@ private List extractSearchSuggestions(@Nonnull SearchResponse /** * Adds nested sub-aggregation values to the aggregated results + * * @param aggs The aggregations to traverse. Could be null (base case) * @return A map from names to aggregation count values */ @@ -568,8 +645,9 @@ private static Map recursivelyAddNestedSubAggs(@Nullable Aggregati String key = bucket.getKeyAsString(); // Gets filtered sub aggregation doc count if exist Map subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); - for (Map.Entry subAggEntry: subAggs.entrySet()) { - aggResult.put(key + AGGREGATION_SEPARATOR_CHAR + subAggEntry.getKey(), subAggEntry.getValue()); + for (Map.Entry subAggEntry : subAggs.entrySet()) { + aggResult.put( + key + AGGREGATION_SEPARATOR_CHAR + subAggEntry.getKey(), subAggEntry.getValue()); } long docCount = bucket.getDocCount(); if (docCount > 0) { @@ -588,7 +666,8 @@ private static Map recursivelyAddNestedSubAggs(@Nullable Aggregati * @return a map with aggregation key and corresponding doc counts */ @Nonnull - private static Map extractTermAggregations(@Nonnull ParsedTerms terms, boolean includeZeroes) { + private static Map extractTermAggregations( + @Nonnull ParsedTerms terms, boolean includeZeroes) { final Map aggResult = new HashMap<>(); List bucketList = terms.getBuckets(); @@ -598,7 +677,9 @@ private static Map extractTermAggregations(@Nonnull ParsedTerms te // Gets filtered sub aggregation doc count if exist Map subAggs = recursivelyAddNestedSubAggs(bucket.getAggregations()); for (Map.Entry subAggEntry : subAggs.entrySet()) { - aggResult.put(String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, subAggEntry.getKey()), subAggEntry.getValue()); + aggResult.put( + String.format("%s%s%s", key, AGGREGATION_SEPARATOR_CHAR, subAggEntry.getKey()), + subAggEntry.getValue()); } long docCount = bucket.getDocCount(); if (includeZeroes || docCount > 0) { @@ -609,11 +690,10 @@ private static Map extractTermAggregations(@Nonnull ParsedTerms te return aggResult; } - /** - * Injects the missing conjunctive filters into the 
aggregations list.
-   */
-  public List<AggregationMetadata> addFiltersToAggregationMetadata(@Nonnull final List<AggregationMetadata> originalMetadata, @Nullable final Filter filter) {
-    if (filter == null) {
+  /** Injects the missing conjunctive filters into the aggregations list. */
+  public List<AggregationMetadata> addFiltersToAggregationMetadata(
+      @Nonnull final List<AggregationMetadata> originalMetadata, @Nullable final Filter filter) {
+    if (filter == null) {
       return originalMetadata;
     }
     if (filter.hasOr()) {
@@ -624,14 +704,18 @@ public List<AggregationMetadata> addFiltersToAggregationMetadata(@Nonnull final
     return originalMetadata;
   }

-  void addOrFiltersToAggregationMetadata(@Nonnull final ConjunctiveCriterionArray or, @Nonnull final List<AggregationMetadata> originalMetadata) {
+  void addOrFiltersToAggregationMetadata(
+      @Nonnull final ConjunctiveCriterionArray or,
+      @Nonnull final List<AggregationMetadata> originalMetadata) {
     for (ConjunctiveCriterion conjunction : or) {
       // For each item in the conjunction, inject an empty aggregation if necessary
       addCriteriaFiltersToAggregationMetadata(conjunction.getAnd(), originalMetadata);
     }
   }

-  private void addCriteriaFiltersToAggregationMetadata(@Nonnull final CriterionArray criteria, @Nonnull final List<AggregationMetadata> originalMetadata) {
+  private void addCriteriaFiltersToAggregationMetadata(
+      @Nonnull final CriterionArray criteria,
+      @Nonnull final List<AggregationMetadata> originalMetadata) {
     for (Criterion criterion : criteria) {
       addCriterionFiltersToAggregationMetadata(criterion, originalMetadata);
     }
@@ -642,19 +726,25 @@ private void addCriterionFiltersToAggregationMetadata(
       @Nonnull final List<AggregationMetadata> aggregationMetadata) {

     // We should never see duplicate aggregation for the same field in aggregation metadata list.
-    final Map<String, AggregationMetadata> aggregationMetadataMap = aggregationMetadata.stream().collect(Collectors.toMap(
-        AggregationMetadata::getName, agg -> agg));
+    final Map<String, AggregationMetadata> aggregationMetadataMap =
+        aggregationMetadata.stream()
+            .collect(Collectors.toMap(AggregationMetadata::getName, agg -> agg));

     // Map a filter criterion to a facet field (e.g. domains.keyword -> domains)
     final String finalFacetField = toFacetField(criterion.getField());

     if (finalFacetField == null) {
-      log.warn(String.format("Found invalid filter field for entity search. Invalid or unrecognized facet %s", criterion.getField()));
+      log.warn(
+          String.format(
+              "Found invalid filter field for entity search. Invalid or unrecognized facet %s",
+              criterion.getField()));
       return;
     }

-    // We don't want to add urn filters to the aggregations we return as a sidecar to search results.
-    // They are automatically added by searchAcrossLineage and we don't need them to show up in the filter panel.
+    // We don't want to add urn filters to the aggregations we return as a sidecar to search
+    // results.
+    // They are automatically added by searchAcrossLineage and we don't need them to show up in the
+    // filter panel.
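// ---------------------------------------------------------------------------
// [Editorial sketch, not part of the patch] The method being reflowed here
// first normalizes a filter criterion's field (e.g. "domains.keyword") to its
// facet name ("domains") via toFacetField before touching any aggregations.
// A minimal, self-contained illustration of that normalization, assuming only
// the ".keyword" suffix convention named in the comment above; the real
// helper may handle more cases:
class FacetFieldSketch {
  // Strip the Elasticsearch keyword sub-field suffix to recover the facet name.
  static String toFacetFieldSketch(String criterionField) {
    final String suffix = ".keyword";
    return criterionField.endsWith(suffix)
        ? criterionField.substring(0, criterionField.length() - suffix.length())
        : criterionField;
  }

  public static void main(String[] args) {
    // Prints "domains", mirroring the "domains.keyword -> domains" example.
    System.out.println(toFacetFieldSketch("domains.keyword"));
  }
}
// ---------------------------------------------------------------------------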
     if (finalFacetField.equals(URN_FILTER)) {
       return;
     }
@@ -667,7 +757,10 @@ private void addCriterionFiltersToAggregationMetadata(
        */
       AggregationMetadata originalAggMetadata = aggregationMetadataMap.get(finalFacetField);
       if (criterion.hasValues()) {
-        criterion.getValues().stream().forEach(value -> addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata));
+        criterion.getValues().stream()
+            .forEach(
+                value ->
+                    addMissingAggregationValueToAggregationMetadata(value, originalAggMetadata));
       } else {
         addMissingAggregationValueToAggregationMetadata(criterion.getValue(), originalAggMetadata);
       }
@@ -678,21 +771,25 @@ private void addCriterionFiltersToAggregationMetadata(
        * If there are no results for a particular facet, it will NOT be in the original aggregation set returned by
        * Elasticsearch.
        */
-      aggregationMetadata.add(buildAggregationMetadata(
-          finalFacetField,
-          _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField),
-          new LongMap(criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))),
-          new FilterValueArray(criterion.getValues().stream().map(value -> createFilterValue(value, 0L, true)).collect(
-              Collectors.toList())))
-      );
+      aggregationMetadata.add(
+          buildAggregationMetadata(
+              finalFacetField,
+              _filtersToDisplayName.getOrDefault(finalFacetField, finalFacetField),
+              new LongMap(
+                  criterion.getValues().stream().collect(Collectors.toMap(i -> i, i -> 0L))),
+              new FilterValueArray(
+                  criterion.getValues().stream()
+                      .map(value -> createFilterValue(value, 0L, true))
+                      .collect(Collectors.toList())))));
     }
   }

-  private void addMissingAggregationValueToAggregationMetadata(@Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) {
-    if (
-        originalMetadata.getAggregations().entrySet().stream().noneMatch(entry -> value.equals(entry.getKey()))
-        || originalMetadata.getFilterValues().stream().noneMatch(entry -> entry.getValue().equals(value))
-    ) {
+  private void addMissingAggregationValueToAggregationMetadata(
+      @Nonnull final String value, @Nonnull final AggregationMetadata originalMetadata) {
+    if (originalMetadata.getAggregations().entrySet().stream()
+            .noneMatch(entry -> value.equals(entry.getKey()))
+        || originalMetadata.getFilterValues().stream()
+            .noneMatch(entry -> entry.getValue().equals(value))) {
       // No aggregation found for filtered value -- inject one!
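// ---------------------------------------------------------------------------
// [Editorial sketch, not part of the patch] The two statements that follow
// inject a zero-count bucket for a selected filter value that Elasticsearch
// did not return, so the active filter still renders in the facet panel.
// The same idea on plain collections, independent of DataHub's
// AggregationMetadata/FilterValue types (all names here are illustrative):
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class ZeroBucketSketch {
  // Ensure every selected facet value has a bucket, defaulting absent ones to 0.
  static void injectMissing(Map<String, Long> buckets, List<String> selectedValues) {
    for (String value : selectedValues) {
      buckets.putIfAbsent(value, 0L);
    }
  }

  public static void main(String[] args) {
    Map<String, Long> buckets = new HashMap<>(Map.of("PROD", 42L));
    injectMissing(buckets, List.of("PROD", "DEV"));
    System.out.println(buckets); // {PROD=42, DEV=0} (map order may vary)
  }
}
// ---------------------------------------------------------------------------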
originalMetadata.getAggregations().put(value, 0L); originalMetadata.getFilterValues().add(createFilterValue(value, 0L, true)); @@ -712,8 +809,7 @@ private AggregationMetadata buildAggregationMetadata( } private List> getFacetFieldDisplayNameFromAnnotation( - @Nonnull final SearchableAnnotation annotation - ) { + @Nonnull final SearchableAnnotation annotation) { final List> facetsFromAnnotation = new ArrayList<>(); // Case 1: Default Keyword field if (annotation.isAddToFilters()) { @@ -721,9 +817,8 @@ private List> getFacetFieldDisplayNameFromAnnotation( } // Case 2: HasX boolean field if (annotation.isAddHasValuesToFilters() && annotation.getHasValuesFieldName().isPresent()) { - facetsFromAnnotation.add(Pair.of( - annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName() - )); + facetsFromAnnotation.add( + Pair.of(annotation.getHasValuesFieldName().get(), annotation.getHasValuesFilterName())); } return facetsFromAnnotation; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java index be64df3179a9d..b49218f4224a9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java @@ -1,6 +1,10 @@ package com.linkedin.metadata.search.elasticsearch.update; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.bulk.BulkProcessor; @@ -8,12 +12,6 @@ import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.support.WriteRequest; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - - @Slf4j public class BulkListener implements BulkProcessor.Listener { private static final Map INSTANCES = new HashMap<>(); @@ -21,6 +19,7 @@ public class BulkListener implements BulkProcessor.Listener { public static BulkListener getInstance() { return INSTANCES.computeIfAbsent(null, BulkListener::new); } + public static BulkListener getInstance(WriteRequest.RefreshPolicy refreshPolicy) { return INSTANCES.computeIfAbsent(refreshPolicy, BulkListener::new); } @@ -41,10 +40,18 @@ public void beforeBulk(long executionId, BulkRequest request) { @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { if (response.hasFailures()) { - log.error("Failed to feed bulk request. Number of events: " + response.getItems().length + " Took time ms: " - + response.getIngestTookInMillis() + " Message: " + response.buildFailureMessage()); + log.error( + "Failed to feed bulk request. Number of events: " + + response.getItems().length + + " Took time ms: " + + response.getIngestTookInMillis() + + " Message: " + + response.buildFailureMessage()); } else { - log.info("Successfully fed bulk request. Number of events: " + response.getItems().length + " Took time ms: " + log.info( + "Successfully fed bulk request. 
Number of events: " + + response.getItems().length + + " Took time ms: " + response.getIngestTookInMillis()); } incrementMetrics(response); @@ -53,20 +60,24 @@ public void afterBulk(long executionId, BulkRequest request, BulkResponse respon @Override public void afterBulk(long executionId, BulkRequest request, Throwable failure) { // Exception raised outside this method - log.error("Error feeding bulk request. No retries left. Request: {}", buildBulkRequestSummary(request), failure); + log.error( + "Error feeding bulk request. No retries left. Request: {}", + buildBulkRequestSummary(request), + failure); incrementMetrics(request, failure); } private static void incrementMetrics(BulkResponse response) { Arrays.stream(response.getItems()) - .map(req -> buildMetricName(req.getOpType(), req.status().name())) - .forEach(metricName -> MetricUtils.counter(BulkListener.class, metricName).inc()); + .map(req -> buildMetricName(req.getOpType(), req.status().name())) + .forEach(metricName -> MetricUtils.counter(BulkListener.class, metricName).inc()); } private static void incrementMetrics(BulkRequest request, Throwable failure) { request.requests().stream() - .map(req -> buildMetricName(req.opType(), "exception")) - .forEach(metricName -> MetricUtils.exceptionCounter(BulkListener.class, metricName, failure)); + .map(req -> buildMetricName(req.opType(), "exception")) + .forEach( + metricName -> MetricUtils.exceptionCounter(BulkListener.class, metricName, failure)); } private static String buildMetricName(DocWriteRequest.OpType opType, String status) { @@ -74,9 +85,12 @@ private static String buildMetricName(DocWriteRequest.OpType opType, String stat } public static String buildBulkRequestSummary(BulkRequest request) { - return request.requests().stream().map(req -> String.format( - "Failed to perform bulk request: index [%s], optype: [%s], type [%s], id [%s]", - req.index(), req.opType(), req.opType(), req.id()) - ).collect(Collectors.joining(";")); + return request.requests().stream() + .map( + req -> + String.format( + "Failed to perform bulk request: index [%s], optype: [%s], type [%s], id [%s]", + req.index(), req.opType(), req.opType(), req.id())) + .collect(Collectors.joining(";")); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java index a1e5b363d8a78..a2b9292eac6e4 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESBulkProcessor.java @@ -1,6 +1,9 @@ package com.linkedin.metadata.search.elasticsearch.update; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.io.Closeable; +import java.io.IOException; +import java.util.Optional; import lombok.AccessLevel; import lombok.Builder; import lombok.Getter; @@ -21,168 +24,181 @@ import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryRequest; -import java.io.Closeable; -import java.io.IOException; -import java.util.Optional; - - @Slf4j @Builder(builderMethodName = "hiddenBuilder") public class ESBulkProcessor implements Closeable { - private static final String ES_WRITES_METRIC = "num_elasticSearch_writes"; - private static final String ES_BATCHES_METRIC = "num_elasticSearch_batches_submitted"; - private static final String ES_DELETE_EXCEPTION_METRIC = "delete_by_query"; - private static 
final String ES_SUBMIT_DELETE_EXCEPTION_METRIC = "submit_delete_by_query_task"; - private static final String ES_SUBMIT_REINDEX_METRIC = "reindex_submit"; - private static final String ES_REINDEX_SUCCESS_METRIC = "reindex_success"; - private static final String ES_REINDEX_FAILED_METRIC = "reindex_failed"; - - public static ESBulkProcessor.ESBulkProcessorBuilder builder(RestHighLevelClient searchClient) { - return hiddenBuilder().searchClient(searchClient); - } - - @NonNull - private final RestHighLevelClient searchClient; - @Builder.Default - @NonNull - private Boolean async = false; - @Builder.Default - @NonNull - private Boolean batchDelete = false; - @Builder.Default - private Integer bulkRequestsLimit = 500; - @Builder.Default - private Integer bulkFlushPeriod = 1; - @Builder.Default - private Integer numRetries = 3; - @Builder.Default - private Long retryInterval = 1L; - @Builder.Default - private TimeValue defaultTimeout = TimeValue.timeValueMinutes(1); - @Getter - private final WriteRequest.RefreshPolicy writeRequestRefreshPolicy; - @Setter(AccessLevel.NONE) - @Getter(AccessLevel.NONE) - private final BulkProcessor bulkProcessor; - - private ESBulkProcessor(@NonNull RestHighLevelClient searchClient, @NonNull Boolean async, @NonNull Boolean batchDelete, - Integer bulkRequestsLimit, Integer bulkFlushPeriod, Integer numRetries, Long retryInterval, - TimeValue defaultTimeout, WriteRequest.RefreshPolicy writeRequestRefreshPolicy, - BulkProcessor ignored) { - this.searchClient = searchClient; - this.async = async; - this.batchDelete = batchDelete; - this.bulkRequestsLimit = bulkRequestsLimit; - this.bulkFlushPeriod = bulkFlushPeriod; - this.numRetries = numRetries; - this.retryInterval = retryInterval; - this.defaultTimeout = defaultTimeout; - this.writeRequestRefreshPolicy = writeRequestRefreshPolicy; - this.bulkProcessor = async ? toAsyncBulkProcessor() : toBulkProcessor(); - } - - public ESBulkProcessor add(DocWriteRequest request) { - MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(); - bulkProcessor.add(request); - return this; - } - - public Optional deleteByQuery(QueryBuilder queryBuilder, String... indices) { - return deleteByQuery(queryBuilder, true, bulkRequestsLimit, defaultTimeout, indices); - } - - public Optional deleteByQuery(QueryBuilder queryBuilder, boolean refresh, String... indices) { - return deleteByQuery(queryBuilder, refresh, bulkRequestsLimit, defaultTimeout, indices); - } - - public Optional deleteByQuery(QueryBuilder queryBuilder, boolean refresh, - int limit, TimeValue timeout, String... indices) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest() - .setQuery(queryBuilder) - .setBatchSize(limit) - .setMaxRetries(numRetries) - .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) - .setTimeout(timeout) - .setRefresh(refresh); - deleteByQueryRequest.indices(indices); - - try { - if (!batchDelete) { - // flush pending writes - bulkProcessor.flush(); - } - // perform delete after local flush - final BulkByScrollResponse deleteResponse = searchClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT); - MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(deleteResponse.getTotal()); - return Optional.of(deleteResponse); - } catch (Exception e) { - log.error("ERROR: Failed to delete by query. 
See stacktrace for a more detailed error:", e); - MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_DELETE_EXCEPTION_METRIC, e); - } - - return Optional.empty(); - } - public Optional deleteByQueryAsync(QueryBuilder queryBuilder, boolean refresh, - int limit, @Nullable TimeValue timeout, String... indices) { - DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest() + private static final String ES_WRITES_METRIC = "num_elasticSearch_writes"; + private static final String ES_BATCHES_METRIC = "num_elasticSearch_batches_submitted"; + private static final String ES_DELETE_EXCEPTION_METRIC = "delete_by_query"; + private static final String ES_SUBMIT_DELETE_EXCEPTION_METRIC = "submit_delete_by_query_task"; + private static final String ES_SUBMIT_REINDEX_METRIC = "reindex_submit"; + private static final String ES_REINDEX_SUCCESS_METRIC = "reindex_success"; + private static final String ES_REINDEX_FAILED_METRIC = "reindex_failed"; + + public static ESBulkProcessor.ESBulkProcessorBuilder builder(RestHighLevelClient searchClient) { + return hiddenBuilder().searchClient(searchClient); + } + + @NonNull private final RestHighLevelClient searchClient; + @Builder.Default @NonNull private Boolean async = false; + @Builder.Default @NonNull private Boolean batchDelete = false; + @Builder.Default private Integer bulkRequestsLimit = 500; + @Builder.Default private Integer bulkFlushPeriod = 1; + @Builder.Default private Integer numRetries = 3; + @Builder.Default private Long retryInterval = 1L; + @Builder.Default private TimeValue defaultTimeout = TimeValue.timeValueMinutes(1); + @Getter private final WriteRequest.RefreshPolicy writeRequestRefreshPolicy; + + @Setter(AccessLevel.NONE) + @Getter(AccessLevel.NONE) + private final BulkProcessor bulkProcessor; + + private ESBulkProcessor( + @NonNull RestHighLevelClient searchClient, + @NonNull Boolean async, + @NonNull Boolean batchDelete, + Integer bulkRequestsLimit, + Integer bulkFlushPeriod, + Integer numRetries, + Long retryInterval, + TimeValue defaultTimeout, + WriteRequest.RefreshPolicy writeRequestRefreshPolicy, + BulkProcessor ignored) { + this.searchClient = searchClient; + this.async = async; + this.batchDelete = batchDelete; + this.bulkRequestsLimit = bulkRequestsLimit; + this.bulkFlushPeriod = bulkFlushPeriod; + this.numRetries = numRetries; + this.retryInterval = retryInterval; + this.defaultTimeout = defaultTimeout; + this.writeRequestRefreshPolicy = writeRequestRefreshPolicy; + this.bulkProcessor = async ? toAsyncBulkProcessor() : toBulkProcessor(); + } + + public ESBulkProcessor add(DocWriteRequest request) { + MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(); + bulkProcessor.add(request); + return this; + } + + public Optional deleteByQuery( + QueryBuilder queryBuilder, String... indices) { + return deleteByQuery(queryBuilder, true, bulkRequestsLimit, defaultTimeout, indices); + } + + public Optional deleteByQuery( + QueryBuilder queryBuilder, boolean refresh, String... indices) { + return deleteByQuery(queryBuilder, refresh, bulkRequestsLimit, defaultTimeout, indices); + } + + public Optional deleteByQuery( + QueryBuilder queryBuilder, boolean refresh, int limit, TimeValue timeout, String... 
indices) { + DeleteByQueryRequest deleteByQueryRequest = + new DeleteByQueryRequest() .setQuery(queryBuilder) .setBatchSize(limit) .setMaxRetries(numRetries) .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) + .setTimeout(timeout) .setRefresh(refresh); - if (timeout != null) { - deleteByQueryRequest.setTimeout(timeout); - } - // count the number of conflicts, but do not abort the operation - deleteByQueryRequest.setConflicts("proceed"); - deleteByQueryRequest.indices(indices); - try { - // flush pending writes - bulkProcessor.flush(); - TaskSubmissionResponse resp = searchClient.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); - MetricUtils.counter(this.getClass(), ES_BATCHES_METRIC).inc(); - return Optional.of(resp); - } catch (Exception e) { - log.error("ERROR: Failed to submit a delete by query task. See stacktrace for a more detailed error:", e); - MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_SUBMIT_DELETE_EXCEPTION_METRIC, e); - } - return Optional.empty(); - } - - private BulkProcessor toBulkProcessor() { - return BulkProcessor.builder((request, bulkListener) -> { - try { - BulkResponse response = searchClient.bulk(request, RequestOptions.DEFAULT); - bulkListener.onResponse(response); - } catch (IOException e) { - bulkListener.onFailure(e); - throw new RuntimeException(e); - } - }, BulkListener.getInstance(writeRequestRefreshPolicy)) - .setBulkActions(bulkRequestsLimit) - .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) - // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other retry methods) - .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) - .build(); - } + deleteByQueryRequest.indices(indices); - private BulkProcessor toAsyncBulkProcessor() { - return BulkProcessor.builder((request, bulkListener) -> { - searchClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); - }, BulkListener.getInstance(writeRequestRefreshPolicy)) - .setBulkActions(bulkRequestsLimit) - .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) - // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other retry methods) - .setBackoffPolicy(BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) - .build(); + try { + if (!batchDelete) { + // flush pending writes + bulkProcessor.flush(); + } + // perform delete after local flush + final BulkByScrollResponse deleteResponse = + searchClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT); + MetricUtils.counter(this.getClass(), ES_WRITES_METRIC).inc(deleteResponse.getTotal()); + return Optional.of(deleteResponse); + } catch (Exception e) { + log.error("ERROR: Failed to delete by query. See stacktrace for a more detailed error:", e); + MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_DELETE_EXCEPTION_METRIC, e); } - @Override - public void close() throws IOException { - bulkProcessor.close(); + return Optional.empty(); + } + + public Optional deleteByQueryAsync( + QueryBuilder queryBuilder, + boolean refresh, + int limit, + @Nullable TimeValue timeout, + String... 
indices) { + DeleteByQueryRequest deleteByQueryRequest = + new DeleteByQueryRequest() + .setQuery(queryBuilder) + .setBatchSize(limit) + .setMaxRetries(numRetries) + .setRetryBackoffInitialTime(TimeValue.timeValueSeconds(retryInterval)) + .setRefresh(refresh); + if (timeout != null) { + deleteByQueryRequest.setTimeout(timeout); } - - public void flush() { - bulkProcessor.flush(); + // count the number of conflicts, but do not abort the operation + deleteByQueryRequest.setConflicts("proceed"); + deleteByQueryRequest.indices(indices); + try { + // flush pending writes + bulkProcessor.flush(); + TaskSubmissionResponse resp = + searchClient.submitDeleteByQueryTask(deleteByQueryRequest, RequestOptions.DEFAULT); + MetricUtils.counter(this.getClass(), ES_BATCHES_METRIC).inc(); + return Optional.of(resp); + } catch (Exception e) { + log.error( + "ERROR: Failed to submit a delete by query task. See stacktrace for a more detailed error:", + e); + MetricUtils.exceptionCounter(ESBulkProcessor.class, ES_SUBMIT_DELETE_EXCEPTION_METRIC, e); } + return Optional.empty(); + } + + private BulkProcessor toBulkProcessor() { + return BulkProcessor.builder( + (request, bulkListener) -> { + try { + BulkResponse response = searchClient.bulk(request, RequestOptions.DEFAULT); + bulkListener.onResponse(response); + } catch (IOException e) { + bulkListener.onFailure(e); + throw new RuntimeException(e); + } + }, + BulkListener.getInstance(writeRequestRefreshPolicy)) + .setBulkActions(bulkRequestsLimit) + .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) + // This retry is ONLY for "resource constraints", i.e. 429 errors (each request has other + // retry methods) + .setBackoffPolicy( + BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) + .build(); + } + + private BulkProcessor toAsyncBulkProcessor() { + return BulkProcessor.builder( + (request, bulkListener) -> { + searchClient.bulkAsync(request, RequestOptions.DEFAULT, bulkListener); + }, + BulkListener.getInstance(writeRequestRefreshPolicy)) + .setBulkActions(bulkRequestsLimit) + .setFlushInterval(TimeValue.timeValueSeconds(bulkFlushPeriod)) + // This retry is ONLY for "resource constraints", i.e. 
429 errors (each request has other + // retry methods) + .setBackoffPolicy( + BackoffPolicy.constantBackoff(TimeValue.timeValueSeconds(retryInterval), numRetries)) + .build(); + } + + @Override + public void close() throws IOException { + bulkProcessor.close(); + } + + public void flush() { + bulkProcessor.flush(); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java index edcdf5654028c..306352523118b 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/ESWriteDAO.java @@ -16,7 +16,6 @@ import org.opensearch.index.query.QueryBuilders; import org.opensearch.script.Script; - @Slf4j @RequiredArgsConstructor public class ESWriteDAO { @@ -34,10 +33,11 @@ public class ESWriteDAO { * @param document the document to update / insert * @param docId the ID of the document */ - public void upsertDocument(@Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { + public void upsertDocument( + @Nonnull String entityName, @Nonnull String document, @Nonnull String docId) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); - final UpdateRequest updateRequest = new UpdateRequest( - indexName, docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -57,12 +57,12 @@ public void deleteDocument(@Nonnull String entityName, @Nonnull String docId) { bulkProcessor.add(new DeleteRequest(indexName).id(docId)); } - /** - * Applies a script to a particular document - */ - public void applyScriptUpdate(@Nonnull String entityName, @Nonnull String docId, @Nonnull String script) { + /** Applies a script to a particular document */ + public void applyScriptUpdate( + @Nonnull String entityName, @Nonnull String docId, @Nonnull String script) { final String indexName = indexConvention.getIndexName(entityRegistry.getEntitySpec(entityName)); - UpdateRequest updateRequest = new UpdateRequest(indexName, docId) + UpdateRequest updateRequest = + new UpdateRequest(indexName, docId) .detectNoop(false) .scriptedUpsert(true) .retryOnConflict(numRetries) @@ -70,9 +70,7 @@ public void applyScriptUpdate(@Nonnull String entityName, @Nonnull String docId, bulkProcessor.add(updateRequest); } - /** - * Clear all documents in all the indices - */ + /** Clear all documents in all the indices */ public void clear() { String[] indices = getIndices(indexConvention.getAllEntityIndicesPattern()); bulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), indices); @@ -80,11 +78,12 @@ public void clear() { private String[] getIndices(String pattern) { try { - GetIndexResponse response = searchClient.indices().get(new GetIndexRequest(pattern), RequestOptions.DEFAULT); + GetIndexResponse response = + searchClient.indices().get(new GetIndexRequest(pattern), RequestOptions.DEFAULT); return response.getIndices(); } catch (IOException e) { log.error("Failed to get indices using pattern {}", pattern); - return new String[]{}; + return new String[] {}; } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java index fb146a9f4d8cc..d0bcec9b4ef40 100644 --- 
a/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/FeatureExtractor.java @@ -3,13 +3,8 @@ import com.linkedin.metadata.search.SearchEntity; import java.util.List; - -/** - * Interface for extractors that extract Features for each entity returned by search - */ +/** Interface for extractors that extract Features for each entity returned by search */ public interface FeatureExtractor { - /** - * Return the extracted features for each entity returned by search - */ + /** Return the extracted features for each entity returned by search */ List extractFeatures(List entities); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java index f1250ecd61021..2a9571b18b726 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/features/Features.java @@ -9,7 +9,6 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; - @Slf4j @Value public class Features { @@ -54,11 +53,15 @@ public static Features merge(@Nonnull Features features1, @Nonnull Features feat } @Nonnull - public static List merge(@Nonnull List featureList1, @Nonnull List featureList2) { + public static List merge( + @Nonnull List featureList1, @Nonnull List featureList2) { if (featureList1.size() != featureList2.size()) { - throw new IllegalArgumentException(String.format("Expected both lists to have the same number of elements. %s != %s", + throw new IllegalArgumentException( + String.format( + "Expected both lists to have the same number of elements. %s != %s", featureList1.size(), featureList2.size())); } - return Streams.zip(featureList1.stream(), featureList2.stream(), Features::merge).collect(Collectors.toList()); + return Streams.zip(featureList1.stream(), featureList2.stream(), Features::merge) + .collect(Collectors.toList()); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java index 4983cae3ddc27..555e90e189bc6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SearchRanker.java @@ -13,10 +13,7 @@ import lombok.SneakyThrows; import lombok.Value; - -/** - * In memory ranker that re-ranks results returned by the search backend - */ +/** In memory ranker that re-ranks results returned by the search backend */ public abstract class SearchRanker> { /** @@ -25,18 +22,19 @@ public abstract class SearchRanker> { public abstract List getFeatureExtractors(); /** - * Return a comparable score for each entity returned by search backend. The ranker will rank based on this score + * Return a comparable score for each entity returned by search backend. 
The ranker will rank + * based on this score */ public abstract U score(SearchEntity searchEntity); - /** - * Rank the input list of entities - */ + /** Rank the input list of entities */ public List rank(List originalList) { List entitiesToRank = originalList; if (!getFeatureExtractors().isEmpty()) { - entitiesToRank = Streams.zip(originalList.stream(), fetchFeatures(originalList).stream(), this::updateFeatures) - .collect(Collectors.toList()); + entitiesToRank = + Streams.zip( + originalList.stream(), fetchFeatures(originalList).stream(), this::updateFeatures) + .collect(Collectors.toList()); } return entitiesToRank.stream() .map(entity -> new ScoredEntity<>(entity, score(entity))) @@ -45,26 +43,30 @@ public List rank(List originalList) { .collect(Collectors.toList()); } - /** - * Fetch features for each entity returned using the feature extractors - */ + /** Fetch features for each entity returned using the feature extractors */ private List fetchFeatures(List originalList) { List originalFeatures = - originalList.stream().map(SearchEntity::getFeatures).map(Features::from).collect(Collectors.toList()); - return ConcurrencyUtils.transformAndCollectAsync(getFeatureExtractors(), - extractor -> extractor.extractFeatures(originalList)).stream().reduce(originalFeatures, Features::merge); + originalList.stream() + .map(SearchEntity::getFeatures) + .map(Features::from) + .collect(Collectors.toList()); + return ConcurrencyUtils.transformAndCollectAsync( + getFeatureExtractors(), extractor -> extractor.extractFeatures(originalList)) + .stream() + .reduce(originalFeatures, Features::merge); } - /** - * Add the extracted features into each search entity to return the features in the response - */ + /** Add the extracted features into each search entity to return the features in the response */ @SneakyThrows private SearchEntity updateFeatures(SearchEntity originalEntity, Features features) { - return originalEntity.clone() - .setFeatures(new DoubleMap(features.getNumericFeatures() - .entrySet() - .stream() - .collect(Collectors.toMap(entry -> entry.getKey().toString(), Map.Entry::getValue)))); + return originalEntity + .clone() + .setFeatures( + new DoubleMap( + features.getNumericFeatures().entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), Map.Entry::getValue)))); } @Value diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java index 7d009495262cf..c3ab1b49f0e07 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/ranker/SimpleRanker.java @@ -6,9 +6,9 @@ import java.util.List; import java.util.Optional; - /** - * Simple ranker that diversifies the results between different entities. For the same entity, returns the same order from elasticsearch + * Simple ranker that diversifies the results between different entities. 
For the same entity, + * returns the same order from elasticsearch */ public class SimpleRanker extends SearchRanker { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java index 49809cf933936..bfeb993390571 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java @@ -14,20 +14,16 @@ import com.linkedin.metadata.models.SearchableFieldSpec; import com.linkedin.metadata.models.annotation.SearchableAnnotation.FieldType; import com.linkedin.metadata.models.extractor.FieldExtractor; - import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.Setter; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - - /** * Class that provides a utility function that transforms the snapshot object into a search document */ @@ -47,24 +43,33 @@ public class SearchDocumentTransformer { private SystemEntityClient entityClient; - private static final String BROWSE_PATH_V2_DELIMITER = "␟"; + private static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public Optional transformSnapshot(final RecordTemplate snapshot, final EntitySpec entitySpec, - final Boolean forDelete) { + public Optional transformSnapshot( + final RecordTemplate snapshot, final EntitySpec entitySpec, final Boolean forDelete) { final Map> extractedSearchableFields = - FieldExtractor.extractFieldsFromSnapshot(snapshot, entitySpec, AspectSpec::getSearchableFieldSpecs, maxValueLength).entrySet() - // Delete expects urn to be preserved - .stream().filter(entry -> !forDelete || !"urn".equals(entry.getKey().getSearchableAnnotation().getFieldName())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + FieldExtractor.extractFieldsFromSnapshot( + snapshot, entitySpec, AspectSpec::getSearchableFieldSpecs, maxValueLength) + .entrySet() + // Delete expects urn to be preserved + .stream() + .filter( + entry -> + !forDelete + || !"urn".equals(entry.getKey().getSearchableAnnotation().getFieldName())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); final Map> extractedSearchScoreFields = - FieldExtractor.extractFieldsFromSnapshot(snapshot, entitySpec, AspectSpec::getSearchScoreFieldSpecs, maxValueLength); + FieldExtractor.extractFieldsFromSnapshot( + snapshot, entitySpec, AspectSpec::getSearchScoreFieldSpecs, maxValueLength); if (extractedSearchableFields.isEmpty() && extractedSearchScoreFields.isEmpty()) { return Optional.empty(); } final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", snapshot.data().get("urn").toString()); - extractedSearchableFields.forEach((key, value) -> setSearchableValue(key, value, searchDocument, forDelete)); - extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); + extractedSearchableFields.forEach( + (key, value) -> setSearchableValue(key, value, searchDocument, forDelete)); + extractedSearchScoreFields.forEach( + (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); return Optional.of(searchDocument.toString()); } @@ -83,51 +88,71 @@ public Optional transformAspect( if 
(!extractedSearchableFields.isEmpty() || !extractedSearchScoreFields.isEmpty()) { final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", urn.toString()); - extractedSearchableFields.forEach((key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); - extractedSearchScoreFields.forEach((key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); + extractedSearchableFields.forEach( + (key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); + extractedSearchScoreFields.forEach( + (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); result = Optional.of(searchDocument.toString()); } return result; } - public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List fieldValues, - final ObjectNode searchDocument, final Boolean forDelete) { + public void setSearchableValue( + final SearchableFieldSpec fieldSpec, + final List fieldValues, + final ObjectNode searchDocument, + final Boolean forDelete) { DataSchema.Type valueType = fieldSpec.getPegasusSchema().getType(); Optional firstValue = fieldValues.stream().findFirst(); boolean isArray = fieldSpec.isArray(); // Set hasValues field if exists - fieldSpec.getSearchableAnnotation().getHasValuesFieldName().ifPresent(fieldName -> { - if (forDelete) { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(false)); - return; - } - if (valueType == DataSchema.Type.BOOLEAN) { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode((Boolean) firstValue.orElse(false))); - } else { - searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(!fieldValues.isEmpty())); - } - }); + fieldSpec + .getSearchableAnnotation() + .getHasValuesFieldName() + .ifPresent( + fieldName -> { + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.booleanNode(false)); + return; + } + if (valueType == DataSchema.Type.BOOLEAN) { + searchDocument.set( + fieldName, + JsonNodeFactory.instance.booleanNode((Boolean) firstValue.orElse(false))); + } else { + searchDocument.set( + fieldName, JsonNodeFactory.instance.booleanNode(!fieldValues.isEmpty())); + } + }); // Set numValues field if exists - fieldSpec.getSearchableAnnotation().getNumValuesFieldName().ifPresent(fieldName -> { - if (forDelete) { - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) 0)); - return; - } - switch (valueType) { - case INT: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) firstValue.orElse(0))); - break; - case LONG: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Long) firstValue.orElse(0L))); - break; - default: - searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode(fieldValues.size())); - break; - } - }); + fieldSpec + .getSearchableAnnotation() + .getNumValuesFieldName() + .ifPresent( + fieldName -> { + if (forDelete) { + searchDocument.set(fieldName, JsonNodeFactory.instance.numberNode((Integer) 0)); + return; + } + switch (valueType) { + case INT: + searchDocument.set( + fieldName, + JsonNodeFactory.instance.numberNode((Integer) firstValue.orElse(0))); + break; + case LONG: + searchDocument.set( + fieldName, JsonNodeFactory.instance.numberNode((Long) firstValue.orElse(0L))); + break; + default: + searchDocument.set( + fieldName, JsonNodeFactory.instance.numberNode(fieldValues.size())); + break; + } + }); final String fieldName = fieldSpec.getSearchableAnnotation().getFieldName(); final FieldType fieldType = 
fieldSpec.getSearchableAnnotation().getFieldType(); @@ -143,26 +168,35 @@ public void setSearchableValue(final SearchableFieldSpec fieldSpec, final List getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxArrayLength)) + .forEach( + value -> getNodeForValue(valueType, value, fieldType).ifPresent(arrayNode::add)); searchDocument.set(fieldName, arrayNode); } } else if (valueType == DataSchema.Type.MAP) { ObjectNode dictDoc = JsonNodeFactory.instance.objectNode(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxObjectKeys)).forEach(fieldValue -> { - String[] keyValues = fieldValue.toString().split("="); - String key = keyValues[0]; - String value = keyValues[1]; - dictDoc.put(key, value); - }); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxObjectKeys)) + .forEach( + fieldValue -> { + String[] keyValues = fieldValue.toString().split("="); + String key = keyValues[0]; + String value = keyValues[1]; + dictDoc.put(key, value); + }); searchDocument.set(fieldName, dictDoc); } else if (!fieldValues.isEmpty()) { - getNodeForValue(valueType, fieldValues.get(0), fieldType).ifPresent(node -> searchDocument.set(fieldName, node)); + getNodeForValue(valueType, fieldValues.get(0), fieldType) + .ifPresent(node -> searchDocument.set(fieldName, node)); } } - public void setSearchScoreValue(final SearchScoreFieldSpec fieldSpec, final List fieldValues, - final ObjectNode searchDocument, final Boolean forDelete) { + public void setSearchScoreValue( + final SearchScoreFieldSpec fieldSpec, + final List fieldValues, + final ObjectNode searchDocument, + final Boolean forDelete) { DataSchema.Type valueType = fieldSpec.getPegasusSchema().getType(); final String fieldName = fieldSpec.getSearchScoreAnnotation().getFieldName(); @@ -193,12 +227,14 @@ public void setSearchScoreValue(final SearchScoreFieldSpec fieldSpec, final List default: // Only the above types are supported throw new IllegalArgumentException( - String.format("SearchScore fields must be a numeric type: field %s, value %s", fieldName, fieldValue)); + String.format( + "SearchScore fields must be a numeric type: field %s, value %s", + fieldName, fieldValue)); } } - private Optional getNodeForValue(final DataSchema.Type schemaFieldType, final Object fieldValue, - final FieldType fieldType) { + private Optional getNodeForValue( + final DataSchema.Type schemaFieldType, final Object fieldValue, final FieldType fieldType) { switch (schemaFieldType) { case BOOLEAN: return Optional.of(JsonNodeFactory.instance.booleanNode((Boolean) fieldValue)); @@ -206,30 +242,34 @@ private Optional getNodeForValue(final DataSchema.Type schemaFieldType return Optional.of(JsonNodeFactory.instance.numberNode((Integer) fieldValue)); case LONG: return Optional.of(JsonNodeFactory.instance.numberNode((Long) fieldValue)); - // By default run toString + // By default run toString default: String value = fieldValue.toString(); // If index type is BROWSE_PATH, make sure the value starts with a slash if (fieldType == FieldType.BROWSE_PATH && !value.startsWith("/")) { value = "/" + value; } - return value.isEmpty() ? Optional.empty() + return value.isEmpty() + ? Optional.empty() : Optional.of(JsonNodeFactory.instance.textNode(fieldValue.toString())); } } /** - * The browsePathsV2 aspect is a list of objects and the @Searchable annotation specifies a - * list of strings that we receive. 
However, we want to aggregate those strings and store - * as a single string in ElasticSearch so we can do prefix matching against it. + * The browsePathsV2 aspect is a list of objects and the @Searchable annotation specifies a list + * of strings that we receive. However, we want to aggregate those strings and store as a single + * string in ElasticSearch so we can do prefix matching against it. */ private String getBrowsePathV2Value(@Nonnull final List fieldValues) { List stringValues = new ArrayList<>(); - fieldValues.subList(0, Math.min(fieldValues.size(), maxArrayLength)).forEach(value -> { - if (value instanceof String) { - stringValues.add((String) value); - } - }); + fieldValues + .subList(0, Math.min(fieldValues.size(), maxArrayLength)) + .forEach( + value -> { + if (value instanceof String) { + stringValues.add((String) value); + } + }); String aggregatedValue = String.join(BROWSE_PATH_V2_DELIMITER, stringValues); // ensure browse path v2 starts with our delimiter if it's not empty if (!aggregatedValue.equals("") && !aggregatedValue.startsWith(BROWSE_PATH_V2_DELIMITER)) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java index a4b59c30607a3..af0f537de8629 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathUtils.java @@ -23,39 +23,59 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class BrowsePathUtils { public static String getDefaultBrowsePath( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, - @Nonnull Character dataPlatformDelimiter) throws URISyntaxException { + @Nonnull Character dataPlatformDelimiter) + throws URISyntaxException { switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataPlatformKey dpKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKey( - dsKey.getPlatform(), - getKeyAspectSpec(dsKey.getPlatform().getEntityType(), - entityRegistry)); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataPlatformKey dpKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKey( + dsKey.getPlatform(), + getKeyAspectSpec(dsKey.getPlatform().getEntityType(), entityRegistry)); String datasetNamePath = getDatasetPath(dsKey.getName(), dataPlatformDelimiter); - return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + datasetNamePath).toLowerCase(); + return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + datasetNamePath) + .toLowerCase(); case Constants.CHART_ENTITY_NAME: - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return ("/" + chartKey.getDashboardTool()); case Constants.DASHBOARD_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. 
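// ---------------------------------------------------------------------------
// [Editorial sketch, not part of the patch] The dataset branch earlier in this
// hunk assembles a default browse path as "/<origin>/<platform><datasetPath>",
// lowercased, where getDatasetPath expands the dataset name on the platform's
// delimiter. A self-contained approximation of that shape; defaultBrowsePath
// below is an illustrative stand-in, not the actual helper:
class BrowsePathSketch {
  static String defaultBrowsePath(String origin, String platform, String name, char delimiter) {
    // Turn e.g. "db.schema.table" with delimiter '.' into "/db/schema/table".
    String datasetPath = "/" + name.replace(delimiter, '/');
    return ("/" + origin + "/" + platform + datasetPath).toLowerCase();
  }

  public static void main(String[] args) {
    // Prints "/prod/snowflake/db/schema/table" under these assumptions.
    System.out.println(defaultBrowsePath("PROD", "snowflake", "db.schema.table", '.'));
  }
}
// ---------------------------------------------------------------------------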
- DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return ("/" + dashboardKey.getDashboardTool()).toLowerCase(); case Constants.DATA_FLOW_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster()) - .toLowerCase(); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster()).toLowerCase(); case Constants.DATA_JOB_ENTITY_NAME: // TODO -> Improve the quality of our browse path here. - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getCluster()).toLowerCase(); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getCluster()) + .toLowerCase(); default: return ""; } @@ -65,60 +85,130 @@ public static String getDefaultBrowsePath( public static Urn buildDataPlatformUrn(Urn urn, EntityRegistry entityRegistry) { switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return dsKey.getPlatform(); case Constants.CHART_ENTITY_NAME: - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, chartKey.getDashboardTool())); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, chartKey.getDashboardTool())); case Constants.DASHBOARD_ENTITY_NAME: - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, dashboardKey.getDashboardTool())); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, dashboardKey.getDashboardTool())); case 
Constants.DATA_FLOW_ENTITY_NAME: - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, dataFlowKey.getOrchestrator())); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, dataFlowKey.getOrchestrator())); case Constants.DATA_JOB_ENTITY_NAME: - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, parentFlowKey.getOrchestrator())); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, parentFlowKey.getOrchestrator())); case Constants.NOTEBOOK_ENTITY_NAME: - NotebookKey notebookKey = (NotebookKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return UrnUtils.getUrn(String.format("urn:li:%s:%s", Constants.DATA_PLATFORM_ENTITY_NAME, notebookKey.getNotebookTool())); + NotebookKey notebookKey = + (NotebookKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return UrnUtils.getUrn( + String.format( + "urn:li:%s:%s", + Constants.DATA_PLATFORM_ENTITY_NAME, notebookKey.getNotebookTool())); default: // Could not resolve a data platform return null; } } - public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRegistry) throws URISyntaxException { + public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRegistry) + throws URISyntaxException { switch (urn.getEntityType()) { case "dataset": - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataPlatformKey dpKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKey( - dsKey.getPlatform(), - getKeyAspectSpec(dsKey.getPlatform().getEntityType(), - entityRegistry)); - return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + "/" - + dsKey.getName()).replace('.', '/').toLowerCase(); + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataPlatformKey dpKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKey( + dsKey.getPlatform(), + getKeyAspectSpec(dsKey.getPlatform().getEntityType(), entityRegistry)); + return ("/" + dsKey.getOrigin() + "/" + dpKey.getPlatformName() + "/" + dsKey.getName()) + .replace('.', '/') + .toLowerCase(); case "chart": - ChartKey chartKey = (ChartKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + chartKey.getDashboardTool() + "/" + 
chartKey.getChartId()).toLowerCase(); + ChartKey chartKey = + (ChartKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + chartKey.getDashboardTool() + "/" + chartKey.getChartId()).toLowerCase(); case "dashboard": - DashboardKey dashboardKey = (DashboardKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dashboardKey.getDashboardTool() + "/" + dashboardKey.getDashboardId()).toLowerCase(); + DashboardKey dashboardKey = + (DashboardKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + dashboardKey.getDashboardTool() + "/" + dashboardKey.getDashboardId()) + .toLowerCase(); case "dataFlow": - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - return ("/" + dataFlowKey.getOrchestrator() + "/" + dataFlowKey.getCluster() + "/" + dataFlowKey.getFlowId()) + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + return ("/" + + dataFlowKey.getOrchestrator() + + "/" + + dataFlowKey.getCluster() + + "/" + + dataFlowKey.getFlowId()) .toLowerCase(); case "dataJob": - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - DataFlowKey parentFlowKey = (DataFlowKey) EntityKeyUtils.convertUrnToEntityKey(dataJobKey.getFlow(), - getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); - return ("/" + parentFlowKey.getOrchestrator() + "/" + parentFlowKey.getFlowId() + "/" - + dataJobKey.getJobId()).toLowerCase(); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + DataFlowKey parentFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKey( + dataJobKey.getFlow(), + getKeyAspectSpec(dataJobKey.getFlow().getEntityType(), entityRegistry)); + return ("/" + + parentFlowKey.getOrchestrator() + + "/" + + parentFlowKey.getFlowId() + + "/" + + dataJobKey.getJobId()) + .toLowerCase(); case "glossaryTerm": // TODO: Is this the best way to represent glossary term key? - GlossaryTermKey glossaryTermKey = (GlossaryTermKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + GlossaryTermKey glossaryTermKey = + (GlossaryTermKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); return "/" + glossaryTermKey.getName().replace('.', '/').toLowerCase(); default: return ""; @@ -126,26 +216,28 @@ public static String getLegacyDefaultBrowsePath(Urn urn, EntityRegistry entityRe } /** - * Attempts to convert a dataset name into a proper browse path by splitting it using the Data Platform delimiter. - * If there are not > 1 name parts, then an empty string will be returned. + * Attempts to convert a dataset name into a proper browse path by splitting it using the Data + * Platform delimiter. If there are not > 1 name parts, then an empty string will be returned. 
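   * (e.g. with '.' as the delimiter, a dataset named "db.schema.table" yields the path "/db/schema")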
*/ - private static String getDatasetPath(@Nonnull final String datasetName, @Nonnull final Character delimiter) { + private static String getDatasetPath( + @Nonnull final String datasetName, @Nonnull final Character delimiter) { if (datasetName.contains(delimiter.toString())) { - final List datasetNamePathParts = Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString()))); + final List datasetNamePathParts = + Arrays.asList(datasetName.split(Pattern.quote(delimiter.toString()))); System.out.println(datasetNamePathParts); // Omit the name from the path. - final String datasetPath = String.join("/", datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1)); + final String datasetPath = + String.join("/", datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1)); return datasetPath.startsWith("/") ? datasetPath : String.format("/%s", datasetPath); } return ""; } protected static AspectSpec getKeyAspectSpec( - final String entityName, - final EntityRegistry registry) { + final String entityName, final EntityRegistry registry) { final EntitySpec spec = registry.getEntitySpec(entityName); return spec.getKeyAspectSpec(); } - private BrowsePathUtils() { } -} \ No newline at end of file + private BrowsePathUtils() {} +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java index a7f5ea7a51e29..961167663e11f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/BrowsePathV2Utils.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.BrowsePathEntry; import com.linkedin.common.BrowsePathEntryArray; import com.linkedin.common.BrowsePathsV2; @@ -14,10 +16,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; @@ -25,52 +23,63 @@ import java.util.List; import java.util.regex.Pattern; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class BrowsePathV2Utils { - final private static String DEFAULT_FOLDER_NAME = "Default"; + private static final String DEFAULT_FOLDER_NAME = "Default"; /** * Generates a default browsePathsV2 aspect for a given urn. * - * If the entity has containers, get its whole container path and set those urns in the path of browsePathsV2. - * If it's a dataset, generate the path from the dataset name like we do for default browsePaths V1. - * If it's a data job, set its parent data flow in the path. - * For everything else, place it in a "Default" folder so we can still navigate to it through browse in the UI. - * This default method should be unneeded once ingestion produces higher quality browsePathsV2 aspects. + *
If the entity has containers, get its whole container path and set those urns in the path of + * browsePathsV2. If it's a dataset, generate the path from the dataset name like we do for + * default browsePaths V1. If it's a data job, set its parent data flow in the path. For + * everything else, place it in a "Default" folder so we can still navigate to it through browse + * in the UI. This default method should be unneeded once ingestion produces higher quality + * browsePathsV2 aspects. */ public static BrowsePathsV2 getDefaultBrowsePathV2( @Nonnull Urn urn, @Nonnull EntityRegistry entityRegistry, @Nonnull Character dataPlatformDelimiter, @Nonnull EntityService entityService, - boolean useContainerPaths) throws URISyntaxException { + boolean useContainerPaths) + throws URISyntaxException { BrowsePathsV2 result = new BrowsePathsV2(); BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); switch (urn.getEntityType()) { case Constants.DATASET_ENTITY_NAME: - DatasetKey dsKey = (DatasetKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - BrowsePathEntryArray datasetContainerPathEntries = useContainerPaths ? getContainerPathEntries(urn, entityService) : null; + DatasetKey dsKey = + (DatasetKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + BrowsePathEntryArray datasetContainerPathEntries = + useContainerPaths ? getContainerPathEntries(urn, entityService) : null; if (useContainerPaths && datasetContainerPathEntries.size() > 0) { browsePathEntries.addAll(datasetContainerPathEntries); } else { - BrowsePathEntryArray defaultDatasetPathEntries = getDefaultDatasetPathEntries(dsKey.getName(), dataPlatformDelimiter); + BrowsePathEntryArray defaultDatasetPathEntries = + getDefaultDatasetPathEntries(dsKey.getName(), dataPlatformDelimiter); if (defaultDatasetPathEntries.size() > 0) { - browsePathEntries.addAll(getDefaultDatasetPathEntries(dsKey.getName().toLowerCase(), dataPlatformDelimiter)); + browsePathEntries.addAll( + getDefaultDatasetPathEntries(dsKey.getName().toLowerCase(), dataPlatformDelimiter)); } else { browsePathEntries.add(createBrowsePathEntry(DEFAULT_FOLDER_NAME, null)); } } break; - // Some sources produce charts and dashboards with containers. If we have containers, use them, otherwise use default folder + // Some sources produce charts and dashboards with containers. If we have containers, use + // them, otherwise use default folder case Constants.CHART_ENTITY_NAME: case Constants.DASHBOARD_ENTITY_NAME: - BrowsePathEntryArray containerPathEntries = useContainerPaths ? getContainerPathEntries(urn, entityService) : null; + BrowsePathEntryArray containerPathEntries = + useContainerPaths ? 
getContainerPathEntries(urn, entityService) : null; if (useContainerPaths && containerPathEntries.size() > 0) { browsePathEntries.addAll(containerPathEntries); } else { @@ -78,8 +87,12 @@ public static BrowsePathsV2 getDefaultBrowsePathV2( } break; case Constants.DATA_JOB_ENTITY_NAME: - DataJobKey dataJobKey = (DataJobKey) EntityKeyUtils.convertUrnToEntityKey(urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); - browsePathEntries.add(createBrowsePathEntry(dataJobKey.getFlow().toString(), dataJobKey.getFlow())); + DataJobKey dataJobKey = + (DataJobKey) + EntityKeyUtils.convertUrnToEntityKey( + urn, getKeyAspectSpec(urn.getEntityType(), entityRegistry)); + browsePathEntries.add( + createBrowsePathEntry(dataJobKey.getFlow().toString(), dataJobKey.getFlow())); break; default: browsePathEntries.add(createBrowsePathEntry(DEFAULT_FOLDER_NAME, null)); @@ -99,15 +112,15 @@ private static BrowsePathEntry createBrowsePathEntry(@Nonnull String id, @Nullab return pathEntry; } - private static void aggregateParentContainers(List containerUrns, Urn entityUrn, EntityService entityService) { + private static void aggregateParentContainers( + List containerUrns, Urn entityUrn, EntityService entityService) { try { - EntityResponse entityResponse = entityService.getEntityV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME) - ); + EntityResponse entityResponse = + entityService.getEntityV2( + entityUrn.getEntityType(), entityUrn, Collections.singleton(CONTAINER_ASPECT_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); @@ -116,50 +129,58 @@ private static void aggregateParentContainers(List containerUrns, Urn entit aggregateParentContainers(containerUrns, containerUrn, entityService); } } catch (Exception e) { - log.error(String.format("Error getting containers for entity with urn %s while adding default browsePathV2", entityUrn), e); + log.error( + String.format( + "Error getting containers for entity with urn %s while adding default browsePathV2", + entityUrn), + e); } } /** - * Gets the path of containers for a given entity to create a browsePathV2 off of. - * Recursively call aggregateParentContainers to get the full container path to be included in this path. + * Gets the path of containers for a given entity to create a browsePathV2 off of. Recursively + * call aggregateParentContainers to get the full container path to be included in this path. 
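   * (e.g. a dataset nested in database and schema containers gets one path entry per container urn)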
*/ - private static BrowsePathEntryArray getContainerPathEntries(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { + private static BrowsePathEntryArray getContainerPathEntries( + @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); final List containerUrns = new ArrayList<>(); aggregateParentContainers(containerUrns, entityUrn, entityService); - containerUrns.forEach(urn -> { - browsePathEntries.add(createBrowsePathEntry(urn.toString(), urn)); - }); + containerUrns.forEach( + urn -> { + browsePathEntries.add(createBrowsePathEntry(urn.toString(), urn)); + }); return browsePathEntries; } /** - * Attempts to convert a dataset name into a proper browse path by splitting it using the Data Platform delimiter. - * If there are not > 1 name parts, then an empty string will be returned. + * Attempts to convert a dataset name into a proper browse path by splitting it using the Data + * Platform delimiter. If there are not > 1 name parts, then an empty string will be returned. */ - private static BrowsePathEntryArray getDefaultDatasetPathEntries(@Nonnull final String datasetName, @Nonnull final Character delimiter) { + private static BrowsePathEntryArray getDefaultDatasetPathEntries( + @Nonnull final String datasetName, @Nonnull final Character delimiter) { BrowsePathEntryArray browsePathEntries = new BrowsePathEntryArray(); if (datasetName.contains(delimiter.toString())) { - final List datasetNamePathParts = Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString()))) + final List datasetNamePathParts = + Arrays.stream(datasetName.split(Pattern.quote(delimiter.toString()))) .filter((name) -> !name.isEmpty()) .collect(Collectors.toList()); // Omit the name from the path. 
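      // e.g. "db.schema.table" split on '.' yields the folder entries "db" and "schema"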
- datasetNamePathParts.subList(0, datasetNamePathParts.size() - 1).forEach((part -> { - browsePathEntries.add(createBrowsePathEntry(part, null)); - })); + datasetNamePathParts + .subList(0, datasetNamePathParts.size() - 1) + .forEach( + (part -> { + browsePathEntries.add(createBrowsePathEntry(part, null)); + })); } return browsePathEntries; } protected static AspectSpec getKeyAspectSpec( - final String entityName, - final EntityRegistry registry) { + final String entityName, final EntityRegistry registry) { final EntitySpec spec = registry.getEntitySpec(entityName); return spec.getKeyAspectSpec(); } - private BrowsePathV2Utils() { - - } + private BrowsePathV2Utils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 53765acb8e29e..982b5c8d5f367 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; +import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.EntitySpec; @@ -19,8 +23,8 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RequestOptions; import org.apache.commons.lang.StringUtils; +import org.opensearch.client.RequestOptions; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -35,14 +39,7 @@ import org.opensearch.search.suggest.SuggestionBuilder; import org.opensearch.search.suggest.term.TermSuggestionBuilder; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.KEYWORD_FIELDS; -import static com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig.PATH_HIERARCHY_FIELDS; -import static com.linkedin.metadata.search.utils.SearchUtils.isUrn; - - -/** - * TODO: Add more robust unit tests for this critical class. - */ +/** TODO: Add more robust unit tests for this critical class. 
*/ @Slf4j public class ESUtils { @@ -64,33 +61,43 @@ public class ESUtils { public static final String TOKEN_COUNT_FIELD_TYPE = "token_count"; // End of field types - public static final Set FIELD_TYPES_STORED_AS_KEYWORD = Set.of( - SearchableAnnotation.FieldType.KEYWORD, - SearchableAnnotation.FieldType.TEXT, - SearchableAnnotation.FieldType.TEXT_PARTIAL, - SearchableAnnotation.FieldType.WORD_GRAM); - public static final Set FIELD_TYPES_STORED_AS_TEXT = Set.of( - SearchableAnnotation.FieldType.BROWSE_PATH, - SearchableAnnotation.FieldType.BROWSE_PATH_V2, - SearchableAnnotation.FieldType.URN, - SearchableAnnotation.FieldType.URN_PARTIAL); + public static final Set FIELD_TYPES_STORED_AS_KEYWORD = + Set.of( + SearchableAnnotation.FieldType.KEYWORD, + SearchableAnnotation.FieldType.TEXT, + SearchableAnnotation.FieldType.TEXT_PARTIAL, + SearchableAnnotation.FieldType.WORD_GRAM); + public static final Set FIELD_TYPES_STORED_AS_TEXT = + Set.of( + SearchableAnnotation.FieldType.BROWSE_PATH, + SearchableAnnotation.FieldType.BROWSE_PATH_V2, + SearchableAnnotation.FieldType.URN, + SearchableAnnotation.FieldType.URN_PARTIAL); public static final String ENTITY_NAME_FIELD = "_entityName"; public static final String NAME_SUGGESTION = "nameSuggestion"; - // we use this to make sure we filter for editable & non-editable fields. Also expands out top-level properties + // we use this to make sure we filter for editable & non-editable fields. Also expands out + // top-level properties // to field level properties - public static final Map> FIELDS_TO_EXPANDED_FIELDS_LIST = new HashMap>() {{ - put("tags", ImmutableList.of("tags", "fieldTags", "editedFieldTags")); - put("glossaryTerms", ImmutableList.of("glossaryTerms", "fieldGlossaryTerms", "editedFieldGlossaryTerms")); - put("fieldTags", ImmutableList.of("fieldTags", "editedFieldTags")); - put("fieldGlossaryTerms", ImmutableList.of("fieldGlossaryTerms", "editedFieldGlossaryTerms")); - put("fieldDescriptions", ImmutableList.of("fieldDescriptions", "editedFieldDescriptions")); - put("description", ImmutableList.of("description", "editedDescription")); - }}; - - public static final Set BOOLEAN_FIELDS = ImmutableSet.of( - "removed" - ); + public static final Map> FIELDS_TO_EXPANDED_FIELDS_LIST = + new HashMap>() { + { + put("tags", ImmutableList.of("tags", "fieldTags", "editedFieldTags")); + put( + "glossaryTerms", + ImmutableList.of("glossaryTerms", "fieldGlossaryTerms", "editedFieldGlossaryTerms")); + put("fieldTags", ImmutableList.of("fieldTags", "editedFieldTags")); + put( + "fieldGlossaryTerms", + ImmutableList.of("fieldGlossaryTerms", "editedFieldGlossaryTerms")); + put( + "fieldDescriptions", + ImmutableList.of("fieldDescriptions", "editedFieldDescriptions")); + put("description", ImmutableList.of("description", "editedDescription")); + } + }; + + public static final Set BOOLEAN_FIELDS = ImmutableSet.of("removed"); /* * Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/regexp-syntax.html for list of reserved @@ -98,17 +105,17 @@ public class ESUtils { */ private static final String ELASTICSEARCH_REGEXP_RESERVED_CHARACTERS = "?+*|{}[]()#@&<>~"; - private ESUtils() { - - } + private ESUtils() {} /** * Constructs the filter query given filter map. * - *
Multiple values can be selected for a filter, and it is currently modeled as string separated by comma + *
Multiple values can be selected for a filter, and it is currently modeled as string + * separated by comma * * @param filter the search filter - * @param isTimeseries whether filtering on timeseries index which has differing field type conventions + * @param isTimeseries whether filtering on timeseries index which has differing field type + * conventions * @return built filter query */ @Nonnull @@ -119,65 +126,82 @@ public static BoolQueryBuilder buildFilterQuery(@Nullable Filter filter, boolean } if (filter.getOr() != null) { // If caller is using the new Filters API, build boolean query from that. - filter.getOr().forEach(or -> finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); + filter + .getOr() + .forEach( + or -> + finalQueryBuilder.should(ESUtils.buildConjunctiveFilterQuery(or, isTimeseries))); } else if (filter.getCriteria() != null) { // Otherwise, build boolean query from the deprecated "criteria" field. log.warn("Received query Filter with a deprecated field 'criteria'. Use 'or' instead."); final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); - filter.getCriteria().forEach(criterion -> { - if (!criterion.getValue().trim().isEmpty() || criterion.hasValues() - || criterion.getCondition() == Condition.IS_NULL) { - andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } - }); + filter + .getCriteria() + .forEach( + criterion -> { + if (!criterion.getValue().trim().isEmpty() + || criterion.hasValues() + || criterion.getCondition() == Condition.IS_NULL) { + andQueryBuilder.must(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } + }); finalQueryBuilder.should(andQueryBuilder); } return finalQueryBuilder; } @Nonnull - public static BoolQueryBuilder buildConjunctiveFilterQuery(@Nonnull ConjunctiveCriterion conjunctiveCriterion, - boolean isTimeseries) { + public static BoolQueryBuilder buildConjunctiveFilterQuery( + @Nonnull ConjunctiveCriterion conjunctiveCriterion, boolean isTimeseries) { final BoolQueryBuilder andQueryBuilder = new BoolQueryBuilder(); - conjunctiveCriterion.getAnd().forEach(criterion -> { - if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) - || !criterion.getValue().trim().isEmpty() || criterion.hasValues()) { - if (!criterion.isNegated()) { - // `filter` instead of `must` (enables caching and bypasses scoring) - andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } else { - andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); - } - } - }); + conjunctiveCriterion + .getAnd() + .forEach( + criterion -> { + if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) + || !criterion.getValue().trim().isEmpty() + || criterion.hasValues()) { + if (!criterion.isNegated()) { + // `filter` instead of `must` (enables caching and bypasses scoring) + andQueryBuilder.filter(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } else { + andQueryBuilder.mustNot(getQueryBuilderFromCriterion(criterion, isTimeseries)); + } + } + }); return andQueryBuilder; } /** - * Builds search query given a {@link Criterion}, containing field, value and association/condition between the two. + * Builds search query given a {@link Criterion}, containing field, value and + * association/condition between the two. * - *
If the condition between a field and value (specified in {@link Criterion}) is EQUAL, we construct a Terms query. - * In this case, a field can take multiple values, specified using comma as a delimiter - this method will split - * tokens accordingly. This is done because currently there is no support of associating two different {@link Criterion} - * in a {@link Filter} with an OR operator - default operator is AND. + *
If the condition between a field and value (specified in {@link Criterion}) is EQUAL, we + * construct a Terms query. In this case, a field can take multiple values, specified using comma + * as a delimiter - this method will split tokens accordingly. This is done because currently + * there is no support of associating two different {@link Criterion} in a {@link Filter} with an + * OR operator - default operator is AND. * - *
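   * (e.g. the criterion value "PROD,DEV" is split and matches documents with either token)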
This approach of supporting multiple values using comma as delimiter, prevents us from specifying a value that has comma - * as one of it's characters. This is particularly true when one of the values is an urn e.g. "urn:li:example:(1,2,3)". - * Hence we do not split the value (using comma as delimiter) if the value starts with "urn:li:". - * TODO(https://github.com/datahub-project/datahub-gma/issues/51): support multiple values a field can take without using - * delimiters like comma. + *
This approach of supporting multiple values using comma as delimiter, prevents us from + * specifying a value that has comma as one of it's characters. This is particularly true when one + * of the values is an urn e.g. "urn:li:example:(1,2,3)". Hence we do not split the value (using + * comma as delimiter) if the value starts with "urn:li:". + * TODO(https://github.com/datahub-project/datahub-gma/issues/51): support multiple values a field + * can take without using delimiters like comma. * - *
If the condition between a field and value is not the same as EQUAL, a Range query is constructed. This - * condition does not support multiple values for the same field. + *
If the condition between a field and value is not the same as EQUAL, a Range query is + * constructed. This condition does not support multiple values for the same field. * - *
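   * (e.g. GREATER_THAN with value "100" becomes a range query with an exclusive lower bound of 100)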
When CONTAIN, START_WITH and END_WITH conditions are used, the underlying logic is using wildcard query which is - * not performant according to ES. For details, please refer to: + *
When CONTAIN, START_WITH and END_WITH conditions are used, the underlying logic is using + * wildcard query which is not performant according to ES. For details, please refer to: * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-wildcard-query.html#wildcard-query-field-params * - * @param criterion {@link Criterion} single criterion which contains field, value and a comparison operator + * @param criterion {@link Criterion} single criterion which contains field, value and a + * comparison operator */ @Nonnull - public static QueryBuilder getQueryBuilderFromCriterion(@Nonnull final Criterion criterion, boolean isTimeseries) { + public static QueryBuilder getQueryBuilderFromCriterion( + @Nonnull final Criterion criterion, boolean isTimeseries) { final String fieldName = toFacetField(criterion.getField()); /* @@ -188,11 +212,12 @@ public static QueryBuilder getQueryBuilderFromCriterion(@Nonnull final Criterion * First we handle this expansion, if required, otherwise we build the filter as usual * without expansion. */ - final Optional> maybeFieldToExpand = Optional.ofNullable(FIELDS_TO_EXPANDED_FIELDS_LIST.get( - fieldName)); + final Optional> maybeFieldToExpand = + Optional.ofNullable(FIELDS_TO_EXPANDED_FIELDS_LIST.get(fieldName)); if (maybeFieldToExpand.isPresent()) { - return getQueryBuilderFromCriterionForFieldToExpand(maybeFieldToExpand.get(), criterion, isTimeseries); + return getQueryBuilderFromCriterionForFieldToExpand( + maybeFieldToExpand.get(), criterion, isTimeseries); } return getQueryBuilderFromCriterionForSingleField(criterion, isTimeseries); @@ -220,19 +245,21 @@ public static String getElasticTypeForFieldType(SearchableAnnotation.FieldType f /** * Populates source field of search query with the sort order as per the criterion provided. * - *
- * If no sort criterion is provided then the default sorting criterion is chosen which is descending order of score - * Furthermore to resolve conflicts, the results are further sorted by ascending order of urn - * If the input sort criterion is urn itself, then no additional sort criterion is applied as there will be no conflicts. - * When sorting, set the unmappedType param to arbitrary "keyword" so we essentially ignore sorting where indices do not - * have the field we are sorting on. - *
+ *
If no sort criterion is provided then the default sorting criterion is chosen which is + * descending order of score Furthermore to resolve conflicts, the results are further sorted by + * ascending order of urn If the input sort criterion is urn itself, then no additional sort + * criterion is applied as there will be no conflicts. When sorting, set the unmappedType param to + * arbitrary "keyword" so we essentially ignore sorting where indices do not have the field we are + * sorting on. * - * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort order + * @param searchSourceBuilder {@link SearchSourceBuilder} that needs to be populated with sort + * order * @param sortCriterion {@link SortCriterion} to be applied to the search results */ - public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuilder, - @Nullable SortCriterion sortCriterion, List entitySpecs) { + public static void buildSortOrder( + @Nonnull SearchSourceBuilder searchSourceBuilder, + @Nullable SortCriterion sortCriterion, + List entitySpecs) { if (sortCriterion == null) { searchSourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC)); } else { @@ -252,12 +279,17 @@ public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuild } } if (fieldTypeForDefault.isEmpty()) { - log.warn("Sort criterion field " + sortCriterion.getField() + " was not found in any entity spec to be searched"); + log.warn( + "Sort criterion field " + + sortCriterion.getField() + + " was not found in any entity spec to be searched"); } final SortOrder esSortOrder = - (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? SortOrder.ASC + (sortCriterion.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + ? SortOrder.ASC : SortOrder.DESC; - FieldSortBuilder sortBuilder = new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); + FieldSortBuilder sortBuilder = + new FieldSortBuilder(sortCriterion.getField()).order(esSortOrder); if (fieldTypeForDefault.isPresent()) { String esFieldtype = getElasticTypeForFieldType(fieldTypeForDefault.get()); if (esFieldtype != null) { @@ -266,17 +298,22 @@ public static void buildSortOrder(@Nonnull SearchSourceBuilder searchSourceBuild } searchSourceBuilder.sort(sortBuilder); } - if (sortCriterion == null || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { - searchSourceBuilder.sort(new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); + if (sortCriterion == null + || !sortCriterion.getField().equals(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD)) { + searchSourceBuilder.sort( + new FieldSortBuilder(DEFAULT_SEARCH_RESULTS_SORT_BY_FIELD).order(SortOrder.ASC)); } } /** - * Populates source field of search query with the suggestions query so that we get search suggestions back. - * Right now we are only supporting suggestions based on the virtual _entityName field alias. + * Populates source field of search query with the suggestions query so that we get search + * suggestions back. Right now we are only supporting suggestions based on the virtual _entityName + * field alias. 
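   * (e.g. a misspelled query such as "datset" can come back with the term suggestion "dataset")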
*/ - public static void buildNameSuggestions(@Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable String textInput) { - SuggestionBuilder builder = SuggestBuilders.termSuggestion(ENTITY_NAME_FIELD).text(textInput); + public static void buildNameSuggestions( + @Nonnull SearchSourceBuilder searchSourceBuilder, @Nullable String textInput) { + SuggestionBuilder builder = + SuggestBuilders.termSuggestion(ENTITY_NAME_FIELD).text(textInput); SuggestBuilder suggestBuilder = new SuggestBuilder(); suggestBuilder.addSuggestion(NAME_SUGGESTION, builder); searchSourceBuilder.suggest(suggestBuilder); @@ -302,34 +339,43 @@ public static String toFacetField(@Nonnull final String filterField) { } @Nonnull - public static String toKeywordField(@Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { + public static String toKeywordField( + @Nonnull final String filterField, @Nonnull final boolean skipKeywordSuffix) { return skipKeywordSuffix || KEYWORD_FIELDS.contains(filterField) || PATH_HIERARCHY_FIELDS.contains(filterField) - || filterField.contains(".") ? filterField : filterField + ESUtils.KEYWORD_SUFFIX; + || filterField.contains(".") + ? filterField + : filterField + ESUtils.KEYWORD_SUFFIX; } - public static RequestOptions buildReindexTaskRequestOptions(String version, String indexName, String tempIndexName) { + public static RequestOptions buildReindexTaskRequestOptions( + String version, String indexName, String tempIndexName) { return RequestOptions.DEFAULT.toBuilder() .addHeader(OPAQUE_ID_HEADER, getOpaqueIdHeaderValue(version, indexName, tempIndexName)) .build(); } - public static String getOpaqueIdHeaderValue(String version, String indexName, String tempIndexName) { + public static String getOpaqueIdHeaderValue( + String version, String indexName, String tempIndexName) { return String.join(HEADER_VALUE_DELIMITER, version, indexName, tempIndexName); } public static boolean prefixMatch(String id, String version, String indexName) { return Optional.ofNullable(id) - .map(t -> t.startsWith(String.join(HEADER_VALUE_DELIMITER, version, indexName))).orElse(false); + .map(t -> t.startsWith(String.join(HEADER_VALUE_DELIMITER, version, indexName))) + .orElse(false); } public static String extractTargetIndex(String id) { return id.split("[" + HEADER_VALUE_DELIMITER + "]", 3)[2]; } - public static void setSearchAfter(SearchSourceBuilder searchSourceBuilder, @Nullable Object[] sort, - @Nullable String pitId, @Nullable String keepAlive) { + public static void setSearchAfter( + SearchSourceBuilder searchSourceBuilder, + @Nullable Object[] sort, + @Nullable String pitId, + @Nullable String keepAlive) { if (sort != null && sort.length > 0) { searchSourceBuilder.searchAfter(sort); } @@ -357,41 +403,61 @@ private static QueryBuilder getQueryBuilderFromCriterionForFieldToExpand( criterionToQuery.setValue(criterion.getValue()); } criterionToQuery.setField(toKeywordField(field, isTimeseries)); - orQueryBuilder.should(getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); + orQueryBuilder.should( + getQueryBuilderFromCriterionForSingleField(criterionToQuery, isTimeseries)); } return orQueryBuilder; } @Nonnull - private static QueryBuilder getQueryBuilderFromCriterionForSingleField(@Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { + private static QueryBuilder getQueryBuilderFromCriterionForSingleField( + @Nonnull Criterion criterion, @Nonnull boolean isTimeseries) { final Condition condition = criterion.getCondition(); final String fieldName = 
toFacetField(criterion.getField()); if (condition == Condition.IS_NULL) { - return QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery(criterion.getField())).queryName(fieldName); + return QueryBuilders.boolQuery() + .mustNot(QueryBuilders.existsQuery(criterion.getField())) + .queryName(fieldName); } else if (condition == Condition.EXISTS) { - return QueryBuilders.boolQuery().must(QueryBuilders.existsQuery(criterion.getField())).queryName(fieldName); + return QueryBuilders.boolQuery() + .must(QueryBuilders.existsQuery(criterion.getField())) + .queryName(fieldName); } else if (criterion.hasValues() || criterion.hasValue()) { if (condition == Condition.EQUAL) { return buildEqualsConditionFromCriterion(fieldName, criterion, isTimeseries); // TODO: Support multi-match on the following operators (using new 'values' field) } else if (condition == Condition.GREATER_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()).gt(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .gt(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.GREATER_THAN_OR_EQUAL_TO) { - return QueryBuilders.rangeQuery(criterion.getField()).gte(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .gte(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.LESS_THAN) { - return QueryBuilders.rangeQuery(criterion.getField()).lt(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .lt(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.LESS_THAN_OR_EQUAL_TO) { - return QueryBuilders.rangeQuery(criterion.getField()).lte(criterion.getValue().trim()).queryName(fieldName); + return QueryBuilders.rangeQuery(criterion.getField()) + .lte(criterion.getValue().trim()) + .queryName(fieldName); } else if (condition == Condition.CONTAIN) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*").queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*") + .queryName(fieldName); } else if (condition == Condition.START_WITH) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*").queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + ESUtils.escapeReservedCharacters(criterion.getValue().trim()) + "*") + .queryName(fieldName); } else if (condition == Condition.END_WITH) { - return QueryBuilders.wildcardQuery(toKeywordField(criterion.getField(), isTimeseries), - "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim())).queryName(fieldName); + return QueryBuilders.wildcardQuery( + toKeywordField(criterion.getField(), isTimeseries), + "*" + ESUtils.escapeReservedCharacters(criterion.getValue().trim())) + .queryName(fieldName); } } throw new UnsupportedOperationException("Unsupported condition: " + condition); @@ -416,8 +482,8 @@ private static QueryBuilder buildEqualsConditionFromCriterion( } /** - * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which - * was created using the new multi-match 'values' field of 
Criterion.pdl model. + * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created + * using the new multi-match 'values' field of Criterion.pdl model. */ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( @Nonnull final String fieldName, @@ -432,39 +498,47 @@ private static QueryBuilder buildEqualsConditionFromCriterionWithValues( return QueryBuilders.termQuery(fieldName, Boolean.parseBoolean(criterion.getValues().get(0))) .queryName(fieldName); } - return QueryBuilders.termsQuery(toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) + return QueryBuilders.termsQuery( + toKeywordField(criterion.getField(), isTimeseries), criterion.getValues()) .queryName(fieldName); } /** - * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which - * was created using the deprecated 'value' field of Criterion.pdl model. + * Builds an instance of {@link QueryBuilder} representing an EQUALS condition which was created + * using the deprecated 'value' field of Criterion.pdl model. * - * Previously, we supported comma-separate values inside of a single string field, - * thus we have to account for splitting and matching against each value below. + *
Previously, we supported comma-separate values inside of a single string field, thus we have + * to account for splitting and matching against each value below. * - * For all new code, we should be using the new 'values' field for performing multi-match. This + *
For all new code, we should be using the new 'values' field for performing multi-match. This * is simply retained for backwards compatibility of the search API. */ private static QueryBuilder buildEqualsFromCriterionWithValue( @Nonnull final String fieldName, @Nonnull final Criterion criterion, final boolean isTimeseries) { - // If the value is an URN style value, then we do not attempt to split it by comma (for obvious reasons) + // If the value is an URN style value, then we do not attempt to split it by comma (for obvious + // reasons) if (isUrn(criterion.getValue())) { - return QueryBuilders.matchQuery(toKeywordField(criterion.getField(), isTimeseries), criterion.getValue().trim()) + return QueryBuilders.matchQuery( + toKeywordField(criterion.getField(), isTimeseries), criterion.getValue().trim()) .queryName(fieldName) .analyzer(KEYWORD_ANALYZER); } final BoolQueryBuilder filters = new BoolQueryBuilder(); // Cannot assume the existence of a .keyword or other subfield (unless contains `.`) // Cannot assume the type of the underlying field or subfield thus KEYWORD_ANALYZER is forced - List fields = criterion.getField().contains(".") ? List.of(criterion.getField()) - : List.of(criterion.getField(), criterion.getField() + ".*"); + List fields = + criterion.getField().contains(".") + ? List.of(criterion.getField()) + : List.of(criterion.getField(), criterion.getField() + ".*"); Arrays.stream(criterion.getValue().trim().split("\\s*,\\s*")) - .forEach(elem -> filters.should(QueryBuilders.multiMatchQuery(elem, fields.toArray(new String[0])) - .queryName(fieldName) - .analyzer(KEYWORD_ANALYZER))); + .forEach( + elem -> + filters.should( + QueryBuilders.multiMatchQuery(elem, fields.toArray(new String[0])) + .queryName(fieldName) + .analyzer(KEYWORD_ANALYZER))); return filters; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java index 62a8cd932885e..97eb6ade468ea 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/FilterUtils.java @@ -8,17 +8,24 @@ import java.util.Map; import java.util.Set; - public class FilterUtils { - private FilterUtils() { - } + private FilterUtils() {} private static final List FILTER_RANKING = - ImmutableList.of("_entityType", "typeNames", "platform", "domains", "tags", "glossaryTerms", "container", "owners", + ImmutableList.of( + "_entityType", + "typeNames", + "platform", + "domains", + "tags", + "glossaryTerms", + "container", + "owners", "origin"); - public static List rankFilterGroups(Map aggregations) { + public static List rankFilterGroups( + Map aggregations) { Set filterGroups = new HashSet<>(aggregations.keySet()); List finalAggregations = new ArrayList<>(aggregations.size()); for (String filterName : FILTER_RANKING) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java index b026686f7abfd..9f1041eaaeca3 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/GZIPUtil.java @@ -7,9 +7,8 @@ import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; - public class GZIPUtil { - private GZIPUtil() { } + private GZIPUtil() {} public static String gzipDecompress(byte[] gzipped) { String unzipped; @@ -30,7 +29,8 @@ 
public static String gzipDecompress(byte[] gzipped) { public static byte[] gzipCompress(String unzipped) { byte[] gzipped; - try (ByteArrayInputStream bis = new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8)); + try (ByteArrayInputStream bis = + new ByteArrayInputStream(unzipped.getBytes(StandardCharsets.UTF_8)); ByteArrayOutputStream bos = new ByteArrayOutputStream(); GZIPOutputStream gzipOutputStream = new GZIPOutputStream(bos)) { byte[] buffer = new byte[1024]; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java index 8b56ae0beb3f1..b8cf0626b7251 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/SearchUtils.java @@ -32,13 +32,10 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.io.IOUtils; - @Slf4j public class SearchUtils { - private SearchUtils() { - - } + private SearchUtils() {} public static Optional getDocId(@Nonnull Urn urn) { try { @@ -64,23 +61,31 @@ public static Map getRequestMap(@Nullable Filter requestParams) ConjunctiveCriterionArray disjunction = requestParams.getOr(); if (disjunction.size() > 1) { - throw new UnsupportedOperationException("To build request map, there must be only one conjunction group."); + throw new UnsupportedOperationException( + "To build request map, there must be only one conjunction group."); } - CriterionArray criterionArray = disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); + CriterionArray criterionArray = + disjunction.size() > 0 ? disjunction.get(0).getAnd() : new CriterionArray(); - criterionArray.forEach(criterion -> { - if (!com.linkedin.metadata.query.filter.Condition.EQUAL.equals(criterion.getCondition())) { - throw new UnsupportedOperationException("Unsupported condition: " + criterion.getCondition()); - } - }); + criterionArray.forEach( + criterion -> { + if (!com.linkedin.metadata.query.filter.Condition.EQUAL.equals( + criterion.getCondition())) { + throw new UnsupportedOperationException( + "Unsupported condition: " + criterion.getCondition()); + } + }); - return criterionArray.stream().collect(Collectors.toMap(Criterion::getField, Criterion::getValue)); + return criterionArray.stream() + .collect(Collectors.toMap(Criterion::getField, Criterion::getValue)); } public static boolean isUrn(@Nonnull String value) { - // TODO(https://github.com/datahub-project/datahub-gma/issues/51): This method is a bit of a hack to support searching for - // URNs that have commas in them, while also using commas a delimiter for search. We should stop supporting commas + // TODO(https://github.com/datahub-project/datahub-gma/issues/51): This method is a bit of a + // hack to support searching for + // URNs that have commas in them, while also using commas a delimiter for search. We should stop + // supporting commas // as delimiter, and then we can stop using this hack. 
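    // e.g. "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)" contains commas but is a single value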
return value.startsWith("urn:li:"); } @@ -104,40 +109,52 @@ public static String readResourceFile(@Nonnull Class clazz, @Nonnull String file } } - public static Filter removeCriteria(@Nullable Filter originalFilter, Predicate shouldRemove) { + public static Filter removeCriteria( + @Nullable Filter originalFilter, Predicate shouldRemove) { if (originalFilter != null && originalFilter.getOr() != null) { - return new Filter().setOr(new ConjunctiveCriterionArray(originalFilter.getOr() - .stream() - .map(criteria -> removeCriteria(criteria, shouldRemove)) - .filter(criteria -> !criteria.getAnd().isEmpty()) - .collect(Collectors.toList()))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + originalFilter.getOr().stream() + .map(criteria -> removeCriteria(criteria, shouldRemove)) + .filter(criteria -> !criteria.getAnd().isEmpty()) + .collect(Collectors.toList()))); } return originalFilter; } - private static ConjunctiveCriterion removeCriteria(@Nonnull ConjunctiveCriterion conjunctiveCriterion, - Predicate shouldRemove) { - return new ConjunctiveCriterion().setAnd(new CriterionArray(conjunctiveCriterion.getAnd() - .stream() - .filter(criterion -> !shouldRemove.test(criterion)) - .collect(Collectors.toList()))); + private static ConjunctiveCriterion removeCriteria( + @Nonnull ConjunctiveCriterion conjunctiveCriterion, Predicate shouldRemove) { + return new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + conjunctiveCriterion.getAnd().stream() + .filter(criterion -> !shouldRemove.test(criterion)) + .collect(Collectors.toList()))); } @SneakyThrows public static AggregationMetadata merge(AggregationMetadata one, AggregationMetadata two) { Map mergedMap = - Stream.concat(one.getAggregations().entrySet().stream(), two.getAggregations().entrySet().stream()) + Stream.concat( + one.getAggregations().entrySet().stream(), + two.getAggregations().entrySet().stream()) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, Long::sum)); - // we want to make sure the values that were used in the filter are prioritized to appear in the response aggregation - Set filteredValues = Stream.concat(one.getFilterValues().stream(), two.getFilterValues().stream()).filter(val -> val.isFiltered()).map( - val -> val.getValue() - ).collect(Collectors.toSet()); + // we want to make sure the values that were used in the filter are prioritized to appear in the + // response aggregation + Set filteredValues = + Stream.concat(one.getFilterValues().stream(), two.getFilterValues().stream()) + .filter(val -> val.isFiltered()) + .map(val -> val.getValue()) + .collect(Collectors.toSet()); return one.clone() - .setDisplayName(two.getDisplayName() != two.getName() ? two.getDisplayName() : one.getDisplayName()) + .setDisplayName( + two.getDisplayName() != two.getName() ? 
two.getDisplayName() : one.getDisplayName()) .setAggregations(new LongMap(mergedMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(mergedMap, filteredValues))); + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(mergedMap, filteredValues))); } public static ListResult toListResult(final SearchResult searchResult) { @@ -149,13 +166,16 @@ public static ListResult toListResult(final SearchResult searchResult) { listResult.setCount(searchResult.getPageSize()); listResult.setTotal(searchResult.getNumEntities()); listResult.setEntities( - new UrnArray(searchResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()))); + new UrnArray( + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); return listResult; } @SneakyThrows - public static SearchFlags applyDefaultSearchFlags(@Nullable SearchFlags inputFlags, @Nullable String query, - @Nonnull SearchFlags defaultFlags) { + public static SearchFlags applyDefaultSearchFlags( + @Nullable SearchFlags inputFlags, @Nullable String query, @Nonnull SearchFlags defaultFlags) { SearchFlags finalSearchFlags = inputFlags != null ? inputFlags : defaultFlags.copy(); if (!finalSearchFlags.hasFulltext() || finalSearchFlags.isFulltext() == null) { finalSearchFlags.setFulltext(defaultFlags.isFulltext()); @@ -163,7 +183,8 @@ public static SearchFlags applyDefaultSearchFlags(@Nullable SearchFlags inputFla if (query == null || Set.of("*", "").contains(query)) { // No highlighting if no query string finalSearchFlags.setSkipHighlighting(true); - } else if (!finalSearchFlags.hasSkipHighlighting() || finalSearchFlags.isSkipHighlighting() == null) { + } else if (!finalSearchFlags.hasSkipHighlighting() + || finalSearchFlags.isSkipHighlighting() == null) { finalSearchFlags.setSkipHighlighting(defaultFlags.isSkipHighlighting()); } if (!finalSearchFlags.hasSkipAggregates() || finalSearchFlags.isSkipAggregates() == null) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index ea7286112f870..b2c615c1f47f5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; @@ -41,7 +44,6 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; - import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; @@ -59,9 +61,6 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - @Slf4j public class UpdateIndicesService { private static final String DOWNSTREAM_OF = "DownstreamOf"; @@ -76,13 +75,12 @@ public class UpdateIndicesService { @Value("${featureFlags.graphServiceDiffModeEnabled:true}") private boolean _graphDiffMode; + @Value("${featureFlags.searchServiceDiffModeEnabled:true}") private boolean _searchDiffMode; - private static final Set 
UPDATE_CHANGE_TYPES = ImmutableSet.of( - ChangeType.UPSERT, - ChangeType.RESTATE, - ChangeType.PATCH); + private static final Set UPDATE_CHANGE_TYPES = + ImmutableSet.of(ChangeType.UPSERT, ChangeType.RESTATE, ChangeType.PATCH); @VisibleForTesting public void setGraphDiffMode(boolean graphDiffMode) { @@ -95,13 +93,13 @@ public void setSearchDiffMode(boolean searchDiffMode) { } public UpdateIndicesService( - GraphService graphService, - EntitySearchService entitySearchService, - TimeseriesAspectService timeseriesAspectService, - SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, - SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { + GraphService graphService, + EntitySearchService entitySearchService, + TimeseriesAspectService timeseriesAspectService, + SystemMetadataService systemMetadataService, + EntityRegistry entityRegistry, + SearchDocumentTransformer searchDocumentTransformer, + EntityIndexBuilders entityIndexBuilders) { _graphService = graphService; _entitySearchService = entitySearchService; _timeseriesAspectService = timeseriesAspectService; @@ -123,14 +121,12 @@ public void handleChangeEvent(@Nonnull final MetadataChangeLog event) { } } - /** - * This very important method processes {@link MetadataChangeLog} events - * that represent changes to the Metadata Graph. + * This very important method processes {@link MetadataChangeLog} events that represent changes to + * the Metadata Graph. * - * In particular, it handles updating the Search, Graph, Timeseries, and - * System Metadata stores in response to a given change type to reflect - * the changes present in the new aspect. + *
<p>
In particular, it handles updating the Search, Graph, Timeseries, and System Metadata stores + * in response to a given change type to reflect the changes present in the new aspect. * * @param event the change event to be processed. */ @@ -147,23 +143,29 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro AspectSpec aspectSpec = entitySpec.getAspectSpec(event.getAspectName()); if (aspectSpec == null) { throw new RuntimeException( - String.format("Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", - event.getEntityType(), - event.getAspectName())); + String.format( + "Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", + event.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - aspectSpec); + RecordTemplate aspect = + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), event.getAspect().getContentType(), aspectSpec); GenericAspect previousAspectValue = event.getPreviousAspectValue(); - RecordTemplate previousAspect = previousAspectValue != null - ? GenericRecordUtils.deserializeAspect(previousAspectValue.getValue(), previousAspectValue.getContentType(), aspectSpec) - : null; + RecordTemplate previousAspect = + previousAspectValue != null + ? GenericRecordUtils.deserializeAspect( + previousAspectValue.getValue(), previousAspectValue.getContentType(), aspectSpec) + : null; // Step 0. If the aspect is timeseries, add to its timeseries index. if (aspectSpec.isTimeseries()) { - updateTimeseriesFields(event.getEntityType(), event.getAspectName(), urn, aspect, aspectSpec, + updateTimeseriesFields( + event.getEntityType(), + event.getAspectName(), + urn, + aspect, + aspectSpec, event.getSystemMetadata()); } else { // Inject into the System Metadata Index when an aspect is non-timeseries only. @@ -173,13 +175,16 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro } // Step 1. For all aspects, attempt to update Search - updateSearchService(entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect); + updateSearchService( + entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect); // Step 2. For all aspects, attempt to update Graph SystemMetadata systemMetadata = event.getSystemMetadata(); - if (_graphDiffMode && !(_graphService instanceof DgraphGraphService) - && (systemMetadata == null || systemMetadata.getProperties() == null - || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { + if (_graphDiffMode + && !(_graphService instanceof DgraphGraphService) + && (systemMetadata == null + || systemMetadata.getProperties() == null + || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { updateGraphServiceDiff(urn, aspectSpec, previousAspect, aspect, event); } else { updateGraphService(urn, aspectSpec, aspect, event); @@ -187,14 +192,14 @@ public void handleUpdateChangeEvent(@Nonnull final MetadataChangeLog event) thro } /** - * This very important method processes {@link MetadataChangeLog} deletion events - * to cleanup the Metadata Graph when an aspect or entity is removed. + * This very important method processes {@link MetadataChangeLog} deletion events to cleanup the + * Metadata Graph when an aspect or entity is removed. 
* - * In particular, it handles updating the Search, Graph, Timeseries, and - * System Metadata stores to reflect the deletion of a particular aspect. + *
<p>
In particular, it handles updating the Search, Graph, Timeseries, and System Metadata stores + * to reflect the deletion of a particular aspect. * - * Note that if an entity's key aspect is deleted, the entire entity will be purged - * from search, graph, timeseries, etc. + *
<p>
Note that if an entity's key aspect is deleted, the entire entity will be purged from + * search, graph, timeseries, etc. * * @param event the change event to be processed. */ @@ -211,19 +216,23 @@ public void handleDeleteChangeEvent(@Nonnull final MetadataChangeLog event) { AspectSpec aspectSpec = entitySpec.getAspectSpec(event.getAspectName()); if (aspectSpec == null) { throw new RuntimeException( - String.format("Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", - event.getEntityType(), - event.getAspectName())); + String.format( + "Failed to retrieve Aspect Spec for entity with name %s, aspect with name %s. Cannot update indices for MCL.", + event.getEntityType(), event.getAspectName())); } - RecordTemplate aspect = GenericRecordUtils.deserializeAspect(event.getPreviousAspectValue().getValue(), - event.getPreviousAspectValue().getContentType(), aspectSpec); + RecordTemplate aspect = + GenericRecordUtils.deserializeAspect( + event.getPreviousAspectValue().getValue(), + event.getPreviousAspectValue().getContentType(), + aspectSpec); Boolean isDeletingKey = event.getAspectName().equals(entitySpec.getKeyAspectName()); if (!aspectSpec.isTimeseries()) { deleteSystemMetadata(urn, aspectSpec, isDeletingKey); deleteGraphData(urn, aspectSpec, aspect, isDeletingKey, event); - deleteSearchData(_entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey); + deleteSearchData( + _entitySearchService, urn, entitySpec.getName(), aspectSpec, aspect, isDeletingKey); } } @@ -231,8 +240,7 @@ public void handleDeleteChangeEvent(@Nonnull final MetadataChangeLog event) { private void updateFineGrainedEdgesAndRelationships( RecordTemplate aspect, List edgesToAdd, - HashMap> urnToRelationshipTypesBeingAdded - ) { + HashMap> urnToRelationshipTypesBeingAdded) { UpstreamLineage upstreamLineage = new UpstreamLineage(aspect.data()); if (upstreamLineage.getFineGrainedLineages() != null) { for (FineGrainedLineage fineGrainedLineage : upstreamLineage.getFineGrainedLineages()) { @@ -243,8 +251,10 @@ private void updateFineGrainedEdgesAndRelationships( for (Urn downstream : fineGrainedLineage.getDownstreams()) { for (Urn upstream : fineGrainedLineage.getUpstreams()) { // TODO: add edges uniformly across aspects - edgesToAdd.add(new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); - Set relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); + edgesToAdd.add( + new Edge(downstream, upstream, DOWNSTREAM_OF, null, null, null, null, null)); + Set relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(downstream, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); urnToRelationshipTypesBeingAdded.put(downstream, relationshipTypes); } @@ -253,10 +263,14 @@ private void updateFineGrainedEdgesAndRelationships( } } - private Urn generateSchemaFieldUrn(@Nonnull final String resourceUrn, @Nonnull final String fieldPath) { - // we rely on schemaField fieldPaths to be encoded since we do that with fineGrainedLineage on the ingestion side - final String encodedFieldPath = fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); - final SchemaFieldKey key = new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); + private Urn generateSchemaFieldUrn( + @Nonnull final String resourceUrn, @Nonnull final String fieldPath) { + // we rely on schemaField fieldPaths to be encoded since we do that with 
fineGrainedLineage on + // the ingestion side + final String encodedFieldPath = + fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C"); + final SchemaFieldKey key = + new SchemaFieldKey().setParent(UrnUtils.getUrn(resourceUrn)).setFieldPath(encodedFieldPath); return EntityKeyUtils.convertEntityKeyToUrn(key, Constants.SCHEMA_FIELD_ENTITY_NAME); } @@ -265,15 +279,27 @@ private void updateInputFieldEdgesAndRelationships( @Nonnull final Urn urn, @Nonnull final InputFields inputFields, @Nonnull final List edgesToAdd, - @Nonnull final HashMap> urnToRelationshipTypesBeingAdded - ) { + @Nonnull final HashMap> urnToRelationshipTypesBeingAdded) { if (inputFields.hasFields()) { for (final InputField field : inputFields.getFields()) { - if (field.hasSchemaFieldUrn() && field.hasSchemaField() && field.getSchemaField().hasFieldPath()) { - final Urn sourceFieldUrn = generateSchemaFieldUrn(urn.toString(), field.getSchemaField().getFieldPath()); + if (field.hasSchemaFieldUrn() + && field.hasSchemaField() + && field.getSchemaField().hasFieldPath()) { + final Urn sourceFieldUrn = + generateSchemaFieldUrn(urn.toString(), field.getSchemaField().getFieldPath()); // TODO: add edges uniformly across aspects - edgesToAdd.add(new Edge(sourceFieldUrn, field.getSchemaFieldUrn(), DOWNSTREAM_OF, null, null, null, null, null)); - final Set relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(sourceFieldUrn, new HashSet<>()); + edgesToAdd.add( + new Edge( + sourceFieldUrn, + field.getSchemaFieldUrn(), + DOWNSTREAM_OF, + null, + null, + null, + null, + null)); + final Set relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(sourceFieldUrn, new HashSet<>()); relationshipTypes.add(DOWNSTREAM_OF); urnToRelationshipTypesBeingAdded.put(sourceFieldUrn, relationshipTypes); } @@ -286,54 +312,59 @@ private Pair, HashMap>> getEdgesAndRelationshipTypes @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, @Nonnull final MetadataChangeLog event, - final boolean isNewAspectVersion - ) { + final boolean isNewAspectVersion) { final List edgesToAdd = new ArrayList<>(); final HashMap> urnToRelationshipTypesBeingAdded = new HashMap<>(); - // we need to manually set schemaField <-> schemaField edges for fineGrainedLineage and inputFields + // we need to manually set schemaField <-> schemaField edges for fineGrainedLineage and + // inputFields // since @Relationship only links between the parent entity urn and something else. 
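// A minimal, self-contained sketch of the schemaField URN convention that the
// fine-grained lineage handling above relies on. The dataset URN and field path
// below are hypothetical examples, and the string concatenation is a simplified
// stand-in for EntityKeyUtils.convertEntityKeyToUrn; only the replaceAll encoding
// mirrors generateSchemaFieldUrn.
public final class SchemaFieldUrnSketch {
  public static void main(String[] args) {
    // Hypothetical parent dataset and a field path containing characters that need encoding.
    String resourceUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,db.users,PROD)";
    String fieldPath = "address,(city)";
    // Same replacements as generateSchemaFieldUrn: '(' -> %28, ')' -> %29, ',' -> %2C
    String encoded =
        fieldPath.replaceAll("\\(", "%28").replaceAll("\\)", "%29").replaceAll(",", "%2C");
    // Simplified URN assembly; prints
    // urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,db.users,PROD),address%2C%28city%29)
    System.out.println("urn:li:schemaField:(" + resourceUrn + "," + encoded + ")");
  }
}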
if (aspectSpec.getName().equals(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { updateFineGrainedEdgesAndRelationships(aspect, edgesToAdd, urnToRelationshipTypesBeingAdded); } if (aspectSpec.getName().equals(Constants.INPUT_FIELDS_ASPECT_NAME)) { final InputFields inputFields = new InputFields(aspect.data()); - updateInputFieldEdgesAndRelationships(urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); + updateInputFieldEdgesAndRelationships( + urn, inputFields, edgesToAdd, urnToRelationshipTypesBeingAdded); } Map> extractedFields = FieldExtractor.extractFields(aspect, aspectSpec.getRelationshipFieldSpecs()); for (Map.Entry> entry : extractedFields.entrySet()) { - Set relationshipTypes = urnToRelationshipTypesBeingAdded.getOrDefault(urn, new HashSet<>()); + Set relationshipTypes = + urnToRelationshipTypesBeingAdded.getOrDefault(urn, new HashSet<>()); relationshipTypes.add(entry.getKey().getRelationshipName()); urnToRelationshipTypesBeingAdded.put(urn, relationshipTypes); - final List newEdges = GraphIndexUtils.extractGraphEdges(entry, aspect, urn, event, isNewAspectVersion); + final List newEdges = + GraphIndexUtils.extractGraphEdges(entry, aspect, urn, event, isNewAspectVersion); edgesToAdd.addAll(newEdges); } return Pair.of(edgesToAdd, urnToRelationshipTypesBeingAdded); } - /** - * Process snapshot and update graph index - */ + /** Process snapshot and update graph index */ private void updateGraphService( @Nonnull final Urn urn, @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { Pair, HashMap>> edgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, aspect, event, true); final List edgesToAdd = edgeAndRelationTypes.getFirst(); - final HashMap> urnToRelationshipTypesBeingAdded = edgeAndRelationTypes.getSecond(); + final HashMap> urnToRelationshipTypesBeingAdded = + edgeAndRelationTypes.getSecond(); log.debug("Here's the relationship types found {}", urnToRelationshipTypesBeingAdded); if (urnToRelationshipTypesBeingAdded.size() > 0) { for (Map.Entry> entry : urnToRelationshipTypesBeingAdded.entrySet()) { - _graphService.removeEdgesFromNode(entry.getKey(), new ArrayList<>(entry.getValue()), - newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)); + _graphService.removeEdgesFromNode( + entry.getKey(), + new ArrayList<>(entry.getValue()), + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING)); } edgesToAdd.forEach(_graphService::addEdge); } @@ -344,15 +375,17 @@ private void updateGraphServiceDiff( @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspect, @Nonnull final RecordTemplate newAspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { Pair, HashMap>> oldEdgeAndRelationTypes = null; if (oldAspect != null) { - oldEdgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, oldAspect, event, false); + oldEdgeAndRelationTypes = + getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, oldAspect, event, false); } final List oldEdges = - oldEdgeAndRelationTypes != null ? oldEdgeAndRelationTypes.getFirst() : Collections.emptyList(); + oldEdgeAndRelationTypes != null + ? 
oldEdgeAndRelationTypes.getFirst() + : Collections.emptyList(); final Set oldEdgeSet = new HashSet<>(oldEdges); Pair, HashMap>> newEdgeAndRelationTypes = @@ -362,14 +395,12 @@ private void updateGraphServiceDiff( final Set newEdgeSet = new HashSet<>(newEdges); // Edges to add - final List additiveDifference = newEdgeSet.stream() - .filter(edge -> !oldEdgeSet.contains(edge)) - .collect(Collectors.toList()); + final List additiveDifference = + newEdgeSet.stream().filter(edge -> !oldEdgeSet.contains(edge)).collect(Collectors.toList()); // Edges to remove - final List subtractiveDifference = oldEdgeSet.stream() - .filter(edge -> !newEdgeSet.contains(edge)) - .collect(Collectors.toList()); + final List subtractiveDifference = + oldEdgeSet.stream().filter(edge -> !newEdgeSet.contains(edge)).collect(Collectors.toList()); // Edges to update final List mergedEdges = getMergedEdges(oldEdgeSet, newEdgeSet); @@ -394,17 +425,18 @@ private void updateGraphServiceDiff( } private static List getMergedEdges(final Set oldEdgeSet, final Set newEdgeSet) { - final Map oldEdgesMap = oldEdgeSet - .stream() - .map(edge -> Pair.of(edge.hashCode(), edge)) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); + final Map oldEdgesMap = + oldEdgeSet.stream() + .map(edge -> Pair.of(edge.hashCode(), edge)) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); final List mergedEdges = new ArrayList<>(); if (!oldEdgesMap.isEmpty()) { for (com.linkedin.metadata.graph.Edge newEdge : newEdgeSet) { if (oldEdgesMap.containsKey(newEdge.hashCode())) { final com.linkedin.metadata.graph.Edge oldEdge = oldEdgesMap.get(newEdge.hashCode()); - final com.linkedin.metadata.graph.Edge mergedEdge = GraphIndexUtils.mergeEdges(oldEdge, newEdge); + final com.linkedin.metadata.graph.Edge mergedEdge = + GraphIndexUtils.mergeEdges(oldEdge, newEdge); mergedEdges.add(mergedEdge); } } @@ -413,18 +445,21 @@ private static List getMergedEdges(final Set oldEdgeSet, final Set searchDocument; Optional previousSearchDocument = Optional.empty(); try { searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, false); } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -439,14 +474,18 @@ private void updateSearchService(String entityName, Urn urn, } String searchDocumentValue = searchDocument.get(); - if (_searchDiffMode && (systemMetadata == null || systemMetadata.getProperties() == null - || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { + if (_searchDiffMode + && (systemMetadata == null + || systemMetadata.getProperties() == null + || !Boolean.parseBoolean(systemMetadata.getProperties().get(FORCE_INDEXING_KEY)))) { if (previousAspect != null) { try { - previousSearchDocument = _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); + previousSearchDocument = + _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); } catch (Exception e) { log.error( - "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", e, + "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", + e, aspectSpec.getName()); } } @@ -463,11 +502,14 @@ private void updateSearchService(String entityName, Urn urn, _entitySearchService.upsertDocument(entityName, 
searchDocument.get(), docId.get()); } - /** - * Process snapshot and update time-series index - */ - private void updateTimeseriesFields(String entityType, String aspectName, Urn urn, RecordTemplate aspect, - AspectSpec aspectSpec, SystemMetadata systemMetadata) { + /** Process snapshot and update time-series index */ + private void updateTimeseriesFields( + String entityType, + String aspectName, + Urn urn, + RecordTemplate aspect, + AspectSpec aspectSpec, + SystemMetadata systemMetadata) { Map documents; try { documents = TimeseriesAspectTransformer.transform(urn, aspect, aspectSpec, systemMetadata); @@ -475,12 +517,17 @@ private void updateTimeseriesFields(String entityType, String aspectName, Urn ur log.error("Failed to generate timeseries document from aspect: {}", e.toString()); return; } - documents.entrySet().forEach(document -> { - _timeseriesAspectService.upsertDocument(entityType, aspectName, document.getKey(), document.getValue()); - }); + documents + .entrySet() + .forEach( + document -> { + _timeseriesAspectService.upsertDocument( + entityType, aspectName, document.getKey(), document.getValue()); + }); } - private void updateSystemMetadata(SystemMetadata systemMetadata, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect) { + private void updateSystemMetadata( + SystemMetadata systemMetadata, Urn urn, AspectSpec aspectSpec, RecordTemplate aspect) { _systemMetadataService.insert(systemMetadata, urn.toString(), aspectSpec.getName()); // If processing status aspect update all aspects for this urn to removed @@ -496,7 +543,9 @@ private void deleteSystemMetadata(Urn urn, AspectSpec aspectSpec, Boolean isKeyA _systemMetadataService.deleteUrn(urn.toString()); } else { // Delete all aspects from system metadata service - log.debug(String.format("Deleting system metadata for urn: %s, aspect: %s", urn, aspectSpec.getName())); + log.debug( + String.format( + "Deleting system metadata for urn: %s, aspect: %s", urn, aspectSpec.getName())); _systemMetadataService.deleteAspect(urn.toString(), aspectSpec.getName()); } } @@ -506,8 +555,7 @@ private void deleteGraphData( @Nonnull final AspectSpec aspectSpec, @Nonnull final RecordTemplate aspect, @Nonnull final Boolean isKeyAspect, - @Nonnull final MetadataChangeLog event - ) { + @Nonnull final MetadataChangeLog event) { if (isKeyAspect) { _graphService.removeNode(urn); return; @@ -516,17 +564,27 @@ private void deleteGraphData( Pair, HashMap>> edgeAndRelationTypes = getEdgesAndRelationshipTypesFromAspect(urn, aspectSpec, aspect, event, true); - final HashMap> urnToRelationshipTypesBeingAdded = edgeAndRelationTypes.getSecond(); + final HashMap> urnToRelationshipTypesBeingAdded = + edgeAndRelationTypes.getSecond(); if (urnToRelationshipTypesBeingAdded.size() > 0) { for (Map.Entry> entry : urnToRelationshipTypesBeingAdded.entrySet()) { - _graphService.removeEdgesFromNode(entry.getKey(), new ArrayList<>(entry.getValue()), - createRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)); + _graphService.removeEdgesFromNode( + entry.getKey(), + new ArrayList<>(entry.getValue()), + createRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING)); } } } - private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, String entityName, - AspectSpec aspectSpec, RecordTemplate aspect, Boolean isKeyAspect) { + private void deleteSearchData( + EntitySearchService entitySearchService, + Urn urn, + String entityName, + AspectSpec aspectSpec, 
+ RecordTemplate aspect, + Boolean isKeyAspect) { String docId; try { docId = URLEncoder.encode(urn.toString(), "UTF-8"); @@ -542,9 +600,11 @@ private void deleteSearchData(EntitySearchService entitySearchService, Urn urn, Optional searchDocument; try { - searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO + searchDocument = + _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -560,14 +620,16 @@ private EntitySpec getEventEntitySpec(@Nonnull final MetadataChangeLog event) { return _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { throw new RuntimeException( - String.format("Failed to retrieve Entity Spec for entity with name %s. Cannot update indices for MCL.", + String.format( + "Failed to retrieve Entity Spec for entity with name %s. Cannot update indices for MCL.", event.getEntityType())); } } /** - * Allow internal use of the system entity client. Solves recursive dependencies between the UpdateIndicesService - * and the SystemJavaEntityClient + * Allow internal use of the system entity client. Solves recursive dependencies between the + * UpdateIndicesService and the SystemJavaEntityClient + * * @param systemEntityClient system entity client */ public void setSystemEntityClient(SystemEntityClient systemEntityClient) { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java index 64ad88c08a741..9aa0cdca99f68 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/shared/ElasticSearchIndexed.java @@ -1,22 +1,20 @@ package com.linkedin.metadata.shared; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; - import java.io.IOException; import java.util.List; - public interface ElasticSearchIndexed { - /** - * The index configurations for the given service. - * @return List of reindex configurations - */ - List buildReindexConfigs() throws IOException; + /** + * The index configurations for the given service. 
+ * + * @return List of reindex configurations + */ + List buildReindexConfigs() throws IOException; - /** - * Mirrors the service's functions which - * are expected to build/reindex as needed based - * on the reindex configurations above - */ - void reindexAll() throws IOException; + /** + * Mirrors the service's functions which are expected to build/reindex as needed based on the + * reindex configurations above + */ + void reindexAll() throws IOException; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java index 5eb03eb23d01a..cf1674ac00480 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ESSystemMetadataDAO.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.systemmetadata; +import static com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService.INDEX_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import com.linkedin.metadata.search.utils.ESUtils; @@ -34,9 +36,6 @@ import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortOrder; -import static com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService.INDEX_NAME; - - @Slf4j @RequiredArgsConstructor public class ESSystemMetadataDAO { @@ -47,17 +46,18 @@ public class ESSystemMetadataDAO { /** * Gets the status of a Task running in ElasticSearch + * * @param taskId the task ID to get the status of */ public Optional getTaskStatus(@Nonnull String nodeId, long taskId) { - final GetTaskRequest taskRequest = new GetTaskRequest( - nodeId, - taskId - ); + final GetTaskRequest taskRequest = new GetTaskRequest(nodeId, taskId); try { return client.tasks().get(taskRequest, RequestOptions.DEFAULT); } catch (IOException e) { - log.error(String.format("ERROR: Failed to get task status for %s:%d. See stacktrace for a more detailed error:", nodeId, taskId)); + log.error( + String.format( + "ERROR: Failed to get task status for %s:%d. 
See stacktrace for a more detailed error:", + nodeId, taskId)); e.printStackTrace(); } return Optional.empty(); @@ -70,8 +70,8 @@ public Optional getTaskStatus(@Nonnull String nodeId, long task * @param docId the ID of the document */ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { - final UpdateRequest updateRequest = new UpdateRequest( - indexConvention.getIndexName(INDEX_NAME), docId) + final UpdateRequest updateRequest = + new UpdateRequest(indexConvention.getIndexName(INDEX_NAME), docId) .detectNoop(false) .docAsUpsert(true) .doc(document, XContentType.JSON) @@ -80,7 +80,8 @@ public void upsertDocument(@Nonnull String docId, @Nonnull String document) { } public DeleteResponse deleteByDocId(@Nonnull final String docId) { - DeleteRequest deleteRequest = new DeleteRequest(indexConvention.getIndexName(INDEX_NAME), docId); + DeleteRequest deleteRequest = + new DeleteRequest(indexConvention.getIndexName(INDEX_NAME), docId); try { final DeleteResponse deleteResponse = client.delete(deleteRequest, RequestOptions.DEFAULT); @@ -96,24 +97,26 @@ public BulkByScrollResponse deleteByUrn(@Nonnull final String urn) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); finalQuery.must(QueryBuilders.termQuery("urn", urn)); - final Optional deleteResponse = bulkProcessor.deleteByQuery(finalQuery, - indexConvention.getIndexName(INDEX_NAME)); + final Optional deleteResponse = + bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)); return deleteResponse.orElse(null); } - public BulkByScrollResponse deleteByUrnAspect(@Nonnull final String urn, @Nonnull final String aspect) { + public BulkByScrollResponse deleteByUrnAspect( + @Nonnull final String urn, @Nonnull final String aspect) { BoolQueryBuilder finalQuery = QueryBuilders.boolQuery(); finalQuery.must(QueryBuilders.termQuery("urn", urn)); finalQuery.must(QueryBuilders.termQuery("aspect", aspect)); - final Optional deleteResponse = bulkProcessor.deleteByQuery(finalQuery, - indexConvention.getIndexName(INDEX_NAME)); + final Optional deleteResponse = + bulkProcessor.deleteByQuery(finalQuery, indexConvention.getIndexName(INDEX_NAME)); return deleteResponse.orElse(null); } - public SearchResponse findByParams(Map searchParams, boolean includeSoftDeleted, int from, int size) { + public SearchResponse findByParams( + Map searchParams, boolean includeSoftDeleted, int from, int size) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -147,8 +150,13 @@ public SearchResponse findByParams(Map searchParams, boolean inc } // TODO: Scroll impl for searches bound by 10k limit - public SearchResponse findByParams(Map searchParams, boolean includeSoftDeleted, @Nullable Object[] sort, - @Nullable String pitId, @Nonnull String keepAlive, int size) { + public SearchResponse findByParams( + Map searchParams, + boolean includeSoftDeleted, + @Nullable Object[] sort, + @Nullable String pitId, + @Nonnull String keepAlive, + int size) { SearchRequest searchRequest = new SearchRequest(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -181,8 +189,8 @@ public SearchResponse findByParams(Map searchParams, boolean inc return null; } - public SearchResponse findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, - int from, int size) { + public SearchResponse findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size) { Map params = new 
HashMap<>(); params.put("registryName", registryName); params.put("registryVersion", registryVersion); @@ -210,11 +218,13 @@ public SearchResponse findRuns(Integer pageOffset, Integer pageSize) { bucketSort.size(pageSize); bucketSort.from(pageOffset); - TermsAggregationBuilder aggregation = AggregationBuilders.terms("runId") - .field("runId") - .subAggregation(AggregationBuilders.max("maxTimestamp").field("lastUpdated")) - .subAggregation(bucketSort) - .subAggregation(AggregationBuilders.filter("removed", QueryBuilders.termQuery("removed", "true"))); + TermsAggregationBuilder aggregation = + AggregationBuilders.terms("runId") + .field("runId") + .subAggregation(AggregationBuilders.max("maxTimestamp").field("lastUpdated")) + .subAggregation(bucketSort) + .subAggregation( + AggregationBuilders.filter("removed", QueryBuilders.termQuery("removed", "true"))); searchSourceBuilder.aggregation(aggregation); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java index e9ee1d6ee78d5..6fbe7cfe882ce 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/ElasticSearchSystemMetadataService.java @@ -40,10 +40,10 @@ import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.ParsedMax; - @Slf4j @RequiredArgsConstructor -public class ElasticSearchSystemMetadataService implements SystemMetadataService, ElasticSearchIndexed { +public class ElasticSearchSystemMetadataService + implements SystemMetadataService, ElasticSearchIndexed { private final ESBulkProcessor _esBulkProcessor; private final IndexConvention _indexConvention; @@ -58,9 +58,15 @@ public class ElasticSearchSystemMetadataService implements SystemMetadataService private static final String FIELD_LAST_UPDATED = "lastUpdated"; private static final String FIELD_REGISTRY_NAME = "registryName"; private static final String FIELD_REGISTRY_VERSION = "registryVersion"; - private static final Set INDEX_FIELD_SET = new HashSet<>( - Arrays.asList(FIELD_URN, FIELD_ASPECT, FIELD_RUNID, FIELD_LAST_UPDATED, FIELD_REGISTRY_NAME, - FIELD_REGISTRY_VERSION)); + private static final Set INDEX_FIELD_SET = + new HashSet<>( + Arrays.asList( + FIELD_URN, + FIELD_ASPECT, + FIELD_RUNID, + FIELD_LAST_UPDATED, + FIELD_REGISTRY_NAME, + FIELD_REGISTRY_VERSION)); private String toDocument(SystemMetadata systemMetadata, String urn, String aspect) { final ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -112,12 +118,13 @@ public void setDocStatus(String urn, boolean removed) { final List aspectList = findByParams(ImmutableMap.of("urn", urn), !removed, 0, ESUtils.MAX_RESULT_SIZE); // for each -> toDocId and set removed to true for all - aspectList.forEach(aspect -> { - final String docId = toDocId(aspect.getUrn(), aspect.getAspectName()); - final ObjectNode document = JsonNodeFactory.instance.objectNode(); - document.put("removed", removed); - _esDAO.upsertDocument(docId, document.toString()); - }); + aspectList.forEach( + aspect -> { + final String docId = toDocId(aspect.getUrn(), aspect.getAspectName()); + final ObjectNode document = JsonNodeFactory.instance.objectNode(); + document.put("removed", removed); + _esDAO.upsertDocument(docId, document.toString()); + }); } @Override @@ -133,36 +140,44 @@ public void 
insert(@Nullable SystemMetadata systemMetadata, String urn, String a } @Override - public List findByRunId(String runId, boolean includeSoftDeleted, int from, int size) { - return findByParams(Collections.singletonMap(FIELD_RUNID, runId), includeSoftDeleted, from, size); + public List findByRunId( + String runId, boolean includeSoftDeleted, int from, int size) { + return findByParams( + Collections.singletonMap(FIELD_RUNID, runId), includeSoftDeleted, from, size); } @Override - public List findByUrn(String urn, boolean includeSoftDeleted, int from, int size) { + public List findByUrn( + String urn, boolean includeSoftDeleted, int from, int size) { return findByParams(Collections.singletonMap(FIELD_URN, urn), includeSoftDeleted, from, size); } @Override - public List findByParams(Map systemMetaParams, boolean includeSoftDeleted, int from, - int size) { - SearchResponse searchResponse = _esDAO.findByParams(systemMetaParams, includeSoftDeleted, from, size); + public List findByParams( + Map systemMetaParams, boolean includeSoftDeleted, int from, int size) { + SearchResponse searchResponse = + _esDAO.findByParams(systemMetaParams, includeSoftDeleted, from, size); if (searchResponse != null) { SearchHits hits = searchResponse.getHits(); - List summaries = Arrays.stream(hits.getHits()).map(hit -> { - Map values = hit.getSourceAsMap(); - AspectRowSummary summary = new AspectRowSummary(); - summary.setRunId((String) values.get(FIELD_RUNID)); - summary.setAspectName((String) values.get(FIELD_ASPECT)); - summary.setUrn((String) values.get(FIELD_URN)); - Object timestamp = values.get(FIELD_LAST_UPDATED); - if (timestamp instanceof Long) { - summary.setTimestamp((Long) timestamp); - } else if (timestamp instanceof Integer) { - summary.setTimestamp(Long.valueOf((Integer) timestamp)); - } - summary.setKeyAspect(((String) values.get(FIELD_ASPECT)).endsWith("Key")); - return summary; - }).collect(Collectors.toList()); + List summaries = + Arrays.stream(hits.getHits()) + .map( + hit -> { + Map values = hit.getSourceAsMap(); + AspectRowSummary summary = new AspectRowSummary(); + summary.setRunId((String) values.get(FIELD_RUNID)); + summary.setAspectName((String) values.get(FIELD_ASPECT)); + summary.setUrn((String) values.get(FIELD_URN)); + Object timestamp = values.get(FIELD_LAST_UPDATED); + if (timestamp instanceof Long) { + summary.setTimestamp((Long) timestamp); + } else if (timestamp instanceof Integer) { + summary.setTimestamp(Long.valueOf((Integer) timestamp)); + } + summary.setKeyAspect(((String) values.get(FIELD_ASPECT)).endsWith("Key")); + return summary; + }) + .collect(Collectors.toList()); return summaries; } else { return Collections.emptyList(); @@ -170,8 +185,8 @@ public List findByParams(Map systemMetaParams, } @Override - public List findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, - int from, int size) { + public List findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size) { Map registryParams = new HashMap<>(); registryParams.put(FIELD_REGISTRY_NAME, registryName); registryParams.put(FIELD_REGISTRY_VERSION, registryVersion); @@ -179,26 +194,34 @@ public List findByRegistry(String registryName, String registr } @Override - public List listRuns(Integer pageOffset, Integer pageSize, boolean includeSoftDeleted) { + public List listRuns( + Integer pageOffset, Integer pageSize, boolean includeSoftDeleted) { SearchResponse response = _esDAO.findRuns(pageOffset, pageSize); - List buckets = 
((ParsedStringTerms) response.getAggregations().get("runId")).getBuckets(); + List buckets = + ((ParsedStringTerms) response.getAggregations().get("runId")).getBuckets(); if (!includeSoftDeleted) { - buckets.removeIf(bucket -> { - long totalDocs = bucket.getDocCount(); - long softDeletedDocs = ((ParsedFilter) bucket.getAggregations().get("removed")).getDocCount(); - return totalDocs == softDeletedDocs; - }); + buckets.removeIf( + bucket -> { + long totalDocs = bucket.getDocCount(); + long softDeletedDocs = + ((ParsedFilter) bucket.getAggregations().get("removed")).getDocCount(); + return totalDocs == softDeletedDocs; + }); } // TODO(gabe-lyons): add sample urns - return buckets.stream().map(bucket -> { - IngestionRunSummary entry = new IngestionRunSummary(); - entry.setRunId(bucket.getKeyAsString()); - entry.setTimestamp((long) ((ParsedMax) bucket.getAggregations().get("maxTimestamp")).getValue()); - entry.setRows(bucket.getDocCount()); - return entry; - }).collect(Collectors.toList()); + return buckets.stream() + .map( + bucket -> { + IngestionRunSummary entry = new IngestionRunSummary(); + entry.setRunId(bucket.getKeyAsString()); + entry.setTimestamp( + (long) ((ParsedMax) bucket.getAggregations().get("maxTimestamp")).getValue()); + entry.setRows(bucket.getDocCount()); + return entry; + }) + .collect(Collectors.toList()); } @Override @@ -215,8 +238,11 @@ public void configure() { @Override public List buildReindexConfigs() throws IOException { - return List.of(_indexBuilder.buildReindexState(_indexConvention.getIndexName(INDEX_NAME), - SystemMetadataMappingsBuilder.getMappings(), Collections.emptyMap())); + return List.of( + _indexBuilder.buildReindexState( + _indexConvention.getIndexName(INDEX_NAME), + SystemMetadataMappingsBuilder.getMappings(), + Collections.emptyMap())); } @Override @@ -227,6 +253,7 @@ public void reindexAll() { @VisibleForTesting @Override public void clear() { - _esBulkProcessor.deleteByQuery(QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); + _esBulkProcessor.deleteByQuery( + QueryBuilders.matchAllQuery(), true, _indexConvention.getIndexName(INDEX_NAME)); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java index c19283aa44ac1..6bce654fb1481 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataEntry.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @AllArgsConstructor @Data public class SystemMetadataEntry { diff --git a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java index 535610ffbf37f..6623580548706 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataMappingsBuilder.java @@ -5,11 +5,10 @@ import java.util.Map; import lombok.extern.slf4j.Slf4j; - @Slf4j public class SystemMetadataMappingsBuilder { - private SystemMetadataMappingsBuilder() { } + private SystemMetadataMappingsBuilder() {} public static Map getMappings() { Map mappings = new HashMap<>(); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java 
b/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java index c0b1239ffa835..0105215565117 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/MissingEntityAspect.java @@ -6,5 +6,4 @@ @Value @EqualsAndHashCode(callSuper = false) -public class MissingEntityAspect extends EntityAspect { -} +public class MissingEntityAspect extends EntityAspect {} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java index 9658d9d3ab036..a93d4880a7979 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/TimelineServiceImpl.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline; +import static com.linkedin.common.urn.VersionedUrnUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -44,24 +47,30 @@ import javax.annotation.Nonnull; import org.apache.commons.collections.CollectionUtils; -import static com.linkedin.common.urn.VersionedUrnUtils.*; -import static com.linkedin.metadata.Constants.*; - public class TimelineServiceImpl implements TimelineService { - private static final long DEFAULT_LOOKBACK_TIME_WINDOW_MILLIS = 7 * 24 * 60 * 60 * 1000L; // 1 week lookback + private static final long DEFAULT_LOOKBACK_TIME_WINDOW_MILLIS = + 7 * 24 * 60 * 60 * 1000L; // 1 week lookback private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final long FIRST_TRANSACTION_ID = 0; private static final String BUILD_VALUE_COMPUTED = "computed"; private final AspectDao _aspectDao; private final EntityChangeEventGeneratorFactory _entityChangeEventGeneratorFactory; private final EntityRegistry _entityRegistry; - private final HashMap>> entityTypeElementAspectRegistry = new HashMap<>(); + private final HashMap>> + entityTypeElementAspectRegistry = new HashMap<>(); public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry entityRegistry) { this._aspectDao = aspectDao; @@ -76,56 +85,97 @@ public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry for (ChangeCategory elementName : ChangeCategory.values()) { Set aspects = new HashSet<>(); switch (elementName) { - case TAG: { - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - 
aspects.add(GLOBAL_TAGS_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, GLOBAL_TAGS_ASPECT_NAME, - new GlobalTagsChangeEventGenerator()); - } + case TAG: + { + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + aspects.add(GLOBAL_TAGS_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + GLOBAL_TAGS_ASPECT_NAME, + new GlobalTagsChangeEventGenerator()); + } break; - case OWNER: { - aspects.add(OWNERSHIP_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, OWNERSHIP_ASPECT_NAME, - new OwnershipChangeEventGenerator()); - } + case OWNER: + { + aspects.add(OWNERSHIP_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + OWNERSHIP_ASPECT_NAME, + new OwnershipChangeEventGenerator()); + } break; - case DOCUMENTATION: { - aspects.add(INSTITUTIONAL_MEMORY_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, INSTITUTIONAL_MEMORY_ASPECT_NAME, - new InstitutionalMemoryChangeEventGenerator()); - aspects.add(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - new EditableDatasetPropertiesChangeEventGenerator()); - aspects.add(DATASET_PROPERTIES_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, DATASET_PROPERTIES_ASPECT_NAME, - new DatasetPropertiesChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - } + case DOCUMENTATION: + { + aspects.add(INSTITUTIONAL_MEMORY_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + new InstitutionalMemoryChangeEventGenerator()); + aspects.add(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + new EditableDatasetPropertiesChangeEventGenerator()); + aspects.add(DATASET_PROPERTIES_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + DATASET_PROPERTIES_ASPECT_NAME, + new DatasetPropertiesChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + } break; - case GLOSSARY_TERM: { - aspects.add(GLOSSARY_TERMS_ASPECT_NAME); - 
_entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, GLOSSARY_TERMS_ASPECT_NAME, - new GlossaryTermsChangeEventGenerator()); - aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - new EditableSchemaMetadataChangeEventGenerator()); - } + case GLOSSARY_TERM: + { + aspects.add(GLOSSARY_TERMS_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + GLOSSARY_TERMS_ASPECT_NAME, + new GlossaryTermsChangeEventGenerator()); + aspects.add(EDITABLE_SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + new EditableSchemaMetadataChangeEventGenerator()); + } break; - case TECHNICAL_SCHEMA: { - aspects.add(SCHEMA_METADATA_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityType, elementName, SCHEMA_METADATA_ASPECT_NAME, - new SchemaMetadataChangeEventGenerator()); - } + case TECHNICAL_SCHEMA: + { + aspects.add(SCHEMA_METADATA_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityType, + elementName, + SCHEMA_METADATA_ASPECT_NAME, + new SchemaMetadataChangeEventGenerator()); + } break; default: break; @@ -139,25 +189,34 @@ public TimelineServiceImpl(@Nonnull AspectDao aspectDao, @Nonnull EntityRegistry for (ChangeCategory elementName : ChangeCategory.values()) { Set aspects = new HashSet<>(); switch (elementName) { - case OWNER: { - aspects.add(OWNERSHIP_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityTypeGlossaryTerm, elementName, OWNERSHIP_ASPECT_NAME, - new OwnershipChangeEventGenerator()); - } - break; - case DOCUMENTATION: { - aspects.add(GLOSSARY_TERM_INFO_ASPECT_NAME); - _entityChangeEventGeneratorFactory.addGenerator(entityTypeGlossaryTerm, elementName, GLOSSARY_TERM_INFO_ASPECT_NAME, - new GlossaryTermInfoChangeEventGenerator()); - } - break; + case OWNER: + { + aspects.add(OWNERSHIP_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityTypeGlossaryTerm, + elementName, + OWNERSHIP_ASPECT_NAME, + new OwnershipChangeEventGenerator()); + } + break; + case DOCUMENTATION: + { + aspects.add(GLOSSARY_TERM_INFO_ASPECT_NAME); + _entityChangeEventGeneratorFactory.addGenerator( + entityTypeGlossaryTerm, + elementName, + GLOSSARY_TERM_INFO_ASPECT_NAME, + new GlossaryTermInfoChangeEventGenerator()); + } + break; default: break; } glossaryTermElementAspectRegistry.put(elementName, aspects); } entityTypeElementAspectRegistry.put(DATASET_ENTITY_NAME, datasetElementAspectRegistry); - entityTypeElementAspectRegistry.put(GLOSSARY_TERM_ENTITY_NAME, glossaryTermElementAspectRegistry); + entityTypeElementAspectRegistry.put( + GLOSSARY_TERM_ENTITY_NAME, glossaryTermElementAspectRegistry); } Set getAspectsFromElements(String entityType, Set elementNames) { @@ -173,15 +232,21 @@ Set getAspectsFromElements(String entityType, Set elemen @Nonnull @Override - public List getTimeline(@Nonnull final Urn urn, @Nonnull final Set elementNames, - long startTimeMillis, long endTimeMillis, String startVersionStamp, String endVersionStamp, + public List getTimeline( + @Nonnull final Urn urn, + @Nonnull final Set elementNames, + long startTimeMillis, + long endTimeMillis, + String startVersionStamp, + String endVersionStamp, boolean rawDiffRequested) { Set aspectNames = getAspectsFromElements(urn.getEntityType(), elementNames); // TODO: Add more logic for defaults if (startVersionStamp != null 
&& startTimeMillis != 0) { - throw new IllegalArgumentException("Cannot specify both VersionStamp start and timestamp start"); + throw new IllegalArgumentException( + "Cannot specify both VersionStamp start and timestamp start"); } if (endTimeMillis == 0) { @@ -195,58 +260,67 @@ public List getTimeline(@Nonnull final Urn urn, @Nonnull fina // query EntitySpec entitySpec = _entityRegistry.getEntitySpec(urn.getEntityType()); List aspectSpecs = entitySpec.getAspectSpecs(); - Set fullAspectNames = aspectSpecs.stream() - .filter(aspectSpec -> !aspectSpec.isTimeseries()) - .map(AspectSpec::getName) - .collect(Collectors.toSet()); - List aspectsInRange = this._aspectDao.getAspectsInRange(urn, fullAspectNames, startTimeMillis, - endTimeMillis); + Set fullAspectNames = + aspectSpecs.stream() + .filter(aspectSpec -> !aspectSpec.isTimeseries()) + .map(AspectSpec::getName) + .collect(Collectors.toSet()); + List aspectsInRange = + this._aspectDao.getAspectsInRange(urn, fullAspectNames, startTimeMillis, endTimeMillis); // Prepopulate with all versioned aspectNames -> ignore timeseries using // registry - Map> aspectRowSetMap = constructAspectRowSetMap(urn, fullAspectNames, aspectsInRange); + Map> aspectRowSetMap = + constructAspectRowSetMap(urn, fullAspectNames, aspectsInRange); - Map> timestampVersionCache = constructTimestampVersionCache(aspectRowSetMap); + Map> timestampVersionCache = + constructTimestampVersionCache(aspectRowSetMap); // TODO: There are some extra steps happening here, we need to clean up how // transactions get combined across differs - SortedMap> semanticDiffs = aspectRowSetMap.entrySet() - .stream() - .filter(entry -> aspectNames.contains(entry.getKey())) - .map(Map.Entry::getValue) - .map(value -> computeDiffs(value, urn.getEntityType(), elementNames, rawDiffRequested)) - .collect(TreeMap::new, this::combineComputedDiffsPerTransactionId, this::combineComputedDiffsPerTransactionId); + SortedMap> semanticDiffs = + aspectRowSetMap.entrySet().stream() + .filter(entry -> aspectNames.contains(entry.getKey())) + .map(Map.Entry::getValue) + .map(value -> computeDiffs(value, urn.getEntityType(), elementNames, rawDiffRequested)) + .collect( + TreeMap::new, + this::combineComputedDiffsPerTransactionId, + this::combineComputedDiffsPerTransactionId); // TODO:Move this down assignSemanticVersions(semanticDiffs); - List changeTransactions = semanticDiffs.values().stream().collect(ArrayList::new, - ArrayList::addAll, ArrayList::addAll); - List combinedChangeTransactions = combineTransactionsByTimestamp(changeTransactions, - timestampVersionCache); + List changeTransactions = + semanticDiffs.values().stream() + .collect(ArrayList::new, ArrayList::addAll, ArrayList::addAll); + List combinedChangeTransactions = + combineTransactionsByTimestamp(changeTransactions, timestampVersionCache); combinedChangeTransactions.sort(Comparator.comparing(ChangeTransaction::getTimestamp)); return combinedChangeTransactions; } /** - * Constructs a map from aspect name to a sorted set of DB aspects by created - * timestamp. Set includes all aspects - * relevant to an entity and does a lookback by 1 for all aspects, creating - * sentinel values for when the oldest aspect - * possible has been retrieved or no value exists in the DB for an aspect - * - * @param urn urn of the entity + * Constructs a map from aspect name to a sorted set of DB aspects by created timestamp. 
Set + * includes all aspects relevant to an entity and does a lookback by 1 for all aspects, creating + * sentinel values for when the oldest aspect possible has been retrieved or no value exists in + * the DB for an aspect + * + * @param urn urn of the entity * @param fullAspectNames full list of aspects relevant to the entity - * @param aspectsInRange aspects returned by the range query by timestampm + * @param aspectsInRange aspects returned by the range query by timestampm * @return map constructed as described */ - private Map> constructAspectRowSetMap(Urn urn, Set fullAspectNames, - List aspectsInRange) { + private Map> constructAspectRowSetMap( + Urn urn, Set fullAspectNames, List aspectsInRange) { Map> aspectRowSetMap = new HashMap<>(); fullAspectNames.forEach( - aspectName -> aspectRowSetMap.put(aspectName, new TreeSet<>(Comparator.comparing(EntityAspect::getCreatedOn)))); - aspectsInRange.forEach(row -> { - TreeSet rowList = aspectRowSetMap.get(row.getAspect()); - rowList.add(row); - }); + aspectName -> + aspectRowSetMap.put( + aspectName, new TreeSet<>(Comparator.comparing(EntityAspect::getCreatedOn)))); + aspectsInRange.forEach( + row -> { + TreeSet rowList = aspectRowSetMap.get(row.getAspect()); + rowList.add(row); + }); // we need to pull previous versions of these aspects that are currently at a 0 Map nextVersions = _aspectDao.getNextVersions(urn.toString(), fullAspectNames); @@ -267,9 +341,11 @@ private Map> constructAspectRowSetMap(Urn urn, Set // get the next version long versionToGet = 0; if (oldestAspect != null) { - versionToGet = (oldestAspect.getVersion() == 0L) ? nextVersion - 1 : oldestAspect.getVersion() - 1; + versionToGet = + (oldestAspect.getVersion() == 0L) ? nextVersion - 1 : oldestAspect.getVersion() - 1; } - EntityAspect row = _aspectDao.getAspect(urn.toString(), aspectMinVersion.getKey(), versionToGet); + EntityAspect row = + _aspectDao.getAspect(urn.toString(), aspectMinVersion.getKey(), versionToGet); if (row != null) { aspectRowSetMap.get(row.getAspect()).add(row); } else { @@ -281,8 +357,7 @@ private Map> constructAspectRowSetMap(Urn urn, Set } private boolean isOldestPossible(EntityAspect oldestAspect, long nextVersion) { - return (((oldestAspect.getVersion() == 0L) - && (nextVersion == 1L)) + return (((oldestAspect.getVersion() == 0L) && (nextVersion == 1L)) || (oldestAspect.getVersion() == 1L)); } @@ -295,19 +370,20 @@ private MissingEntityAspect createSentinel(String aspectName) { } /** - * Constructs a map from timestamp to a sorted map of aspect name -> version for - * use in constructing the version stamp - * - * @param aspectRowSetMap map constructed as described in - * {@link TimelineServiceImpl#constructAspectRowSetMap} + * Constructs a map from timestamp to a sorted map of aspect name -> version for use in + * constructing the version stamp + * + * @param aspectRowSetMap map constructed as described in {@link + * TimelineServiceImpl#constructAspectRowSetMap} * @return map as described */ private Map> constructTimestampVersionCache( Map> aspectRowSetMap) { - Set aspects = aspectRowSetMap.values().stream() - .flatMap(TreeSet::stream) - .filter(aspect -> aspect.getVersion() != -1L) - .collect(Collectors.toSet()); + Set aspects = + aspectRowSetMap.values().stream() + .flatMap(TreeSet::stream) + .filter(aspect -> aspect.getVersion() != -1L) + .collect(Collectors.toSet()); Map> timestampVersionCache = new HashMap<>(); for (EntityAspect aspect : aspects) { if (timestampVersionCache.containsKey(aspect.getCreatedOn().getTime())) { @@ -341,8 +417,11 
@@ private Map> constructTimestampVersionCache( return timestampVersionCache; } - private SortedMap> computeDiffs(TreeSet aspectTimeline, - String entityType, Set elementNames, boolean rawDiffsRequested) { + private SortedMap> computeDiffs( + TreeSet aspectTimeline, + String entityType, + Set elementNames, + boolean rawDiffsRequested) { EntityAspect previousValue = null; SortedMap> changeTransactionsMap = new TreeMap<>(); long transactionId; @@ -350,7 +429,8 @@ private SortedMap> computeDiffs(TreeSet> computeDiffs(TreeSet computeDiff(@Nonnull EntityAspect previousValue, @Nonnull EntityAspect currentValue, - String entityType, Set elementNames, boolean rawDiffsRequested) { + private List computeDiff( + @Nonnull EntityAspect previousValue, + @Nonnull EntityAspect currentValue, + String entityType, + Set elementNames, + boolean rawDiffsRequested) { String aspectName = currentValue.getAspect(); List semanticChangeTransactions = new ArrayList<>(); JsonPatch rawDiff = getRawDiff(previousValue, currentValue); for (ChangeCategory element : elementNames) { EntityChangeEventGenerator entityChangeEventGenerator; - entityChangeEventGenerator = _entityChangeEventGeneratorFactory.getGenerator(entityType, element, aspectName); + entityChangeEventGenerator = + _entityChangeEventGeneratorFactory.getGenerator(entityType, element, aspectName); if (entityChangeEventGenerator != null) { try { ChangeTransaction changeTransaction = - entityChangeEventGenerator.getSemanticDiff(previousValue, currentValue, element, rawDiff, - rawDiffsRequested); + entityChangeEventGenerator.getSemanticDiff( + previousValue, currentValue, element, rawDiff, rawDiffsRequested); if (CollectionUtils.isNotEmpty(changeTransaction.getChangeEvents())) { semanticChangeTransactions.add(changeTransaction); } } catch (Exception e) { - semanticChangeTransactions.add(ChangeTransaction.builder() - .semVerChange(SemanticChangeType.EXCEPTIONAL) - .changeEvents(Collections.singletonList(ChangeEvent.builder() - .description(String.format("%s:%s", e.getClass().getName(), e.getMessage())) - .build())) - .build()); + semanticChangeTransactions.add( + ChangeTransaction.builder() + .semVerChange(SemanticChangeType.EXCEPTIONAL) + .changeEvents( + Collections.singletonList( + ChangeEvent.builder() + .description( + String.format("%s:%s", e.getClass().getName(), e.getMessage())) + .build())) + .build()); } } } @@ -401,7 +490,8 @@ private JsonPatch getRawDiff(EntityAspect previousValue, EntityAspect currentVal } } - private void combineComputedDiffsPerTransactionId(@Nonnull SortedMap> semanticDiffs, + private void combineComputedDiffsPerTransactionId( + @Nonnull SortedMap> semanticDiffs, @Nonnull SortedMap> computedDiffs) { for (Map.Entry> entry : computedDiffs.entrySet()) { if (!semanticDiffs.containsKey(entry.getKey())) { @@ -414,18 +504,22 @@ private void combineComputedDiffsPerTransactionId(@Nonnull SortedMap> changeTransactionsMap) { + private void assignSemanticVersions( + SortedMap> changeTransactionsMap) { SemanticVersion curGroupVersion = null; long transactionId = FIRST_TRANSACTION_ID - 1; for (Map.Entry> entry : changeTransactionsMap.entrySet()) { if (transactionId >= entry.getKey()) { - throw new IllegalArgumentException(String.format("transactionId should be < previous. %s >= %s", - transactionId, entry.getKey())); + throw new IllegalArgumentException( + String.format( + "transactionId should be < previous. 
%s >= %s", transactionId, entry.getKey())); } transactionId = entry.getKey(); SemanticChangeType highestChangeInGroup = SemanticChangeType.NONE; - ChangeTransaction highestChangeTransaction = entry.getValue().stream() - .max(Comparator.comparing(ChangeTransaction::getSemVerChange)).orElse(null); + ChangeTransaction highestChangeTransaction = + entry.getValue().stream() + .max(Comparator.comparing(ChangeTransaction::getSemVerChange)) + .orElse(null); if (highestChangeTransaction != null) { highestChangeInGroup = highestChangeTransaction.getSemVerChange(); } @@ -436,8 +530,8 @@ private void assignSemanticVersions(SortedMap> cha } } - private SemanticVersion getGroupSemanticVersion(SemanticChangeType highestChangeInGroup, - SemanticVersion previousVersion) { + private SemanticVersion getGroupSemanticVersion( + SemanticChangeType highestChangeInGroup, SemanticVersion previousVersion) { if (previousVersion == null) { // Start with all 0s if there is no previous version. return SemanticVersion.builder() @@ -477,10 +571,11 @@ private SemanticVersion getGroupSemanticVersion(SemanticChangeType highestChange return previousVersion; } - private List combineTransactionsByTimestamp(List changeTransactions, + private List combineTransactionsByTimestamp( + List changeTransactions, Map> timestampVersionCache) { - Map> transactionsByTimestamp = changeTransactions.stream() - .collect(Collectors.groupingBy(ChangeTransaction::getTimestamp)); + Map> transactionsByTimestamp = + changeTransactions.stream().collect(Collectors.groupingBy(ChangeTransaction::getTimestamp)); List combinedChangeTransactions = new ArrayList<>(); for (List transactionList : transactionsByTimestamp.values()) { if (!transactionList.isEmpty()) { @@ -490,14 +585,17 @@ private List combineTransactionsByTimestamp(List= 0 - ? maxSemanticChangeType - : element.getSemVerChange(); - maxSemVer = maxSemVer.compareTo(element.getSemVer()) >= 0 ? maxSemVer : element.getSemVer(); + maxSemanticChangeType = + maxSemanticChangeType.compareTo(element.getSemVerChange()) >= 0 + ? maxSemanticChangeType + : element.getSemVerChange(); + maxSemVer = + maxSemVer.compareTo(element.getSemVer()) >= 0 ? 
maxSemVer : element.getSemVer(); } result.setSemVerChange(maxSemanticChangeType); result.setSemanticVersion(maxSemVer); - result.setVersionStamp(constructVersionStamp(timestampVersionCache.get(result.getTimestamp()))); + result.setVersionStamp( + constructVersionStamp(timestampVersionCache.get(result.getTimestamp()))); combinedChangeTransactions.add(result); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java index a9c5d56a7e445..84308d9b2311f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/DatasetSchemaFieldChangeEvent.java @@ -27,8 +27,7 @@ public DatasetSchemaFieldChangeEvent( String description, String fieldPath, Urn fieldUrn, - boolean nullable - ) { + boolean nullable) { super( entityUrn, category, @@ -37,12 +36,9 @@ public DatasetSchemaFieldChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "fieldUrn", fieldUrn.toString(), - "nullable", nullable - ), + "nullable", nullable), auditStamp, semVerChange, - description - ); + description); } } - diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java index eddacf3714f61..f1d9862fb33a2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldGlossaryTermChangeEvent.java @@ -4,8 +4,8 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeCategory; -import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.ChangeEvent; +import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.SemanticChangeType; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -27,8 +27,7 @@ public SchemaFieldGlossaryTermChangeEvent( String description, String fieldPath, Urn parentUrn, - Urn termUrn - ) { + Urn termUrn) { super( entityUrn, category, @@ -37,11 +36,9 @@ public SchemaFieldGlossaryTermChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "parentUrn", parentUrn.toString(), - "termUrn", termUrn.toString() - ), + "termUrn", termUrn.toString()), auditStamp, semVerChange, - description - ); + description); } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java index b4553f9048e3a..10e6ae6ca4af8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/dataset/schema/SchemaFieldTagChangeEvent.java @@ -4,8 +4,8 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeCategory; -import com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.ChangeEvent; +import 
com.linkedin.metadata.timeline.data.ChangeOperation; import com.linkedin.metadata.timeline.data.SemanticChangeType; import lombok.Builder; import lombok.EqualsAndHashCode; @@ -27,8 +27,7 @@ public SchemaFieldTagChangeEvent( String description, String fieldPath, Urn parentUrn, - Urn tagUrn - ) { + Urn tagUrn) { super( entityUrn, category, @@ -37,11 +36,9 @@ public SchemaFieldTagChangeEvent( ImmutableMap.of( "fieldPath", fieldPath, "parentUrn", parentUrn.toString(), - "tagUrn", tagUrn.toString() - ), + "tagUrn", tagUrn.toString()), auditStamp, semVerChange, - description - ); + description); } -} \ No newline at end of file +} diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java index 5a306635f6a81..33dfdb68cb9e6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/DomainChangeEvent.java @@ -12,7 +12,6 @@ import lombok.Getter; import lombok.Value; - @EqualsAndHashCode(callSuper = true) @Value @Getter @@ -26,19 +25,15 @@ public DomainChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn domainUrn - ) { + Urn domainUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "domainUrn", domainUrn.toString() - ), + ImmutableMap.of("domainUrn", domainUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java index dfa659dad67b1..564dc63c1a678 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/GlossaryTermChangeEvent.java @@ -13,7 +13,6 @@ import lombok.Value; import lombok.experimental.NonFinal; - @EqualsAndHashCode(callSuper = true) @Value @NonFinal @@ -28,19 +27,15 @@ public GlossaryTermChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn termUrn - ) { + Urn termUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "termUrn", termUrn.toString() - ), + ImmutableMap.of("termUrn", termUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java index b9efe7113bcfb..fc4f0327b7704 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/OwnerChangeEvent.java @@ -27,8 +27,7 @@ public OwnerChangeEvent( SemanticChangeType semVerChange, String description, Urn ownerUrn, - OwnershipType ownerType - ) { + OwnershipType ownerType) { super( entityUrn, category, @@ -36,11 +35,9 @@ public OwnerChangeEvent( modifier, ImmutableMap.of( "ownerUrn", ownerUrn.toString(), - "ownerType", ownerType.toString() - ), + "ownerType", ownerType.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java 
b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java index 09dba21ff3988..b19a4a1558ab6 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/data/entity/TagChangeEvent.java @@ -13,7 +13,6 @@ import lombok.Value; import lombok.experimental.NonFinal; - @EqualsAndHashCode(callSuper = true) @Value @NonFinal @@ -28,19 +27,15 @@ public TagChangeEvent( AuditStamp auditStamp, SemanticChangeType semVerChange, String description, - Urn tagUrn - ) { + Urn tagUrn) { super( entityUrn, category, operation, modifier, - ImmutableMap.of( - "tagUrn", tagUrn.toString() - ), + ImmutableMap.of("tagUrn", tagUrn.toString()), auditStamp, semVerChange, - description - ); + description); } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java index 60008826afc61..f83eded55ff9c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/Aspect.java @@ -5,20 +5,13 @@ import lombok.AllArgsConstructor; import lombok.Value; - -/** - * Thin wrapper for an aspect value which is used within the Entity Change Event API. - */ +/** Thin wrapper for an aspect value which is used within the Entity Change Event API. */ @Value @AllArgsConstructor public class Aspect { - /** - * The aspect value itself. - */ + /** The aspect value itself. */ T value; - /** - * System metadata - */ + /** System metadata */ SystemMetadata systemMetadata; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java index b615189fe7314..1a8e54e5baf4a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/AssertionRunEventChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSortedMap; import com.linkedin.assertion.AssertionResult; import com.linkedin.assertion.AssertionRunEvent; @@ -14,10 +16,8 @@ import java.util.Map; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class AssertionRunEventChangeEventGenerator extends EntityChangeEventGenerator { +public class AssertionRunEventChangeEventGenerator + extends EntityChangeEventGenerator { @Override public List getChangeEvents( @Nonnull Urn urn, @@ -39,22 +39,22 @@ private List computeDiffs( boolean isNewCompleted = isCompleted(newAspect); if (isNewCompleted && !isPreviousCompleted) { - return Collections.singletonList(ChangeEvent.builder() - .category(ChangeCategory.RUN) - .operation(ChangeOperation.COMPLETED) - .auditStamp(auditStamp) - .entityUrn(entityUrn) - .parameters(buildParameters(newAspect)) - .build()); + return Collections.singletonList( + ChangeEvent.builder() + .category(ChangeCategory.RUN) + .operation(ChangeOperation.COMPLETED) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + .parameters(buildParameters(newAspect)) + .build()); } return Collections.emptyList(); } private boolean 
isCompleted(final AssertionRunEvent assertionRunEvent) { - return assertionRunEvent != null && assertionRunEvent.getStatus() - .toString() - .equals(ASSERTION_RUN_EVENT_STATUS_COMPLETE); + return assertionRunEvent != null + && assertionRunEvent.getStatus().toString().equals(ASSERTION_RUN_EVENT_STATUS_COMPLETE); } @Nonnull diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java index e5237cc5abc39..f6192294e5701 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/ChangeEventGeneratorUtils.java @@ -13,25 +13,29 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class ChangeEventGeneratorUtils { - public static Urn getSchemaFieldUrn(@Nonnull String datasetUrnStr, @Nonnull String schemaFieldPath) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrnStr, schemaFieldPath)); + public static Urn getSchemaFieldUrn( + @Nonnull String datasetUrnStr, @Nonnull String schemaFieldPath) { + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrnStr, schemaFieldPath)); } public static Urn getSchemaFieldUrn(@Nonnull Urn datasetUrn, @Nonnull String schemaFieldPath) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrn.toString(), schemaFieldPath)); + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrn.toString(), schemaFieldPath)); } public static Urn getSchemaFieldUrn(@Nonnull Urn datasetUrn, @Nonnull SchemaField schemaField) { - return UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", datasetUrn, getFieldPathV1(schemaField))); + return UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", datasetUrn, getFieldPathV1(schemaField))); } public static String getFieldPathV1(@Nonnull SchemaField field) { - String[] v1PathTokens = Arrays.stream(field.getFieldPath().split("\\.")) - .filter(x -> !(x.startsWith("[") || x.endsWith("]"))) - .toArray(String[]::new); + String[] v1PathTokens = + Arrays.stream(field.getFieldPath().split("\\.")) + .filter(x -> !(x.startsWith("[") || x.endsWith("]"))) + .toArray(String[]::new); return String.join(".", v1PathTokens); } @@ -42,19 +46,22 @@ public static List convertEntityTagChangeEvents( return entityTagChangeEvents.stream() .filter(entityTagChangeEvent -> entityTagChangeEvent instanceof TagChangeEvent) .map(entityTagChangeEvent -> (TagChangeEvent) entityTagChangeEvent) - .map(entityTagChangeEvent -> - SchemaFieldTagChangeEvent.schemaFieldTagChangeEventBuilder() - .modifier(entityTagChangeEvent.getModifier()) - .entityUrn(entityTagChangeEvent.getEntityUrn()) - .category(entityTagChangeEvent.getCategory()) - .operation(entityTagChangeEvent.getOperation()) - .semVerChange(entityTagChangeEvent.getSemVerChange()) - .description(entityTagChangeEvent.getDescription()) - .tagUrn(UrnUtils.getUrn((String) entityTagChangeEvent.getParameters().get("tagUrn"))) - .auditStamp(entityTagChangeEvent.getAuditStamp()) - .fieldPath(fieldPath) - .parentUrn(parentUrn) - .build()) + .map( + entityTagChangeEvent -> + SchemaFieldTagChangeEvent.schemaFieldTagChangeEventBuilder() + .modifier(entityTagChangeEvent.getModifier()) + .entityUrn(entityTagChangeEvent.getEntityUrn()) + .category(entityTagChangeEvent.getCategory()) + 
.operation(entityTagChangeEvent.getOperation()) + .semVerChange(entityTagChangeEvent.getSemVerChange()) + .description(entityTagChangeEvent.getDescription()) + .tagUrn( + UrnUtils.getUrn( + (String) entityTagChangeEvent.getParameters().get("tagUrn"))) + .auditStamp(entityTagChangeEvent.getAuditStamp()) + .fieldPath(fieldPath) + .parentUrn(parentUrn) + .build()) .collect(Collectors.toList()); } @@ -63,23 +70,30 @@ public static List convertEntityGlossaryTermChangeEvents( @Nonnull Urn parentUrn, @Nonnull List entityGlossaryTermChangeEvents) { return entityGlossaryTermChangeEvents.stream() - .filter(entityGlossaryTermChangeEvent -> entityGlossaryTermChangeEvent instanceof GlossaryTermChangeEvent) - .map(entityGlossaryTermChangeEvent -> (GlossaryTermChangeEvent) entityGlossaryTermChangeEvent) - .map(entityGlossaryTermChangeEvent -> - SchemaFieldGlossaryTermChangeEvent.schemaFieldGlossaryTermChangeEventBuilder() - .modifier(entityGlossaryTermChangeEvent.getModifier()) - .entityUrn(entityGlossaryTermChangeEvent.getEntityUrn()) - .category(entityGlossaryTermChangeEvent.getCategory()) - .operation(entityGlossaryTermChangeEvent.getOperation()) - .semVerChange(entityGlossaryTermChangeEvent.getSemVerChange()) - .description(entityGlossaryTermChangeEvent.getDescription()) - .termUrn(UrnUtils.getUrn((String) entityGlossaryTermChangeEvent.getParameters().get("termUrn"))) - .auditStamp(entityGlossaryTermChangeEvent.getAuditStamp()) - .fieldPath(fieldPath) - .parentUrn(parentUrn) - .build()) + .filter( + entityGlossaryTermChangeEvent -> + entityGlossaryTermChangeEvent instanceof GlossaryTermChangeEvent) + .map( + entityGlossaryTermChangeEvent -> + (GlossaryTermChangeEvent) entityGlossaryTermChangeEvent) + .map( + entityGlossaryTermChangeEvent -> + SchemaFieldGlossaryTermChangeEvent.schemaFieldGlossaryTermChangeEventBuilder() + .modifier(entityGlossaryTermChangeEvent.getModifier()) + .entityUrn(entityGlossaryTermChangeEvent.getEntityUrn()) + .category(entityGlossaryTermChangeEvent.getCategory()) + .operation(entityGlossaryTermChangeEvent.getOperation()) + .semVerChange(entityGlossaryTermChangeEvent.getSemVerChange()) + .description(entityGlossaryTermChangeEvent.getDescription()) + .termUrn( + UrnUtils.getUrn( + (String) entityGlossaryTermChangeEvent.getParameters().get("termUrn"))) + .auditStamp(entityGlossaryTermChangeEvent.getAuditStamp()) + .fieldPath(fieldPath) + .parentUrn(parentUrn) + .build()) .collect(Collectors.toList()); } - private ChangeEventGeneratorUtils() { } + private ChangeEventGeneratorUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java index a3e5a051a47e3..ca30060b5ed29 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DataProcessInstanceRunEventChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.dataprocess.DataProcessInstanceRelationships; @@ -18,15 +20,13 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class 
DataProcessInstanceRunEventChangeEventGenerator extends EntityChangeEventGenerator { private static final String COMPLETED_STATUS = "COMPLETED"; private static final String STARTED_STATUS = "STARTED"; - public DataProcessInstanceRunEventChangeEventGenerator(@Nonnull final SystemEntityClient entityClient) { + public DataProcessInstanceRunEventChangeEventGenerator( + @Nonnull final SystemEntityClient entityClient) { super(entityClient); } @@ -50,15 +50,17 @@ private List computeDiffs( final DataProcessRunStatus newStatus = getStatus(newAspect); if (newStatus != null && !newStatus.equals(previousStatus)) { - String operationType = newStatus.equals(DataProcessRunStatus.COMPLETE) ? COMPLETED_STATUS : STARTED_STATUS; - - return Collections.singletonList(ChangeEvent.builder() - .category(ChangeCategory.RUN) - .operation(ChangeOperation.valueOf(operationType)) - .auditStamp(auditStamp) - .entityUrn(entityUrn) - .parameters(buildParameters(newAspect, entityUrn)) - .build()); + String operationType = + newStatus.equals(DataProcessRunStatus.COMPLETE) ? COMPLETED_STATUS : STARTED_STATUS; + + return Collections.singletonList( + ChangeEvent.builder() + .category(ChangeCategory.RUN) + .operation(ChangeOperation.valueOf(operationType)) + .auditStamp(auditStamp) + .entityUrn(entityUrn) + .parameters(buildParameters(newAspect, entityUrn)) + .build()); } return Collections.emptyList(); @@ -70,8 +72,8 @@ private DataProcessRunStatus getStatus(DataProcessInstanceRunEvent dataProcessIn } @Nonnull - private Map buildParameters(@Nonnull final DataProcessInstanceRunEvent runEvent, - @Nonnull final String entityUrnString) { + private Map buildParameters( + @Nonnull final DataProcessInstanceRunEvent runEvent, @Nonnull final String entityUrnString) { final Map parameters = new HashMap<>(); if (runEvent.hasAttempt()) { parameters.put(ATTEMPT_KEY, runEvent.getAttempt()); @@ -106,8 +108,9 @@ private DataProcessInstanceRelationships getRelationships(@Nonnull final String EntityResponse entityResponse; try { entityUrn = Urn.createFromString(entityUrnString); - entityResponse = _entityClient.getV2(entityUrn, - Collections.singleton(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME)); + entityResponse = + _entityClient.getV2( + entityUrn, Collections.singleton(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME)); } catch (Exception e) { return null; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java index ddfa6530c6999..850df81675b8c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DatasetPropertiesChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -17,46 +20,55 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; - - -public class DatasetPropertiesChangeEventGenerator extends 
EntityChangeEventGenerator { - private static List computeDiffs(DatasetProperties baseDatasetProperties, - @Nonnull DatasetProperties targetDatasetProperties, @Nonnull String entityUrn, AuditStamp auditStamp) { +public class DatasetPropertiesChangeEventGenerator + extends EntityChangeEventGenerator { + private static List computeDiffs( + DatasetProperties baseDatasetProperties, + @Nonnull DatasetProperties targetDatasetProperties, + @Nonnull String entityUrn, + AuditStamp auditStamp) { List changeEvents = new ArrayList<>(); - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; if (baseDescription == null && targetDescription != null) { // Description added - changeEvents.add(ChangeEvent.builder().entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) + .auditStamp(auditStamp) + .build()); } else if (baseDescription != null && targetDescription == null) { // Description removed. - changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { // Description has been modified. 
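// The description diffing in this generator reduces to a three-way null comparison; a
// standalone sketch of that decision table (classify() is a hypothetical helper for
// illustration, not part of this patch):
static ChangeOperation classify(String base, String target) {
  if (base == null && target != null) return ChangeOperation.ADD; // description added
  if (base != null && target == null) return ChangeOperation.REMOVE; // description removed
  if (base != null && !base.equals(target)) return ChangeOperation.MODIFY; // description changed
  return null; // both null or unchanged: no documentation event is emitted
}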
- changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .auditStamp(auditStamp) + .build()); } return changeEvents; } @@ -70,17 +82,23 @@ private static DatasetProperties getDatasetPropertiesFromAspect(EntityAspect ent } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME) || !currentValue.getAspect() - .equals(DATASET_PROPERTIES_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME) + || !currentValue.getAspect().equals(DATASET_PROPERTIES_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + DATASET_PROPERTIES_ASPECT_NAME); } List changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { DatasetProperties baseDatasetProperties = getDatasetPropertiesFromAspect(previousValue); DatasetProperties targetDatasetProperties = getDatasetPropertiesFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs( + baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java index 3ef6f51f99203..59516bfae0533 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/DeprecationChangeEventGenerator.java @@ -12,14 +12,16 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Differ responsible for determining whether an entity has been soft-deleted or soft-created. - */ +/** Differ responsible for determining whether an entity has been soft-deleted or soft-created. */ public class DeprecationChangeEventGenerator extends EntityChangeEventGenerator { @Override - public List getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect, - @Nonnull Aspect from, @Nonnull Aspect to, @Nonnull AuditStamp auditStamp) { + public List getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect from, + @Nonnull Aspect to, + @Nonnull AuditStamp auditStamp) { return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); } @@ -31,19 +33,21 @@ private List computeDiffs( // Ensure that it is the deprecation status which has actually been changed. 
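// Only transitions of the boolean status yield events; a compact sketch of the check the
// branches below implement (isDeprecated is this generator's null-safe helper, event
// construction elided):
boolean wasDeprecated = isDeprecated(baseDeprecation);
boolean nowDeprecated = isDeprecated(targetDeprecation);
if (!wasDeprecated && nowDeprecated) { /* emit the DEPRECATED change event */ }
if (wasDeprecated && !nowDeprecated) { /* emit the corresponding un-deprecated event */ }
// When the status is unchanged, nothing is emitted even if other fields of the aspect differ.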
- // If the entity was not previously deprecated, but is now deprecated, then return a deprecated event. + // If the entity was not previously deprecated, but is now deprecated, then return a deprecated + // event. if (!isDeprecated(baseDeprecation) && isDeprecated(targetDeprecation)) { return Collections.singletonList( ChangeEvent.builder() - .category(ChangeCategory.DEPRECATION) - .operation(ChangeOperation.MODIFY) - .entityUrn(entityUrn) - .auditStamp(auditStamp) - .parameters(ImmutableMap.of("status", "DEPRECATED")) - .build()); + .category(ChangeCategory.DEPRECATION) + .operation(ChangeOperation.MODIFY) + .entityUrn(entityUrn) + .auditStamp(auditStamp) + .parameters(ImmutableMap.of("status", "DEPRECATED")) + .build()); } - // If the entity was previously deprecated, but is not not deprecated, then return a un-deprecated event. + // If the entity was previously deprecated, but is no longer deprecated, then return an + // un-deprecated event. if (isDeprecated(baseDeprecation) && !isDeprecated(targetDeprecation)) { return Collections.singletonList( ChangeEvent.builder() diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java index a10565a7c958b..1ffcd3cfc2ba4 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableDatasetPropertiesChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -16,30 +18,37 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class EditableDatasetPropertiesChangeEventGenerator extends EntityChangeEventGenerator<EditableDatasetProperties> { public static final String DESCRIPTION_ADDED = "Documentation for '%s' has been added: '%s'."; public static final String DESCRIPTION_REMOVED = "Documentation for '%s' has been removed: '%s'."; - public static final String DESCRIPTION_CHANGED = "Documentation of '%s' has been changed from '%s' to '%s'."; + public static final String DESCRIPTION_CHANGED = + "Documentation of '%s' has been changed from '%s' to '%s'."; - private static List<ChangeEvent> computeDiffs(EditableDatasetProperties baseDatasetProperties, - EditableDatasetProperties targetDatasetProperties, String entityUrn, AuditStamp auditStamp) { + private static List<ChangeEvent> computeDiffs( + EditableDatasetProperties baseDatasetProperties, + EditableDatasetProperties targetDatasetProperties, + String entityUrn, + AuditStamp auditStamp) { List<ChangeEvent> changeEvents = new ArrayList<>(); ChangeEvent descriptionChangeEvent = - getDescriptionChangeEvent(baseDatasetProperties, targetDatasetProperties, entityUrn, auditStamp); + getDescriptionChangeEvent( + baseDatasetProperties, targetDatasetProperties, entityUrn, auditStamp); if (descriptionChangeEvent != null) { changeEvents.add(descriptionChangeEvent); } return changeEvents; } - private static ChangeEvent getDescriptionChangeEvent(EditableDatasetProperties baseDatasetProperties, - EditableDatasetProperties targetDatasetProperties, String entityUrn, AuditStamp auditStamp) { - String baseDescription = (baseDatasetProperties != null) ?
baseDatasetProperties.getDescription() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; + private static ChangeEvent getDescriptionChangeEvent( + EditableDatasetProperties baseDatasetProperties, + EditableDatasetProperties targetDatasetProperties, + String entityUrn, + AuditStamp auditStamp) { + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDescription() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDescription() : null; if (baseDescription == null && targetDescription != null) { // Description added return ChangeEvent.builder() @@ -60,45 +69,59 @@ private static ChangeEvent getDescriptionChangeEvent(EditableDatasetProperties b .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) .auditStamp(auditStamp) .build(); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { // Description has been modified. return ChangeEvent.builder() .entityUrn(entityUrn) .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.MODIFY) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) .auditStamp(auditStamp) .build(); } return null; } - private static EditableDatasetProperties getEditableDatasetPropertiesFromAspect(EntityAspect entityAspect) { + private static EditableDatasetProperties getEditableDatasetPropertiesFromAspect( + EntityAspect entityAspect) { if (entityAspect != null && entityAspect.getMetadata() != null) { - return RecordUtils.toRecordTemplate(EditableDatasetProperties.class, entityAspect.getMetadata()); + return RecordUtils.toRecordTemplate( + EditableDatasetProperties.class, entityAspect.getMetadata()); } return null; } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME) || !currentValue.getAspect() - .equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)) { - throw new IllegalArgumentException("Aspect is not " + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); + if (!previousValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME) + || !currentValue.getAspect().equals(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)) { + throw new IllegalArgumentException( + "Aspect is not " + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME); } List changeEvents = new ArrayList<>(); if (element == ChangeCategory.DOCUMENTATION) { - EditableDatasetProperties baseDatasetProperties = getEditableDatasetPropertiesFromAspect(previousValue); - EditableDatasetProperties targetDatasetProperties = getEditableDatasetPropertiesFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); + 
EditableDatasetProperties baseDatasetProperties = + getEditableDatasetPropertiesFromAspect(previousValue); + EditableDatasetProperties targetDatasetProperties = + getEditableDatasetPropertiesFromAspect(currentValue); + changeEvents.addAll( + computeDiffs( + baseDatasetProperties, targetDatasetProperties, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java index 4a1de4c3421ed..1f094bb6ca989 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EditableSchemaMetadataChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -16,7 +19,6 @@ import com.linkedin.schema.EditableSchemaFieldInfo; import com.linkedin.schema.EditableSchemaFieldInfoArray; import com.linkedin.schema.EditableSchemaMetadata; - import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -27,11 +29,8 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; - - -public class EditableSchemaMetadataChangeEventGenerator extends EntityChangeEventGenerator { +public class EditableSchemaMetadataChangeEventGenerator + extends EntityChangeEventGenerator { public static final String FIELD_DOCUMENTATION_ADDED_FORMAT = "Documentation for the field '%s' of '%s' has been added: '%s'"; public static final String FIELD_DOCUMENTATION_REMOVED_FORMAT = @@ -42,45 +41,59 @@ public class EditableSchemaMetadataChangeEventGenerator extends EntityChangeEven Stream.of(ChangeCategory.DOCUMENTATION, ChangeCategory.TAG, ChangeCategory.GLOSSARY_TERM) .collect(Collectors.toSet()); - private static void sortEditableSchemaMetadataByFieldPath(EditableSchemaMetadata editableSchemaMetadata) { + private static void sortEditableSchemaMetadataByFieldPath( + EditableSchemaMetadata editableSchemaMetadata) { if (editableSchemaMetadata == null) { return; } List editableSchemaFieldInfos = new ArrayList<>(editableSchemaMetadata.getEditableSchemaFieldInfo()); editableSchemaFieldInfos.sort(Comparator.comparing(EditableSchemaFieldInfo::getFieldPath)); - editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray(editableSchemaFieldInfos)); + editableSchemaMetadata.setEditableSchemaFieldInfo( + new EditableSchemaFieldInfoArray(editableSchemaFieldInfos)); } - private static List getAllChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, String entityUrn, ChangeCategory changeCategory, + private static List getAllChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + String entityUrn, + ChangeCategory changeCategory, AuditStamp auditStamp) { List changeEvents = new ArrayList<>(); Urn datasetFieldUrn = getDatasetFieldUrn(baseFieldInfo, 
targetFieldInfo, entityUrn); if (changeCategory == ChangeCategory.DOCUMENTATION) { - ChangeEvent documentationChangeEvent = getDocumentationChangeEvent(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp); + ChangeEvent documentationChangeEvent = + getDocumentationChangeEvent(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp); if (documentationChangeEvent != null) { changeEvents.add(documentationChangeEvent); } } if (changeCategory == ChangeCategory.TAG) { - changeEvents.addAll(getTagChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); + changeEvents.addAll( + getTagChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); } if (changeCategory == ChangeCategory.GLOSSARY_TERM) { - changeEvents.addAll(getGlossaryTermChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); + changeEvents.addAll( + getGlossaryTermChangeEvents(baseFieldInfo, targetFieldInfo, datasetFieldUrn, auditStamp)); } return changeEvents; } - private static List computeDiffs(EditableSchemaMetadata baseEditableSchemaMetadata, - EditableSchemaMetadata targetEditableSchemaMetadata, String entityUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List computeDiffs( + EditableSchemaMetadata baseEditableSchemaMetadata, + EditableSchemaMetadata targetEditableSchemaMetadata, + String entityUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { sortEditableSchemaMetadataByFieldPath(baseEditableSchemaMetadata); sortEditableSchemaMetadataByFieldPath(targetEditableSchemaMetadata); List changeEvents = new ArrayList<>(); EditableSchemaFieldInfoArray baseFieldInfos = - (baseEditableSchemaMetadata != null) ? baseEditableSchemaMetadata.getEditableSchemaFieldInfo() + (baseEditableSchemaMetadata != null) + ? baseEditableSchemaMetadata.getEditableSchemaFieldInfo() : new EditableSchemaFieldInfoArray(); - EditableSchemaFieldInfoArray targetFieldInfos = targetEditableSchemaMetadata.getEditableSchemaFieldInfo(); + EditableSchemaFieldInfoArray targetFieldInfos = + targetEditableSchemaMetadata.getEditableSchemaFieldInfo(); int baseIdx = 0; int targetIdx = 0; while (baseIdx < baseFieldInfos.size() && targetIdx < targetFieldInfos.size()) { @@ -88,16 +101,20 @@ private static List computeDiffs(EditableSchemaMetadata baseEditabl EditableSchemaFieldInfo targetFieldInfo = targetFieldInfos.get(targetIdx); int comparison = baseFieldInfo.getFieldPath().compareTo(targetFieldInfo.getFieldPath()); if (comparison == 0) { - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents( + baseFieldInfo, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++baseIdx; ++targetIdx; } else if (comparison < 0) { // EditableFieldInfo got removed. - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); ++baseIdx; } else { // EditableFieldInfo got added. 
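// The enclosing loop is a merge-style walk over two field-info lists pre-sorted by
// fieldPath; a minimal sketch of the same pattern with simplified types (base and target
// stand in for the sorted field-path lists, illustrative only):
int i = 0, j = 0;
while (i < base.size() && j < target.size()) {
  int cmp = base.get(i).compareTo(target.get(j));
  if (cmp == 0) { i++; j++; } // same path on both sides: diff the two versions in place
  else if (cmp < 0) { i++; } // present only in base: the field info was removed
  else { j++; } // present only in target: the field info was added
}
// Whatever remains in base afterwards was removed; whatever remains in target was added.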
- changeEvents.addAll(getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++targetIdx; } } @@ -105,29 +122,36 @@ private static List computeDiffs(EditableSchemaMetadata baseEditabl while (baseIdx < baseFieldInfos.size()) { // Handle removed baseFieldInfo EditableSchemaFieldInfo baseFieldInfo = baseFieldInfos.get(baseIdx); - changeEvents.addAll(getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(baseFieldInfo, null, entityUrn, changeCategory, auditStamp)); ++baseIdx; } while (targetIdx < targetFieldInfos.size()) { // Handle newly added targetFieldInfo EditableSchemaFieldInfo targetFieldInfo = targetFieldInfos.get(targetIdx); - changeEvents.addAll(getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); + changeEvents.addAll( + getAllChangeEvents(null, targetFieldInfo, entityUrn, changeCategory, auditStamp)); ++targetIdx; } return changeEvents; } - private static EditableSchemaMetadata getEditableSchemaMetadataFromAspect(EntityAspect entityAspect) { + private static EditableSchemaMetadata getEditableSchemaMetadataFromAspect( + EntityAspect entityAspect) { if (entityAspect != null && entityAspect.getMetadata() != null) { return RecordUtils.toRecordTemplate(EditableSchemaMetadata.class, entityAspect.getMetadata()); } return null; } - private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { + private static ChangeEvent getDocumentationChangeEvent( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { String baseFieldDescription = (baseFieldInfo != null) ? baseFieldInfo.getDescription() : null; - String targetFieldDescription = (targetFieldInfo != null) ? targetFieldInfo.getDescription() : null; + String targetFieldDescription = + (targetFieldInfo != null) ? 
targetFieldInfo.getDescription() : null; if (baseFieldDescription == null && targetFieldDescription != null) { return ChangeEvent.builder() @@ -136,8 +160,12 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.ADD) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(FIELD_DOCUMENTATION_ADDED_FORMAT, targetFieldInfo.getFieldPath(), datasetFieldUrn, - targetFieldDescription)) + .description( + String.format( + FIELD_DOCUMENTATION_ADDED_FORMAT, + targetFieldInfo.getFieldPath(), + datasetFieldUrn, + targetFieldDescription)) .auditStamp(auditStamp) .build(); } @@ -149,23 +177,32 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.REMOVE) .semVerChange(SemanticChangeType.MINOR) - .description(String.format(FIELD_DOCUMENTATION_REMOVED_FORMAT, + .description( + String.format( + FIELD_DOCUMENTATION_REMOVED_FORMAT, Optional.ofNullable(targetFieldInfo).map(EditableSchemaFieldInfo::getFieldPath), - datasetFieldUrn, baseFieldDescription)) + datasetFieldUrn, + baseFieldDescription)) .auditStamp(auditStamp) .build(); } - if (baseFieldDescription != null && targetFieldDescription != null && !baseFieldDescription.equals( - targetFieldDescription)) { + if (baseFieldDescription != null + && targetFieldDescription != null + && !baseFieldDescription.equals(targetFieldDescription)) { return ChangeEvent.builder() .modifier(targetFieldInfo.getFieldPath()) .entityUrn(datasetFieldUrn.toString()) .category(ChangeCategory.DOCUMENTATION) .operation(ChangeOperation.MODIFY) .semVerChange(SemanticChangeType.PATCH) - .description(String.format(FIELD_DOCUMENTATION_UPDATED_FORMAT, targetFieldInfo.getFieldPath(), datasetFieldUrn, - baseFieldDescription, targetFieldDescription)) + .description( + String.format( + FIELD_DOCUMENTATION_UPDATED_FORMAT, + targetFieldInfo.getFieldPath(), + datasetFieldUrn, + baseFieldDescription, + targetFieldDescription)) .auditStamp(auditStamp) .build(); } @@ -173,69 +210,86 @@ private static ChangeEvent getDocumentationChangeEvent(EditableSchemaFieldInfo b return null; } - private static List getGlossaryTermChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { - GlossaryTerms baseGlossaryTerms = (baseFieldInfo != null) ? baseFieldInfo.getGlossaryTerms() : null; - GlossaryTerms targetGlossaryTerms = (targetFieldInfo != null) ? targetFieldInfo.getGlossaryTerms() : null; + private static List getGlossaryTermChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { + GlossaryTerms baseGlossaryTerms = + (baseFieldInfo != null) ? baseFieldInfo.getGlossaryTerms() : null; + GlossaryTerms targetGlossaryTerms = + (targetFieldInfo != null) ? targetFieldInfo.getGlossaryTerms() : null; // 1. Get EntityGlossaryTermChangeEvent, then rebind into a SchemaFieldGlossaryTermChangeEvent. List entityGlossaryTermsChangeEvents = - GlossaryTermsChangeEventGenerator.computeDiffs(baseGlossaryTerms, targetGlossaryTerms, - datasetFieldUrn.toString(), auditStamp); + GlossaryTermsChangeEventGenerator.computeDiffs( + baseGlossaryTerms, targetGlossaryTerms, datasetFieldUrn.toString(), auditStamp); if (targetFieldInfo != null || baseFieldInfo != null) { - String fieldPath = targetFieldInfo != null ? 
targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); + String fieldPath = + targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); // 2. Convert EntityGlossaryTermChangeEvent into a SchemaFieldGlossaryTermChangeEvent. return convertEntityGlossaryTermChangeEvents( - fieldPath, - datasetFieldUrn, - entityGlossaryTermsChangeEvents); + fieldPath, datasetFieldUrn, entityGlossaryTermsChangeEvents); } return Collections.emptyList(); } - private static List getTagChangeEvents(EditableSchemaFieldInfo baseFieldInfo, - EditableSchemaFieldInfo targetFieldInfo, Urn datasetFieldUrn, AuditStamp auditStamp) { + private static List getTagChangeEvents( + EditableSchemaFieldInfo baseFieldInfo, + EditableSchemaFieldInfo targetFieldInfo, + Urn datasetFieldUrn, + AuditStamp auditStamp) { GlobalTags baseGlobalTags = (baseFieldInfo != null) ? baseFieldInfo.getGlobalTags() : null; - GlobalTags targetGlobalTags = (targetFieldInfo != null) ? targetFieldInfo.getGlobalTags() : null; + GlobalTags targetGlobalTags = + (targetFieldInfo != null) ? targetFieldInfo.getGlobalTags() : null; // 1. Get EntityTagChangeEvent, then rebind into a SchemaFieldTagChangeEvent. List entityTagChangeEvents = - GlobalTagsChangeEventGenerator.computeDiffs(baseGlobalTags, targetGlobalTags, datasetFieldUrn.toString(), - auditStamp); + GlobalTagsChangeEventGenerator.computeDiffs( + baseGlobalTags, targetGlobalTags, datasetFieldUrn.toString(), auditStamp); if (targetFieldInfo != null || baseFieldInfo != null) { - String fieldPath = targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); + String fieldPath = + targetFieldInfo != null ? targetFieldInfo.getFieldPath() : baseFieldInfo.getFieldPath(); // 2. Convert EntityTagChangeEvent into a SchemaFieldTagChangeEvent. 
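// The rebind re-homes each entity-level event onto a schema-field URN built with
// ChangeEventGeneratorUtils.getSchemaFieldUrn; an illustrative call, with a made-up
// dataset URN and field path:
Urn fieldUrn = getSchemaFieldUrn(
    UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,db.users,PROD)"), "user_id");
// -> urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,db.users,PROD),user_id)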
- return convertEntityTagChangeEvents( - fieldPath, - datasetFieldUrn, - entityTagChangeEvents); + return convertEntityTagChangeEvents(fieldPath, datasetFieldUrn, entityTagChangeEvents); } return Collections.emptyList(); } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME) || !currentValue.getAspect() - .equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME) + || !currentValue.getAspect().equals(EDITABLE_SCHEMA_METADATA_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - EditableSchemaMetadata baseEditableSchemaMetadata = getEditableSchemaMetadataFromAspect(previousValue); - EditableSchemaMetadata targetEditableSchemaMetadata = getEditableSchemaMetadataFromAspect(currentValue); + EditableSchemaMetadata baseEditableSchemaMetadata = + getEditableSchemaMetadataFromAspect(previousValue); + EditableSchemaMetadata targetEditableSchemaMetadata = + getEditableSchemaMetadataFromAspect(currentValue); List changeEvents = new ArrayList<>(); if (SUPPORTED_CATEGORIES.contains(element)) { changeEvents.addAll( - computeDiffs(baseEditableSchemaMetadata, targetEditableSchemaMetadata, currentValue.getUrn(), element, null)); + computeDiffs( + baseEditableSchemaMetadata, + targetEditableSchemaMetadata, + currentValue.getUrn(), + element, + null)); } // Assess the highest change at the transaction(schema) level. @@ -264,14 +318,37 @@ public List getChangeEvents( @Nonnull Aspect to, @Nonnull AuditStamp auditStamp) { final List changeEvents = new ArrayList<>(); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.DOCUMENTATION, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TAG, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn.toString(), ChangeCategory.GLOSSARY_TERM, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.DOCUMENTATION, + auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn.toString(), ChangeCategory.TAG, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.TECHNICAL_SCHEMA, + auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), + to.getValue(), + urn.toString(), + ChangeCategory.GLOSSARY_TERM, + auditStamp)); return changeEvents; } - private static Urn getDatasetFieldUrn(final EditableSchemaFieldInfo previous, final EditableSchemaFieldInfo latest, String entityUrn) { + private static Urn getDatasetFieldUrn( + final EditableSchemaFieldInfo previous, + final EditableSchemaFieldInfo latest, + String entityUrn) { return previous != null ? 
getSchemaFieldUrn(UrnUtils.getUrn(entityUrn), previous.getFieldPath()) : getSchemaFieldUrn(UrnUtils.getUrn(entityUrn), latest.getFieldPath()); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java index d5539ec3d3822..0c98eefe90ef2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGenerator.java @@ -13,38 +13,44 @@ import java.util.List; import javax.annotation.Nonnull; - -/** - * An abstract class to generate {@link ChangeEvent}s for a given entity aspect. - */ +/** An abstract class to generate {@link ChangeEvent}s for a given entity aspect. */ public abstract class EntityChangeEventGenerator { // TODO: Add a check for supported aspects protected SystemEntityClient _entityClient; protected Authentication _authentication; - public EntityChangeEventGenerator() { - } + public EntityChangeEventGenerator() {} public EntityChangeEventGenerator(@Nonnull final SystemEntityClient entityClient) { _entityClient = entityClient; } @Deprecated - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { // TODO: Migrate away from using getSemanticDiff. throw new UnsupportedOperationException(); } /** - * TODO: Migrate callers of the above API to below. The recommendation is to move timeline response creation into - * 2-stage. First stage generate change events, second stage derive semantic meaning + filter those change events. + * TODO: Migrate callers of the above API to below. The recommendation is to move timeline + * response creation into 2-stage. First stage generate change events, second stage derive + * semantic meaning + filter those change events. * - * Returns all {@link ChangeEvent}s computed from a raw aspect change. + *

<p>Returns all {@link ChangeEvent}s computed from a raw aspect change.
   *
-   * Note that the {@link ChangeEvent} list can contain multiple {@link ChangeCategory} inside of it,
-   * it is expected that the caller will filter the set of events as required.
+   * <p>Note that the {@link ChangeEvent} list can contain multiple {@link ChangeCategory} inside of
+   * it; it is expected that the caller will filter the set of events as required.
    */
-  public abstract List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect,
-      @Nonnull Aspect<T> from, @Nonnull Aspect<T> to, @Nonnull AuditStamp auditStamp);
+  public abstract List<ChangeEvent> getChangeEvents(
+      @Nonnull Urn urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nonnull Aspect<T> from,
+      @Nonnull Aspect<T> to,
+      @Nonnull AuditStamp auditStamp);
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java
index 330be8560c72b..824dc10ab2732 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorFactory.java
@@ -4,20 +4,25 @@
 import java.util.HashMap;
 import java.util.Map;
 
-
 /**
- * A cheap factory for generating EntityChangeEvents, keyed by entity-type, element-type, aspect-name
+ * A cheap factory for generating EntityChangeEvents, keyed by entity-type, element-type,
+ * aspect-name
 */
 public class EntityChangeEventGeneratorFactory {
-  private final Map<String, EntityChangeEventGenerator> _entityChangeEventGeneratorMap = new HashMap<>();
+  private final Map<String, EntityChangeEventGenerator> _entityChangeEventGeneratorMap =
+      new HashMap<>();
 
-  public void addGenerator(String entityName, ChangeCategory elementName, String aspectName,
+  public void addGenerator(
+      String entityName,
+      ChangeCategory elementName,
+      String aspectName,
       EntityChangeEventGenerator differ) {
     _entityChangeEventGeneratorMap.put(entityName + elementName.name() + aspectName, differ);
   }
 
-  public EntityChangeEventGenerator getGenerator(String entityName, ChangeCategory category, String aspectName) {
+  public EntityChangeEventGenerator getGenerator(
+      String entityName, ChangeCategory category, String aspectName) {
     return _entityChangeEventGeneratorMap.get(entityName + category.name() + aspectName);
   }
 }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java
index 84c4343dc63ee..3a94bedcbd0ce 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityChangeEventGeneratorRegistry.java
@@ -7,18 +7,15 @@
 import java.util.Set;
 import javax.annotation.Nonnull;
 
-
-/**
- * A registry that maps an aspect name to one or more {@link EntityChangeEventGenerator}s.
- */
+/** A registry that maps an aspect name to one or more {@link EntityChangeEventGenerator}s. */
 public class EntityChangeEventGeneratorRegistry {
-  private final Map<String, Set<EntityChangeEventGenerator<?>>> entityChangeEventGenerators = new HashMap<>();
+  private final Map<String, Set<EntityChangeEventGenerator<?>>> entityChangeEventGenerators =
+      new HashMap<>();
 
-  /**
-   * Registers a new EntityChangeEventGenerator for the given aspect.
-   */
-  public void register(@Nonnull final String aspectName,
+  /** Registers a new EntityChangeEventGenerator for the given aspect. */
+  public void register(
+      @Nonnull final String aspectName,
       @Nonnull final EntityChangeEventGenerator<?> entityChangeEventGenerator) {
     Objects.requireNonNull(aspectName);
     Objects.requireNonNull(entityChangeEventGenerator);
@@ -26,10 +23,9 @@ public void register(@Nonnull final String aspectName,
     entityChangeEventGenerators.get(aspectName).add(entityChangeEventGenerator);
   }
 
-  /**
-   * Registers a new Enity Change Generator, or null if one does not exist.
-   */
-  public Set<EntityChangeEventGenerator<?>> getEntityChangeEventGenerators(@Nonnull final String aspectName) {
+  /** Returns the generators registered for the given aspect, or an empty set if none exist. */
+  public Set<EntityChangeEventGenerator<?>> getEntityChangeEventGenerators(
+      @Nonnull final String aspectName) {
     final String key = Objects.requireNonNull(aspectName);
     return this.entityChangeEventGenerators.getOrDefault(key, new HashSet<>());
   }
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java
index 7055c95a73eff..c90e96d3860fb 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/EntityKeyChangeEventGenerator.java
@@ -10,11 +10,12 @@
 import java.util.List;
 import javax.annotation.Nonnull;
 
-
 /**
- * A general purpose differ which simply determines whether an entity has been created or hard deleted.
+ * A general purpose differ which simply determines whether an entity has been created or hard
+ * deleted.
 */
-public class EntityKeyChangeEventGenerator<T extends RecordTemplate> extends EntityChangeEventGenerator<T> {
+public class EntityKeyChangeEventGenerator<T extends RecordTemplate>
+    extends EntityChangeEventGenerator<T> {
   @Override
   public List<ChangeEvent> getChangeEvents(
       @Nonnull Urn urn,
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java
index 460a0ae399a5f..ef40c5dc81a3c 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlobalTagsChangeEventGenerator.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.timeline.eventgenerator;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.RecordUtils;
 import com.github.fge.jsonpatch.JsonPatch;
 import com.linkedin.common.AuditStamp;
@@ -19,19 +21,20 @@ import java.util.List;
 import javax.annotation.Nonnull;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class GlobalTagsChangeEventGenerator extends EntityChangeEventGenerator<GlobalTags> {
   private static final String TAG_ADDED_FORMAT = "Tag '%s' added to entity '%s'.";
   private static final String TAG_REMOVED_FORMAT = "Tag '%s' removed from entity '%s'.";
 
-  public static List<ChangeEvent> computeDiffs(GlobalTags baseGlobalTags, GlobalTags targetGlobalTags, String entityUrn,
+  public static List<ChangeEvent> computeDiffs(
+      GlobalTags baseGlobalTags,
+      GlobalTags targetGlobalTags,
+      String entityUrn,
       AuditStamp auditStamp) {
     sortGlobalTagsByTagUrn(baseGlobalTags);
     sortGlobalTagsByTagUrn(targetGlobalTags);
 
     List<ChangeEvent> changeEvents = new ArrayList<>();
-    TagAssociationArray baseTags = (baseGlobalTags != null) ? baseGlobalTags.getTags() : new TagAssociationArray();
+    TagAssociationArray baseTags =
+        (baseGlobalTags != null) ?
baseGlobalTags.getTags() : new TagAssociationArray(); TagAssociationArray targetTags = (targetGlobalTags != null) ? targetGlobalTags.getTags() : new TagAssociationArray(); int baseTagIdx = 0; @@ -39,36 +42,46 @@ public static List computeDiffs(GlobalTags baseGlobalTags, GlobalTa while (baseTagIdx < baseTags.size() && targetTagIdx < targetTags.size()) { TagAssociation baseTagAssociation = baseTags.get(baseTagIdx); TagAssociation targetTagAssociation = targetTags.get(targetTagIdx); - int comparison = baseTagAssociation.getTag().toString().compareTo(targetTagAssociation.getTag().toString()); + int comparison = + baseTagAssociation + .getTag() + .toString() + .compareTo(targetTagAssociation.getTag().toString()); if (comparison == 0) { // No change to this tag. ++baseTagIdx; ++targetTagIdx; } else if (comparison < 0) { // Tag got removed. - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(baseTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(baseTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(baseTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(baseTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++baseTagIdx; } else { // Tag got added. - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(targetTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(targetTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(targetTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(targetTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++targetTagIdx; } } @@ -76,31 +89,35 @@ public static List computeDiffs(GlobalTags baseGlobalTags, GlobalTa while (baseTagIdx < baseTags.size()) { // Handle removed tags. 
TagAssociation baseTagAssociation = baseTags.get(baseTagIdx); - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(baseTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(baseTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(baseTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(TAG_REMOVED_FORMAT, baseTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(baseTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++baseTagIdx; } while (targetTagIdx < targetTags.size()) { // Handle newly added tags. TagAssociation targetTagAssociation = targetTags.get(targetTagIdx); - changeEvents.add(TagChangeEvent.entityTagChangeEventBuilder() - .modifier(targetTagAssociation.getTag().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.TAG) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) - .tagUrn(targetTagAssociation.getTag()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + TagChangeEvent.entityTagChangeEventBuilder() + .modifier(targetTagAssociation.getTag().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.TAG) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(TAG_ADDED_FORMAT, targetTagAssociation.getTag().getId(), entityUrn)) + .tagUrn(targetTagAssociation.getTag()) + .auditStamp(auditStamp) + .build()); ++targetTagIdx; } return changeEvents; @@ -123,10 +140,14 @@ private static GlobalTags getGlobalTagsFromAspect(EntityAspect entityAspect) { } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOBAL_TAGS_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOBAL_TAGS_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + GLOBAL_TAGS_ASPECT_NAME); } @@ -134,7 +155,8 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec GlobalTags targetGlobalTags = getGlobalTagsFromAspect(currentValue); List changeEvents = new ArrayList<>(); if (element == ChangeCategory.TAG) { - changeEvents.addAll(computeDiffs(baseGlobalTags, targetGlobalTags, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseGlobalTags, targetGlobalTags, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
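
The hunks above only re-wrap GlobalTagsChangeEventGenerator.computeDiffs; the algorithm is still a two-pointer merge walk over the two tag arrays, both sorted by tag URN first. For reviewers verifying that the reformat is behavior-preserving, here is a minimal self-contained sketch of that pattern. Plain strings stand in for TagAssociation and the ChangeEvent builders, so this is an illustration of the technique, not the project's API.

import java.util.ArrayList;
import java.util.List;

public class SortedDiffSketch {
  // Emits "REMOVE x" / "ADD x" events for two lexicographically sorted lists,
  // mirroring the baseTagIdx/targetTagIdx walk in computeDiffs above.
  static List<String> diff(List<String> base, List<String> target) {
    List<String> events = new ArrayList<>();
    int i = 0;
    int j = 0;
    while (i < base.size() && j < target.size()) {
      int cmp = base.get(i).compareTo(target.get(j));
      if (cmp == 0) { // present on both sides: no event
        i++;
        j++;
      } else if (cmp < 0) { // only in base: it was removed
        events.add("REMOVE " + base.get(i++));
      } else { // only in target: it was added
        events.add("ADD " + target.get(j++));
      }
    }
    while (i < base.size()) { // leftover base entries were removed
      events.add("REMOVE " + base.get(i++));
    }
    while (j < target.size()) { // leftover target entries were added
      events.add("ADD " + target.get(j++));
    }
    return events;
  }

  public static void main(String[] args) {
    System.out.println(
        diff(
            List.of("urn:li:tag:pii", "urn:li:tag:tier1"),
            List.of("urn:li:tag:tier1", "urn:li:tag:tier2")));
    // prints [REMOVE urn:li:tag:pii, ADD urn:li:tag:tier2]
  }
}

Because both inputs are sorted up front, each array is scanned exactly once, so the diff runs in O(n + m) with no extra set allocation; that is the shape the formatting-only hunks above must preserve.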
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java index f8b7794df531f..eb002a9a83cea 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermInfoChangeEventGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -17,97 +20,111 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator.*; - +public class GlossaryTermInfoChangeEventGenerator + extends EntityChangeEventGenerator { + private static List computeDiffs( + GlossaryTermInfo baseDatasetProperties, + @Nonnull GlossaryTermInfo targetDatasetProperties, + @Nonnull String entityUrn, + AuditStamp auditStamp) { + List changeEvents = new ArrayList<>(); + String baseDescription = + (baseDatasetProperties != null) ? baseDatasetProperties.getDefinition() : null; + String targetDescription = + (targetDatasetProperties != null) ? targetDatasetProperties.getDefinition() : null; -public class GlossaryTermInfoChangeEventGenerator extends EntityChangeEventGenerator { - private static List computeDiffs(GlossaryTermInfo baseDatasetProperties, - @Nonnull GlossaryTermInfo targetDatasetProperties, @Nonnull String entityUrn, AuditStamp auditStamp) { - List changeEvents = new ArrayList<>(); - String baseDescription = (baseDatasetProperties != null) ? baseDatasetProperties.getDefinition() : null; - String targetDescription = (targetDatasetProperties != null) ? targetDatasetProperties.getDefinition() : null; - - if (baseDescription == null && targetDescription != null) { - // Description added - changeEvents.add(ChangeEvent.builder().entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription == null) { - // Description removed. - changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) - .auditStamp(auditStamp) - .build()); - } else if (baseDescription != null && targetDescription != null && !baseDescription.equals(targetDescription)) { - // Description has been modified. 
- changeEvents.add(ChangeEvent.builder() - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) - .auditStamp(auditStamp) - .build()); - } - return changeEvents; + if (baseDescription == null && targetDescription != null) { + // Description added + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_ADDED, entityUrn, targetDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null && targetDescription == null) { + // Description removed. + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description(String.format(DESCRIPTION_REMOVED, entityUrn, baseDescription)) + .auditStamp(auditStamp) + .build()); + } else if (baseDescription != null + && targetDescription != null + && !baseDescription.equals(targetDescription)) { + // Description has been modified. + changeEvents.add( + ChangeEvent.builder() + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format(DESCRIPTION_CHANGED, entityUrn, baseDescription, targetDescription)) + .auditStamp(auditStamp) + .build()); } + return changeEvents; + } - @Nullable - private static GlossaryTermInfo getGlossaryTermInfoFromAspect(EntityAspect entityAspect) { - if (entityAspect != null && entityAspect.getMetadata() != null) { - return RecordUtils.toRecordTemplate(GlossaryTermInfo.class, entityAspect.getMetadata()); - } - return null; + @Nullable + private static GlossaryTermInfo getGlossaryTermInfoFromAspect(EntityAspect entityAspect) { + if (entityAspect != null && entityAspect.getMetadata() != null) { + return RecordUtils.toRecordTemplate(GlossaryTermInfo.class, entityAspect.getMetadata()); } + return null; + } - @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { - if (!previousValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERM_INFO_ASPECT_NAME); - } - List changeEvents = new ArrayList<>(); - if (element == ChangeCategory.DOCUMENTATION) { - GlossaryTermInfo baseGlossaryTermInfo = getGlossaryTermInfoFromAspect(previousValue); - GlossaryTermInfo targetGlossaryTermInfo = getGlossaryTermInfoFromAspect(currentValue); - changeEvents.addAll(computeDiffs(baseGlossaryTermInfo, targetGlossaryTermInfo, currentValue.getUrn(), null)); - } - - // Assess the highest change at the transaction(schema) level. 
- SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; - ChangeEvent highestChangeEvent = - changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); - if (highestChangeEvent != null) { - highestSemanticChange = highestChangeEvent.getSemVerChange(); - } - - return ChangeTransaction.builder() - .semVerChange(highestSemanticChange) - .changeEvents(changeEvents) - .timestamp(currentValue.getCreatedOn().getTime()) - .rawDiff(rawDiffsRequested ? rawDiff : null) - .actor(currentValue.getCreatedBy()) - .build(); + @Override + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { + if (!previousValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERM_INFO_ASPECT_NAME); + } + List changeEvents = new ArrayList<>(); + if (element == ChangeCategory.DOCUMENTATION) { + GlossaryTermInfo baseGlossaryTermInfo = getGlossaryTermInfoFromAspect(previousValue); + GlossaryTermInfo targetGlossaryTermInfo = getGlossaryTermInfoFromAspect(currentValue); + changeEvents.addAll( + computeDiffs(baseGlossaryTermInfo, targetGlossaryTermInfo, currentValue.getUrn(), null)); } - @Override - public List getChangeEvents( - @Nonnull Urn urn, - @Nonnull String entity, - @Nonnull String aspect, - @Nonnull Aspect from, - @Nonnull Aspect to, - @Nonnull AuditStamp auditStamp) { - return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); + // Assess the highest change at the transaction(schema) level. + SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; + ChangeEvent highestChangeEvent = + changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); + if (highestChangeEvent != null) { + highestSemanticChange = highestChangeEvent.getSemVerChange(); } + + return ChangeTransaction.builder() + .semVerChange(highestSemanticChange) + .changeEvents(changeEvents) + .timestamp(currentValue.getCreatedOn().getTime()) + .rawDiff(rawDiffsRequested ? 
rawDiff : null) + .actor(currentValue.getCreatedBy()) + .build(); + } + + @Override + public List getChangeEvents( + @Nonnull Urn urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nonnull Aspect from, + @Nonnull Aspect to, + @Nonnull AuditStamp auditStamp) { + return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp); + } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java index 22b2033ec52bc..6e56a7e7bbeb1 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/GlossaryTermsChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,62 +21,78 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GlossaryTermsChangeEventGenerator extends EntityChangeEventGenerator { private static final String GLOSSARY_TERM_ADDED_FORMAT = "Term '%s' added to entity '%s'."; private static final String GLOSSARY_TERM_REMOVED_FORMAT = "Term '%s' removed from entity '%s'."; - public static List computeDiffs(GlossaryTerms baseGlossaryTerms, GlossaryTerms targetGlossaryTerms, - String entityUrn, AuditStamp auditStamp) { + public static List computeDiffs( + GlossaryTerms baseGlossaryTerms, + GlossaryTerms targetGlossaryTerms, + String entityUrn, + AuditStamp auditStamp) { List changeEvents = new ArrayList<>(); sortGlossaryTermsByGlossaryTermUrn(baseGlossaryTerms); sortGlossaryTermsByGlossaryTermUrn(targetGlossaryTerms); GlossaryTermAssociationArray baseTerms = - (baseGlossaryTerms != null) ? baseGlossaryTerms.getTerms() : new GlossaryTermAssociationArray(); + (baseGlossaryTerms != null) + ? baseGlossaryTerms.getTerms() + : new GlossaryTermAssociationArray(); GlossaryTermAssociationArray targetTerms = - (targetGlossaryTerms != null) ? targetGlossaryTerms.getTerms() : new GlossaryTermAssociationArray(); + (targetGlossaryTerms != null) + ? targetGlossaryTerms.getTerms() + : new GlossaryTermAssociationArray(); int baseGlossaryTermIdx = 0; int targetGlossaryTermIdx = 0; while (baseGlossaryTermIdx < baseTerms.size() && targetGlossaryTermIdx < targetTerms.size()) { GlossaryTermAssociation baseGlossaryTermAssociation = baseTerms.get(baseGlossaryTermIdx); - GlossaryTermAssociation targetGlossaryTermAssociation = targetTerms.get(targetGlossaryTermIdx); + GlossaryTermAssociation targetGlossaryTermAssociation = + targetTerms.get(targetGlossaryTermIdx); int comparison = - baseGlossaryTermAssociation.getUrn().toString().compareTo(targetGlossaryTermAssociation.getUrn().toString()); + baseGlossaryTermAssociation + .getUrn() + .toString() + .compareTo(targetGlossaryTermAssociation.getUrn().toString()); if (comparison == 0) { ++baseGlossaryTermIdx; ++targetGlossaryTermIdx; } else if (comparison < 0) { // GlossaryTerm got removed. 
- changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(baseGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_REMOVED_FORMAT, baseGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(baseGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(baseGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_REMOVED_FORMAT, + baseGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(baseGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++baseGlossaryTermIdx; } else { // GlossaryTerm got added. - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(targetGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_ADDED_FORMAT, targetGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(targetGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(targetGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_ADDED_FORMAT, + targetGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(targetGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++targetGlossaryTermIdx; } } @@ -82,33 +100,42 @@ public static List computeDiffs(GlossaryTerms baseGlossaryTerms, Gl while (baseGlossaryTermIdx < baseTerms.size()) { // Handle removed glossary terms. GlossaryTermAssociation baseGlossaryTermAssociation = baseTerms.get(baseGlossaryTermIdx); - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(baseGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_REMOVED_FORMAT, baseGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(baseGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(baseGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_REMOVED_FORMAT, + baseGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(baseGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++baseGlossaryTermIdx; } while (targetGlossaryTermIdx < targetTerms.size()) { // Handle newly added glossary terms. 
- GlossaryTermAssociation targetGlossaryTermAssociation = targetTerms.get(targetGlossaryTermIdx); - changeEvents.add(GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() - .modifier(targetGlossaryTermAssociation.getUrn().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.GLOSSARY_TERM) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(GLOSSARY_TERM_ADDED_FORMAT, targetGlossaryTermAssociation.getUrn().getId(), entityUrn)) - .termUrn(targetGlossaryTermAssociation.getUrn()) - .auditStamp(auditStamp) - .build()); + GlossaryTermAssociation targetGlossaryTermAssociation = + targetTerms.get(targetGlossaryTermIdx); + changeEvents.add( + GlossaryTermChangeEvent.entityGlossaryTermChangeEventBuilder() + .modifier(targetGlossaryTermAssociation.getUrn().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.GLOSSARY_TERM) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + GLOSSARY_TERM_ADDED_FORMAT, + targetGlossaryTermAssociation.getUrn().getId(), + entityUrn)) + .termUrn(targetGlossaryTermAssociation.getUrn()) + .auditStamp(auditStamp) + .build()); ++targetGlossaryTermIdx; } return changeEvents; @@ -119,7 +146,8 @@ private static void sortGlossaryTermsByGlossaryTermUrn(GlossaryTerms globalGloss return; } List glossaryTerms = new ArrayList<>(globalGlossaryTerms.getTerms()); - glossaryTerms.sort(Comparator.comparing(GlossaryTermAssociation::getUrn, Comparator.comparing(Urn::toString))); + glossaryTerms.sort( + Comparator.comparing(GlossaryTermAssociation::getUrn, Comparator.comparing(Urn::toString))); globalGlossaryTerms.setTerms(new GlossaryTermAssociationArray(glossaryTerms)); } @@ -131,15 +159,19 @@ private static GlossaryTerms getGlossaryTermsFromAspect(EntityAspect entityAspec } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME) || !currentValue.getAspect() - .equals(GLOSSARY_TERMS_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME) + || !currentValue.getAspect().equals(GLOSSARY_TERMS_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + GLOSSARY_TERMS_ASPECT_NAME); } @@ -147,7 +179,8 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec GlossaryTerms targetGlossaryTerms = getGlossaryTermsFromAspect(currentValue); List changeEvents = new ArrayList<>(); if (element == ChangeCategory.GLOSSARY_TERM) { - changeEvents.addAll(computeDiffs(baseGlossaryTerms, targetGlossaryTerms, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseGlossaryTerms, targetGlossaryTerms, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
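
After computing the per-category events, each getSemanticDiff implementation in these files collapses them into a single transaction-level version bump by taking the maximum SemanticChangeType, as the "Assess the highest change" comment above indicates. A compact sketch of that rollup follows; the enum ordering NONE < PATCH < MINOR < MAJOR is an assumption inferred from the hunks, which assign MINOR to adds/removes and PATCH to in-place modifications.

import java.util.Comparator;
import java.util.List;

public class SemVerRollupSketch {
  // Declaration order doubles as severity order; enums compare by ordinal.
  enum SemanticChangeType { NONE, PATCH, MINOR, MAJOR }

  record Event(SemanticChangeType semVerChange) {}

  static SemanticChangeType highest(List<Event> events) {
    return events.stream()
        .max(Comparator.comparing(Event::semVerChange)) // pick the most severe event
        .map(Event::semVerChange)
        .orElse(SemanticChangeType.NONE); // no events means no version bump
  }

  public static void main(String[] args) {
    System.out.println(
        highest(
            List.of(
                new Event(SemanticChangeType.PATCH),
                new Event(SemanticChangeType.MINOR)))); // prints MINOR
  }
}

This mirrors the changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)) call that each generator uses before building its ChangeTransaction.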
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java index a23d76e47755c..bf3ff3293d2a2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/InstitutionalMemoryChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,10 +21,8 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class InstitutionalMemoryChangeEventGenerator extends EntityChangeEventGenerator { +public class InstitutionalMemoryChangeEventGenerator + extends EntityChangeEventGenerator { private static final String INSTITUTIONAL_MEMORY_ADDED_FORMAT = "Institutional Memory '%s' with documentation of '%s' has been added: '%s'"; @@ -31,17 +31,22 @@ public class InstitutionalMemoryChangeEventGenerator extends EntityChangeEventGe private static final String INSTITUTIONAL_MEMORY_MODIFIED_FORMAT = "Documentation of Institutional Memory '%s' of '%s' has been changed from '%s' to '%s'."; - private static List computeDiffs(InstitutionalMemory baseInstitutionalMemory, - InstitutionalMemory targetInstitutionalMemory, String entityUrn, AuditStamp auditStamp) { + private static List computeDiffs( + InstitutionalMemory baseInstitutionalMemory, + InstitutionalMemory targetInstitutionalMemory, + String entityUrn, + AuditStamp auditStamp) { List changeEvents = new ArrayList<>(); sortElementsByUrl(baseInstitutionalMemory); sortElementsByUrl(targetInstitutionalMemory); InstitutionalMemoryMetadataArray baseElements = - (baseInstitutionalMemory != null) ? baseInstitutionalMemory.getElements() + (baseInstitutionalMemory != null) + ? baseInstitutionalMemory.getElements() : new InstitutionalMemoryMetadataArray(); InstitutionalMemoryMetadataArray targetElements = - (targetInstitutionalMemory != null) ? targetInstitutionalMemory.getElements() + (targetInstitutionalMemory != null) + ? targetInstitutionalMemory.getElements() : new InstitutionalMemoryMetadataArray(); int baseIdx = 0; @@ -53,46 +58,60 @@ private static List computeDiffs(InstitutionalMemory baseInstitutio if (comparison == 0) { if (!baseElement.getDescription().equals(targetElement.getDescription())) { // InstitutionalMemory description has changed. 
- changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.PATCH) - .description(String.format(INSTITUTIONAL_MEMORY_MODIFIED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription(), targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.PATCH) + .description( + String.format( + INSTITUTIONAL_MEMORY_MODIFIED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription(), + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); } ++baseIdx; ++targetIdx; } else if (comparison < 0) { // InstitutionalMemory got removed. - changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_REMOVED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_REMOVED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++baseIdx; } else { // InstitutionalMemory got added.. - changeEvents.add(ChangeEvent.builder() - .modifier(targetElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_ADDED_FORMAT, targetElement.getUrl(), entityUrn, - targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(targetElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_ADDED_FORMAT, + targetElement.getUrl(), + entityUrn, + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++targetIdx; } } @@ -100,34 +119,42 @@ private static List computeDiffs(InstitutionalMemory baseInstitutio while (baseIdx < baseElements.size()) { // InstitutionalMemory got removed. 
InstitutionalMemoryMetadata baseElement = baseElements.get(baseIdx); - changeEvents.add(ChangeEvent.builder() - .modifier(baseElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_REMOVED_FORMAT, baseElement.getUrl(), entityUrn, - baseElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(baseElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_REMOVED_FORMAT, + baseElement.getUrl(), + entityUrn, + baseElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++baseIdx; } while (targetIdx < targetElements.size()) { // Newly added owners. InstitutionalMemoryMetadata targetElement = targetElements.get(targetIdx); // InstitutionalMemory got added.. - changeEvents.add(ChangeEvent.builder() - .modifier(targetElement.getUrl().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.DOCUMENTATION) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description( - String.format(INSTITUTIONAL_MEMORY_ADDED_FORMAT, targetElement.getUrl(), entityUrn, - targetElement.getDescription())) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + ChangeEvent.builder() + .modifier(targetElement.getUrl().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.DOCUMENTATION) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + INSTITUTIONAL_MEMORY_ADDED_FORMAT, + targetElement.getUrl(), + entityUrn, + targetElement.getDescription())) + .auditStamp(auditStamp) + .build()); ++targetIdx; } return changeEvents; @@ -145,20 +172,26 @@ private static void sortElementsByUrl(InstitutionalMemory institutionalMemory) { return; } List elements = new ArrayList<>(institutionalMemory.getElements()); - elements.sort(Comparator.comparing(InstitutionalMemoryMetadata::getUrl, Comparator.comparing(Url::toString))); + elements.sort( + Comparator.comparing( + InstitutionalMemoryMetadata::getUrl, Comparator.comparing(Url::toString))); institutionalMemory.setElements(new InstitutionalMemoryMetadataArray(elements)); } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME) || !currentValue.getAspect() - .equals(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME) + || !currentValue.getAspect().equals(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + INSTITUTIONAL_MEMORY_ASPECT_NAME); } @@ -166,7 +199,9 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec InstitutionalMemory targetInstitutionalMemory = getInstitutionalMemoryFromAspect(currentValue); List changeEvents = new ArrayList<>(); if (element == 
ChangeCategory.DOCUMENTATION) { - changeEvents.addAll(computeDiffs(baseInstitutionalMemory, targetInstitutionalMemory, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs( + baseInstitutionalMemory, targetInstitutionalMemory, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java index f5697aea25b9a..b32958508cf24 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/OwnershipChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -19,23 +21,21 @@ import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class OwnershipChangeEventGenerator extends EntityChangeEventGenerator { private static final String OWNER_ADDED_FORMAT = "'%s' added as a `%s` of '%s'."; private static final String OWNER_REMOVED_FORMAT = "'%s' removed as a `%s` of '%s'."; private static final String OWNERSHIP_TYPE_CHANGE_FORMAT = "'%s''s ownership type changed from '%s' to '%s' for '%s'."; - private static List computeDiffs(Ownership baseOwnership, Ownership targetOwnership, String entityUrn, - AuditStamp auditStamp) { + private static List computeDiffs( + Ownership baseOwnership, Ownership targetOwnership, String entityUrn, AuditStamp auditStamp) { List changeEvents = new ArrayList<>(); sortOwnersByUrn(baseOwnership); sortOwnersByUrn(targetOwnership); OwnerArray baseOwners = (baseOwnership != null) ? baseOwnership.getOwners() : new OwnerArray(); - OwnerArray targetOwners = (targetOwnership != null) ? targetOwnership.getOwners() : new OwnerArray(); + OwnerArray targetOwners = + (targetOwnership != null) ? targetOwnership.getOwners() : new OwnerArray(); int baseOwnerIdx = 0; int targetOwnerIdx = 0; @@ -46,49 +46,66 @@ private static List computeDiffs(Ownership baseOwnership, Ownership if (comparison == 0) { if (!baseOwner.getType().equals(targetOwner.getType())) { // Ownership type has changed. 
- changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getType().name()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.PATCH) - .description( - String.format(OWNERSHIP_TYPE_CHANGE_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), - targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getType().name()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.PATCH) + .description( + String.format( + OWNERSHIP_TYPE_CHANGE_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); } ++baseOwnerIdx; ++targetOwnerIdx; } else if (comparison < 0) { // Owner got removed - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(baseOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_REMOVED_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), entityUrn)) - .ownerUrn(baseOwner.getOwner()) - .ownerType(baseOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(baseOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_REMOVED_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + entityUrn)) + .ownerUrn(baseOwner.getOwner()) + .ownerType(baseOwner.getType()) + .auditStamp(auditStamp) + .build()); ++baseOwnerIdx; } else { // Owner got added. - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_ADDED_FORMAT, targetOwner.getOwner().getId(), targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_ADDED_FORMAT, + targetOwner.getOwner().getId(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); ++targetOwnerIdx; } } @@ -96,33 +113,45 @@ private static List computeDiffs(Ownership baseOwnership, Ownership while (baseOwnerIdx < baseOwners.size()) { // Handle removed owners. 
Owner baseOwner = baseOwners.get(baseOwnerIdx); - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(baseOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_REMOVED_FORMAT, baseOwner.getOwner().getId(), baseOwner.getType(), entityUrn)) - .ownerUrn(baseOwner.getOwner()) - .ownerType(baseOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(baseOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_REMOVED_FORMAT, + baseOwner.getOwner().getId(), + baseOwner.getType(), + entityUrn)) + .ownerUrn(baseOwner.getOwner()) + .ownerType(baseOwner.getType()) + .auditStamp(auditStamp) + .build()); ++baseOwnerIdx; } while (targetOwnerIdx < targetOwners.size()) { // Newly added owners. Owner targetOwner = targetOwners.get(targetOwnerIdx); - changeEvents.add(OwnerChangeEvent.entityOwnerChangeEventBuilder() - .modifier(targetOwner.getOwner().toString()) - .entityUrn(entityUrn) - .category(ChangeCategory.OWNER) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(String.format(OWNER_ADDED_FORMAT, targetOwner.getOwner().getId(), targetOwner.getType(), entityUrn)) - .ownerUrn(targetOwner.getOwner()) - .ownerType(targetOwner.getType()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + OwnerChangeEvent.entityOwnerChangeEventBuilder() + .modifier(targetOwner.getOwner().toString()) + .entityUrn(entityUrn) + .category(ChangeCategory.OWNER) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + String.format( + OWNER_ADDED_FORMAT, + targetOwner.getOwner().getId(), + targetOwner.getType(), + entityUrn)) + .ownerUrn(targetOwner.getOwner()) + .ownerType(targetOwner.getType()) + .auditStamp(auditStamp) + .build()); ++targetOwnerIdx; } return changeEvents; @@ -145,15 +174,19 @@ private static void sortOwnersByUrn(Ownership ownership) { } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory element, JsonPatch rawDiff, boolean rawDiffsRequested) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory element, + JsonPatch rawDiff, + boolean rawDiffsRequested) { if (currentValue == null) { throw new IllegalArgumentException("EntityAspect currentValue should not be null"); } - if (!previousValue.getAspect().equals(OWNERSHIP_ASPECT_NAME) || !currentValue.getAspect() - .equals(OWNERSHIP_ASPECT_NAME)) { + if (!previousValue.getAspect().equals(OWNERSHIP_ASPECT_NAME) + || !currentValue.getAspect().equals(OWNERSHIP_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + OWNERSHIP_ASPECT_NAME); } @@ -162,11 +195,13 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec List changeEvents = new ArrayList<>(); if (element == ChangeCategory.OWNER) { - changeEvents.addAll(computeDiffs(baseOwnership, targetOwnership, currentValue.getUrn(), null)); + changeEvents.addAll( + computeDiffs(baseOwnership, targetOwnership, currentValue.getUrn(), null)); } // Assess the highest change at the transaction(schema) level. 
- // Why isn't this done at changeevent level - what if transaction contains multiple category events? + // Why isn't this done at changeevent level - what if transaction contains multiple category + // events? SemanticChangeType highestSemanticChange = SemanticChangeType.NONE; ChangeEvent highestChangeEvent = changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java index 2e0a8586cba60..1fd5d6e2c0f7a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGenerator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeline.eventgenerator; +import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; + import com.datahub.util.RecordUtils; import com.github.fge.jsonpatch.JsonPatch; import com.linkedin.common.AuditStamp; @@ -29,14 +31,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang.StringUtils; -import static com.linkedin.metadata.timeline.eventgenerator.ChangeEventGeneratorUtils.*; - - @Slf4j public class SchemaMetadataChangeEventGenerator extends EntityChangeEventGenerator { private static final String SCHEMA_METADATA_ASPECT_NAME = "schemaMetadata"; - private static final String BACKWARDS_INCOMPATIBLE_DESC = "A backwards incompatible change due to"; - private static final String BACK_AND_FORWARD_COMPATIBLE_DESC = "A forwards & backwards compatible change due to "; + private static final String BACKWARDS_INCOMPATIBLE_DESC = + "A backwards incompatible change due to"; + private static final String BACK_AND_FORWARD_COMPATIBLE_DESC = + "A forwards & backwards compatible change due to "; private static final String FIELD_DESCRIPTION_ADDED_FORMAT = "The description '%s' for the field '%s' has been added."; private static final String FIELD_DESCRIPTION_REMOVED_FORMAT = @@ -44,8 +45,11 @@ public class SchemaMetadataChangeEventGenerator extends EntityChangeEventGenerat private static final String FIELD_DESCRIPTION_MODIFIED_FORMAT = "The description for the field '%s' has been changed from '%s' to '%s'."; - private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, @Nullable SchemaField targetField, - String datasetFieldUrn, AuditStamp auditStamp) { + private static ChangeEvent getDescriptionChange( + @Nullable SchemaField baseField, + @Nullable SchemaField targetField, + String datasetFieldUrn, + AuditStamp auditStamp) { String baseDescription = (baseField != null) ? baseField.getDescription() : null; String targetDescription = (targetField != null) ? 
targetField.getDescription() : null; if (baseDescription == null && targetDescription != null) { @@ -55,7 +59,9 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.MINOR) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_ADDED_FORMAT, targetDescription, targetField.getFieldPath())) + .description( + String.format( + FIELD_DESCRIPTION_ADDED_FORMAT, targetDescription, targetField.getFieldPath())) .auditStamp(auditStamp) .build(); } @@ -66,7 +72,9 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.MINOR) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_REMOVED_FORMAT, baseDescription, baseField.getFieldPath())) + .description( + String.format( + FIELD_DESCRIPTION_REMOVED_FORMAT, baseDescription, baseField.getFieldPath())) .auditStamp(auditStamp) .build(); } @@ -77,26 +85,36 @@ private static ChangeEvent getDescriptionChange(@Nullable SchemaField baseField, .semVerChange(SemanticChangeType.PATCH) .category(ChangeCategory.DOCUMENTATION) .entityUrn(datasetFieldUrn) - .description(String.format(FIELD_DESCRIPTION_MODIFIED_FORMAT, baseField.getFieldPath(), baseDescription, - targetDescription)) + .description( + String.format( + FIELD_DESCRIPTION_MODIFIED_FORMAT, + baseField.getFieldPath(), + baseDescription, + targetDescription)) .auditStamp(auditStamp) .build(); } return null; } - private static List getGlobalTagChangeEvents(SchemaField baseField, SchemaField targetField, + private static List getGlobalTagChangeEvents( + SchemaField baseField, + SchemaField targetField, String parentUrnStr, String datasetFieldUrn, AuditStamp auditStamp) { // 1. Get EntityTagChangeEvent, then rebind into a SchemaFieldTagChangeEvent. List entityTagChangeEvents = - GlobalTagsChangeEventGenerator.computeDiffs(baseField != null ? baseField.getGlobalTags() : null, - targetField != null ? targetField.getGlobalTags() : null, datasetFieldUrn, auditStamp); + GlobalTagsChangeEventGenerator.computeDiffs( + baseField != null ? baseField.getGlobalTags() : null, + targetField != null ? targetField.getGlobalTags() : null, + datasetFieldUrn, + auditStamp); if (baseField != null || targetField != null) { - String fieldPath = targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); + String fieldPath = + targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); // 2. Convert EntityTagChangeEvent into a SchemaFieldTagChangeEvent. final Urn parentUrn; try { @@ -106,27 +124,30 @@ private static List getGlobalTagChangeEvents(SchemaField baseField, return Collections.emptyList(); } - return convertEntityTagChangeEvents( - fieldPath, - parentUrn, - entityTagChangeEvents); + return convertEntityTagChangeEvents(fieldPath, parentUrn, entityTagChangeEvents); } return Collections.emptyList(); } - private static List getGlossaryTermsChangeEvents(SchemaField baseField, SchemaField targetField, + private static List getGlossaryTermsChangeEvents( + SchemaField baseField, + SchemaField targetField, String parentUrnStr, String datasetFieldUrn, AuditStamp auditStamp) { // 1. Get EntityGlossaryTermChangeEvent, then rebind into a SchemaFieldGlossaryTermChangeEvent. List entityGlossaryTermsChangeEvents = - GlossaryTermsChangeEventGenerator.computeDiffs(baseField != null ? baseField.getGlossaryTerms() : null, - targetField != null ? 
targetField.getGlossaryTerms() : null, datasetFieldUrn, auditStamp); + GlossaryTermsChangeEventGenerator.computeDiffs( + baseField != null ? baseField.getGlossaryTerms() : null, + targetField != null ? targetField.getGlossaryTerms() : null, + datasetFieldUrn, + auditStamp); if (targetField != null || baseField != null) { - String fieldPath = targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); + String fieldPath = + targetField != null ? targetField.getFieldPath() : baseField.getFieldPath(); // 2. Convert EntityGlossaryTermChangeEvent into a SchemaFieldGlossaryTermChangeEvent. final Urn parentUrn; try { @@ -137,16 +158,18 @@ private static List getGlossaryTermsChangeEvents(SchemaField baseFi } return convertEntityGlossaryTermChangeEvents( - fieldPath, - parentUrn, - entityGlossaryTermsChangeEvents); + fieldPath, parentUrn, entityGlossaryTermsChangeEvents); } return Collections.emptyList(); } - private static List getFieldPropertyChangeEvents(SchemaField baseField, SchemaField targetField, - Urn datasetUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List getFieldPropertyChangeEvents( + SchemaField baseField, + SchemaField targetField, + Urn datasetUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { List propChangeEvents = new ArrayList<>(); String datasetFieldUrn; if (targetField != null) { @@ -157,7 +180,8 @@ private static List getFieldPropertyChangeEvents(SchemaField baseFi // Description Change. if (ChangeCategory.DOCUMENTATION.equals(changeCategory)) { - ChangeEvent descriptionChangeEvent = getDescriptionChange(baseField, targetField, datasetFieldUrn, auditStamp); + ChangeEvent descriptionChangeEvent = + getDescriptionChange(baseField, targetField, datasetFieldUrn, auditStamp); if (descriptionChangeEvent != null) { propChangeEvents.add(descriptionChangeEvent); } @@ -165,20 +189,28 @@ private static List getFieldPropertyChangeEvents(SchemaField baseFi // Global Tags if (ChangeCategory.TAG.equals(changeCategory)) { - propChangeEvents.addAll(getGlobalTagChangeEvents(baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); + propChangeEvents.addAll( + getGlobalTagChangeEvents( + baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); } // Glossary terms. if (ChangeCategory.GLOSSARY_TERM.equals(changeCategory)) { - propChangeEvents.addAll(getGlossaryTermsChangeEvents(baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); + propChangeEvents.addAll( + getGlossaryTermsChangeEvents( + baseField, targetField, datasetUrn.toString(), datasetFieldUrn, auditStamp)); } return propChangeEvents; } // TODO: This could use some cleanup, lots of repeated logic and tenuous conditionals - private static List computeDiffs(SchemaMetadata baseSchema, SchemaMetadata targetSchema, - Urn datasetUrn, ChangeCategory changeCategory, AuditStamp auditStamp) { + private static List computeDiffs( + SchemaMetadata baseSchema, + SchemaMetadata targetSchema, + Urn datasetUrn, + ChangeCategory changeCategory, + AuditStamp auditStamp) { // Sort the fields by their field path. if (baseSchema != null) { sortFieldsByPath(baseSchema); @@ -188,8 +220,10 @@ private static List computeDiffs(SchemaMetadata baseSchema, SchemaM } // Performs ordinal based diff, primarily based on fixed field ordinals and their types. - SchemaFieldArray baseFields = (baseSchema != null ? baseSchema.getFields() : new SchemaFieldArray()); - SchemaFieldArray targetFields = targetSchema != null ? 
targetSchema.getFields() : new SchemaFieldArray(); + SchemaFieldArray baseFields = + (baseSchema != null ? baseSchema.getFields() : new SchemaFieldArray()); + SchemaFieldArray targetFields = + targetSchema != null ? targetSchema.getFields() : new SchemaFieldArray(); int baseFieldIdx = 0; int targetFieldIdx = 0; List changeEvents = new ArrayList<>(); @@ -197,7 +231,7 @@ private static List computeDiffs(SchemaMetadata baseSchema, SchemaM while (baseFieldIdx < baseFields.size() && targetFieldIdx < targetFields.size()) { SchemaField curBaseField = baseFields.get(baseFieldIdx); SchemaField curTargetField = targetFields.get(targetFieldIdx); - //TODO: Re-evaluate ordinal processing? + // TODO: Re-evaluate ordinal processing? int comparison = curBaseField.getFieldPath().compareTo(curTargetField.getFieldPath()); if (renamedFields.contains(curBaseField)) { baseFieldIdx++; @@ -208,61 +242,78 @@ private static List computeDiffs(SchemaMetadata baseSchema, SchemaM if (!curBaseField.getNativeDataType().equals(curTargetField.getNativeDataType())) { // Non-backward compatible change + Major version bump if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(String.format("%s native datatype of the field '%s' changed from '%s' to '%s'.", - BACKWARDS_INCOMPATIBLE_DESC, getFieldPathV1(curTargetField), curBaseField.getNativeDataType(), - curTargetField.getNativeDataType())) - .fieldPath(curBaseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) - .nullable(curBaseField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + String.format( + "%s native datatype of the field '%s' changed from '%s' to '%s'.", + BACKWARDS_INCOMPATIBLE_DESC, + getFieldPathV1(curTargetField), + curBaseField.getNativeDataType(), + curTargetField.getNativeDataType())) + .fieldPath(curBaseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) + .nullable(curBaseField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + List propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; ++targetFieldIdx; } List propChangeEvents = - getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; ++targetFieldIdx; } else if (comparison < 0) { - // Base Field was removed or was renamed. Non-backward compatible change + Major version bump + // Base Field was removed or was renamed. 
Non-backward compatible change + Major version + // bump // Check for rename, if rename coincides with other modifications we assume drop/add. - // Assumes that two different fields on the same schema would not have the same description, terms, + // Assumes that two different fields on the same schema would not have the same description, + // terms, // or tags and share the same type - SchemaField renamedField = findRenamedField(curBaseField, - targetFields.subList(targetFieldIdx, targetFields.size()), renamedFields); + SchemaField renamedField = + findRenamedField( + curBaseField, + targetFields.subList(targetFieldIdx, targetFields.size()), + renamedFields); if (renamedField == null) { processRemoval(changeCategory, changeEvents, datasetUrn, curBaseField, auditStamp); ++baseFieldIdx; } else { changeEvents.add(generateRenameEvent(datasetUrn, curBaseField, renamedField, auditStamp)); - List propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + List propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++baseFieldIdx; renamedFields.add(renamedField); } } else { - // The targetField got added or a renaming occurred. Forward & backwards compatible change + minor version bump. - SchemaField renamedField = findRenamedField(curTargetField, - baseFields.subList(baseFieldIdx, baseFields.size()), renamedFields); + // The targetField got added or a renaming occurred. Forward & backwards compatible change + + // minor version bump. + SchemaField renamedField = + findRenamedField( + curTargetField, baseFields.subList(baseFieldIdx, baseFields.size()), renamedFields); if (renamedField == null) { processAdd(changeCategory, changeEvents, datasetUrn, curTargetField, auditStamp); ++targetFieldIdx; } else { - changeEvents.add(generateRenameEvent(datasetUrn, renamedField, curTargetField, auditStamp)); - List propChangeEvents = getFieldPropertyChangeEvents(curBaseField, curTargetField, datasetUrn, - changeCategory, auditStamp); + changeEvents.add( + generateRenameEvent(datasetUrn, renamedField, curTargetField, auditStamp)); + List propChangeEvents = + getFieldPropertyChangeEvents( + curBaseField, curTargetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); ++targetFieldIdx; renamedFields.add(renamedField); @@ -287,7 +338,8 @@ BACKWARDS_INCOMPATIBLE_DESC, getFieldPathV1(curTargetField), curBaseField.getNat } // Handle primary key constraint change events. - List primaryKeyChangeEvents = getPrimaryKeyChangeEvents(baseSchema, targetSchema, datasetUrn, auditStamp); + List primaryKeyChangeEvents = + getPrimaryKeyChangeEvents(baseSchema, targetSchema, datasetUrn, auditStamp); changeEvents.addAll(primaryKeyChangeEvents); // Handle foreign key constraint change events. 
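
A reading aid, not part of the patch itself: the reflowed computeDiffs above is, at its core, a two-pointer merge over the path-sorted base and target field arrays, with rename detection layered on top. A minimal self-contained Java sketch of that walk, where the emit* helpers are hypothetical stand-ins for the event-building and findRenamedField logic:

    import java.util.List;

    class SortedPathMergeSketch {
      // Hypothetical helpers standing in for the event-building code in computeDiffs.
      static void emitMatched(String path) { System.out.println("compare in place: " + path); }
      static void emitRemovedOrRenamed(String path) { System.out.println("removed/renamed: " + path); }
      static void emitAddedOrRenamed(String path) { System.out.println("added/renamed: " + path); }

      // Both lists are pre-sorted by field path, as computeDiffs ensures via sortFieldsByPath.
      static void walk(List<String> basePaths, List<String> targetPaths) {
        int i = 0;
        int j = 0;
        while (i < basePaths.size() && j < targetPaths.size()) {
          int cmp = basePaths.get(i).compareTo(targetPaths.get(j));
          if (cmp == 0) {
            // Same path on both sides: check for type and property changes.
            emitMatched(basePaths.get(i));
            i++;
            j++;
          } else if (cmp < 0) {
            // Only in base: a removal, unless rename detection pairs it with a target field.
            emitRemovedOrRenamed(basePaths.get(i));
            i++;
          } else {
            // Only in target: an addition, under the mirrored rename rule.
            emitAddedOrRenamed(targetPaths.get(j));
            j++;
          }
        }
        // Whatever remains on either side is a pure removal or addition.
        while (i < basePaths.size()) { emitRemovedOrRenamed(basePaths.get(i++)); }
        while (j < targetPaths.size()) { emitAddedOrRenamed(targetPaths.get(j++)); }
      }
    }

In the real method, the "removed" and "added" branches first consult findRenamedField (shown in the next hunk) so that a drop/add pair with matching type, parent path, and description is reported as a single rename event instead.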
@@ -306,16 +358,19 @@ private static void sortFieldsByPath(SchemaMetadata schemaMetadata) { schemaMetadata.setFields(new SchemaFieldArray(schemaFields)); } - private static SchemaField findRenamedField(SchemaField curField, List targetFields, Set renamedFields) { + private static SchemaField findRenamedField( + SchemaField curField, List targetFields, Set renamedFields) { return targetFields.stream() .filter(schemaField -> isRenamed(curField, schemaField)) .filter(field -> !renamedFields.contains(field)) - .findFirst().orElse(null); + .findFirst() + .orElse(null); } private static boolean isRenamed(SchemaField curField, SchemaField schemaField) { return curField.getNativeDataType().equals(schemaField.getNativeDataType()) - && parentFieldsMatch(curField, schemaField) && descriptionsMatch(curField, schemaField); + && parentFieldsMatch(curField, schemaField) + && descriptionsMatch(curField, schemaField); } private static boolean parentFieldsMatch(SchemaField curField, SchemaField schemaField) { @@ -324,73 +379,98 @@ private static boolean parentFieldsMatch(SchemaField curField, SchemaField schem if (curFieldIndex > 0 && schemaFieldIndex > 0) { String curFieldParentPath = curField.getFieldPath().substring(0, curFieldIndex); String schemaFieldParentPath = schemaField.getFieldPath().substring(0, schemaFieldIndex); - return StringUtils.isNotBlank(curFieldParentPath) && curFieldParentPath.equals(schemaFieldParentPath); + return StringUtils.isNotBlank(curFieldParentPath) + && curFieldParentPath.equals(schemaFieldParentPath); } // No parent field return curFieldIndex < 0 && schemaFieldIndex < 0; } private static boolean descriptionsMatch(SchemaField curField, SchemaField schemaField) { - return StringUtils.isNotBlank(curField.getDescription()) && curField.getDescription().equals(schemaField.getDescription()); + return StringUtils.isNotBlank(curField.getDescription()) + && curField.getDescription().equals(schemaField.getDescription()); } - private static void processRemoval(ChangeCategory changeCategory, List changeEvents, Urn datasetUrn, - SchemaField baseField, AuditStamp auditStamp) { + private static void processRemoval( + ChangeCategory changeCategory, + List changeEvents, + Urn datasetUrn, + SchemaField baseField, + AuditStamp auditStamp) { if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .modifier(getSchemaFieldUrn(datasetUrn, baseField).toString()) - .entityUrn(datasetUrn.toString()) - .category(ChangeCategory.TECHNICAL_SCHEMA) - .operation(ChangeOperation.REMOVE) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " removal of field: '" + getFieldPathV1(baseField) + "'.") - .fieldPath(baseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, baseField)) - .nullable(baseField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .modifier(getSchemaFieldUrn(datasetUrn, baseField).toString()) + .entityUrn(datasetUrn.toString()) + .category(ChangeCategory.TECHNICAL_SCHEMA) + .operation(ChangeOperation.REMOVE) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " removal of field: '" + + getFieldPathV1(baseField) + + "'.") + .fieldPath(baseField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, baseField)) + .nullable(baseField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List propChangeEvents = 
getFieldPropertyChangeEvents(baseField, null, datasetUrn, - changeCategory, auditStamp); + List propChangeEvents = + getFieldPropertyChangeEvents(baseField, null, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); } - private static void processAdd(ChangeCategory changeCategory, List changeEvents, Urn datasetUrn, - SchemaField targetField, AuditStamp auditStamp) { + private static void processAdd( + ChangeCategory changeCategory, + List changeEvents, + Urn datasetUrn, + SchemaField targetField, + AuditStamp auditStamp) { if (ChangeCategory.TECHNICAL_SCHEMA.equals(changeCategory)) { - changeEvents.add(DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .modifier(getSchemaFieldUrn(datasetUrn, targetField).toString()) - .entityUrn(datasetUrn.toString()) - .category(ChangeCategory.TECHNICAL_SCHEMA) - .operation(ChangeOperation.ADD) - .semVerChange(SemanticChangeType.MINOR) - .description(BACK_AND_FORWARD_COMPATIBLE_DESC + "the newly added field '" + getFieldPathV1(targetField) + "'.") - .fieldPath(targetField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, targetField)) - .nullable(targetField.isNullable()) - .auditStamp(auditStamp) - .build()); + changeEvents.add( + DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .modifier(getSchemaFieldUrn(datasetUrn, targetField).toString()) + .entityUrn(datasetUrn.toString()) + .category(ChangeCategory.TECHNICAL_SCHEMA) + .operation(ChangeOperation.ADD) + .semVerChange(SemanticChangeType.MINOR) + .description( + BACK_AND_FORWARD_COMPATIBLE_DESC + + "the newly added field '" + + getFieldPathV1(targetField) + + "'.") + .fieldPath(targetField.getFieldPath()) + .fieldUrn(getSchemaFieldUrn(datasetUrn, targetField)) + .nullable(targetField.isNullable()) + .auditStamp(auditStamp) + .build()); } - List propChangeEvents = getFieldPropertyChangeEvents(null, targetField, datasetUrn, - changeCategory, auditStamp); + List propChangeEvents = + getFieldPropertyChangeEvents(null, targetField, datasetUrn, changeCategory, auditStamp); changeEvents.addAll(propChangeEvents); } - private static ChangeEvent generateRenameEvent(Urn datasetUrn, SchemaField curBaseField, SchemaField curTargetField, - AuditStamp auditStamp) { - return DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MINOR) - .description(BACK_AND_FORWARD_COMPATIBLE_DESC + "renaming of the field '" + getFieldPathV1(curBaseField) - + " to " + getFieldPathV1(curTargetField) + "'.") - .fieldPath(curBaseField.getFieldPath()) - .fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) - .nullable(curBaseField.isNullable()) - .auditStamp(auditStamp) - .build(); + private static ChangeEvent generateRenameEvent( + Urn datasetUrn, SchemaField curBaseField, SchemaField curTargetField, AuditStamp auditStamp) { + return DatasetSchemaFieldChangeEvent.schemaFieldChangeEventBuilder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, curBaseField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MINOR) + .description( + BACK_AND_FORWARD_COMPATIBLE_DESC + + "renaming of the field '" + + getFieldPathV1(curBaseField) + + " to " + + getFieldPathV1(curTargetField) + + "'.") + .fieldPath(curBaseField.getFieldPath()) + 
.fieldUrn(getSchemaFieldUrn(datasetUrn, curBaseField)) + .nullable(curBaseField.isNullable()) + .auditStamp(auditStamp) + .build(); } private static SchemaMetadata getSchemaMetadataFromAspect(EntityAspect entityAspect) { @@ -407,49 +487,73 @@ private static List getForeignKeyChangeEvents() { return foreignKeyChangeEvents; } - private static List getPrimaryKeyChangeEvents(SchemaMetadata baseSchema, SchemaMetadata targetSchema, - Urn datasetUrn, AuditStamp auditStamp) { + private static List getPrimaryKeyChangeEvents( + SchemaMetadata baseSchema, + SchemaMetadata targetSchema, + Urn datasetUrn, + AuditStamp auditStamp) { List primaryKeyChangeEvents = new ArrayList<>(); Set basePrimaryKeys = - (baseSchema != null && baseSchema.getPrimaryKeys() != null) ? new HashSet<>(baseSchema.getPrimaryKeys()) + (baseSchema != null && baseSchema.getPrimaryKeys() != null) + ? new HashSet<>(baseSchema.getPrimaryKeys()) : new HashSet<>(); Set targetPrimaryKeys = - (targetSchema != null && targetSchema.getPrimaryKeys() != null) ? new HashSet<>(targetSchema.getPrimaryKeys()) : new HashSet<>(); + (targetSchema != null && targetSchema.getPrimaryKeys() != null) + ? new HashSet<>(targetSchema.getPrimaryKeys()) + : new HashSet<>(); Set removedBaseKeys = - basePrimaryKeys.stream().filter(key -> !targetPrimaryKeys.contains(key)).collect(Collectors.toSet()); + basePrimaryKeys.stream() + .filter(key -> !targetPrimaryKeys.contains(key)) + .collect(Collectors.toSet()); for (String removedBaseKeyField : removedBaseKeys) { - primaryKeyChangeEvents.add(ChangeEvent.builder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn.toString(), removedBaseKeyField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " removal of the primary key field '" + removedBaseKeyField + "'") - .auditStamp(auditStamp) - .build()); + primaryKeyChangeEvents.add( + ChangeEvent.builder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn.toString(), removedBaseKeyField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " removal of the primary key field '" + + removedBaseKeyField + + "'") + .auditStamp(auditStamp) + .build()); } Set addedTargetKeys = - targetPrimaryKeys.stream().filter(key -> !basePrimaryKeys.contains(key)).collect(Collectors.toSet()); + targetPrimaryKeys.stream() + .filter(key -> !basePrimaryKeys.contains(key)) + .collect(Collectors.toSet()); for (String addedTargetKeyField : addedTargetKeys) { - primaryKeyChangeEvents.add(ChangeEvent.builder() - .category(ChangeCategory.TECHNICAL_SCHEMA) - .modifier(getSchemaFieldUrn(datasetUrn, addedTargetKeyField).toString()) - .entityUrn(datasetUrn.toString()) - .operation(ChangeOperation.MODIFY) - .semVerChange(SemanticChangeType.MAJOR) - .description(BACKWARDS_INCOMPATIBLE_DESC + " addition of the primary key field '" + addedTargetKeyField + "'") - .auditStamp(auditStamp) - .build()); + primaryKeyChangeEvents.add( + ChangeEvent.builder() + .category(ChangeCategory.TECHNICAL_SCHEMA) + .modifier(getSchemaFieldUrn(datasetUrn, addedTargetKeyField).toString()) + .entityUrn(datasetUrn.toString()) + .operation(ChangeOperation.MODIFY) + .semVerChange(SemanticChangeType.MAJOR) + .description( + BACKWARDS_INCOMPATIBLE_DESC + + " addition of the primary key field '" + + addedTargetKeyField + 
+ "'") + .auditStamp(auditStamp) + .build()); } return primaryKeyChangeEvents; } @Override - public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspect currentValue, - ChangeCategory changeCategory, JsonPatch rawDiff, boolean rawDiffRequested) { - if (!previousValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME) || !currentValue.getAspect() - .equals(SCHEMA_METADATA_ASPECT_NAME)) { + public ChangeTransaction getSemanticDiff( + EntityAspect previousValue, + EntityAspect currentValue, + ChangeCategory changeCategory, + JsonPatch rawDiff, + boolean rawDiffRequested) { + if (!previousValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME) + || !currentValue.getAspect().equals(SCHEMA_METADATA_ASPECT_NAME)) { throw new IllegalArgumentException("Aspect is not " + SCHEMA_METADATA_ASPECT_NAME); } @@ -462,9 +566,14 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec List changeEvents; try { - changeEvents = new ArrayList<>( - computeDiffs(baseSchema, targetSchema, DatasetUrn.createFromString(currentValue.getUrn()), changeCategory, - null)); + changeEvents = + new ArrayList<>( + computeDiffs( + baseSchema, + targetSchema, + DatasetUrn.createFromString(currentValue.getUrn()), + changeCategory, + null)); } catch (URISyntaxException e) { throw new IllegalArgumentException("Malformed DatasetUrn " + currentValue.getUrn()); } @@ -472,7 +581,9 @@ public ChangeTransaction getSemanticDiff(EntityAspect previousValue, EntityAspec // Assess the highest change at the transaction(schema) level. SemanticChangeType highestSematicChange = SemanticChangeType.NONE; changeEvents = - changeEvents.stream().filter(changeEvent -> changeEvent.getCategory() == changeCategory).collect(Collectors.toList()); + changeEvents.stream() + .filter(changeEvent -> changeEvent.getCategory() == changeCategory) + .collect(Collectors.toList()); ChangeEvent highestChangeEvent = changeEvents.stream().max(Comparator.comparing(ChangeEvent::getSemVerChange)).orElse(null); if (highestChangeEvent != null) { @@ -496,11 +607,17 @@ public List getChangeEvents( @Nonnull Aspect to, @Nonnull AuditStamp auditStamp) { final List changeEvents = new ArrayList<>(); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.DOCUMENTATION, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TAG, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); - changeEvents.addAll(computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.GLOSSARY_TERM, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn, ChangeCategory.DOCUMENTATION, auditStamp)); + changeEvents.addAll( + computeDiffs(from.getValue(), to.getValue(), urn, ChangeCategory.TAG, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn, ChangeCategory.TECHNICAL_SCHEMA, auditStamp)); + changeEvents.addAll( + computeDiffs( + from.getValue(), to.getValue(), urn, ChangeCategory.GLOSSARY_TERM, auditStamp)); return changeEvents; } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java index d97a3fa4f65dd..da8cf3e3b49c9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java +++ 
b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/SingleDomainChangeEventGenerator.java
@@ -13,22 +13,27 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 /**
- * This is a simple differ that compares to Domains aspects and assumes that each domain
- * will have a single domain (currently the semantic contract).
+ * This is a simple differ that compares two Domains aspects and assumes that each entity will have
+ * a single domain (currently the semantic contract).
  */
 public class SingleDomainChangeEventGenerator extends EntityChangeEventGenerator<Domains> {
 
   @Override
-  public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect,
-      @Nonnull Aspect from, @Nonnull Aspect to, @Nonnull AuditStamp auditStamp) {
+  public List<ChangeEvent> getChangeEvents(
+      @Nonnull Urn urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nonnull Aspect from,
+      @Nonnull Aspect to,
+      @Nonnull AuditStamp auditStamp) {
     return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp);
   }
 
-  private List<ChangeEvent> computeDiffs(Domains baseDomains, Domains targetDomains, String entityUrn,
-      AuditStamp auditStamp) {
+  private List<ChangeEvent> computeDiffs(
+      Domains baseDomains, Domains targetDomains, String entityUrn, AuditStamp auditStamp) {
 
-    // Simply fetch the first element from each domains list and compare. If they are different, emit
+    // Simply fetch the first element from each domains list and compare. If they are different,
+    // emit
     // a domain ADD / REMOVE event.
     if (isDomainSet(baseDomains, targetDomains)) {
       return Collections.singletonList(
@@ -72,8 +77,7 @@ private List computeDiffs(Domains baseDomains, Domains targetDomain
           .modifier(targetDomains.getDomains().get(0).toString())
           .domainUrn(targetDomains.getDomains().get(0))
           .auditStamp(auditStamp)
-          .build()
-      );
+          .build());
     }
 
     return Collections.emptyList();
@@ -88,7 +92,9 @@ private boolean isDomainUnset(@Nullable final Domains from, @Nullable final Doma
   }
 
   private boolean isDomainChanged(@Nullable final Domains from, @Nullable final Domains to) {
-    return !isDomainEmpty(from) && !isDomainEmpty(to) && !from.getDomains().get(0).equals(to.getDomains().get(0));
+    return !isDomainEmpty(from)
+        && !isDomainEmpty(to)
+        && !from.getDomains().get(0).equals(to.getDomains().get(0));
   }
 
   private boolean isDomainEmpty(@Nullable final Domains domains) {
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
index 17f6798586417..df8aa4dd4ca71 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeline/eventgenerator/StatusChangeEventGenerator.java
@@ -11,28 +11,31 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
-/**
- * Differ responsible for determining whether an entity has been soft-deleted or soft-created.
- */
+/** Differ responsible for determining whether an entity has been soft-deleted or soft-created. */
 public class StatusChangeEventGenerator extends EntityChangeEventGenerator<Status> {
 
   @Override
-  public List<ChangeEvent> getChangeEvents(@Nonnull Urn urn, @Nonnull String entity, @Nonnull String aspect,
-      @Nonnull Aspect from, @Nonnull Aspect to, @Nonnull AuditStamp auditStamp) {
+  public List<ChangeEvent> getChangeEvents(
+      @Nonnull Urn urn,
+      @Nonnull String entity,
+      @Nonnull String aspect,
+      @Nonnull Aspect from,
+      @Nonnull Aspect to,
+      @Nonnull AuditStamp auditStamp) {
     return computeDiffs(from.getValue(), to.getValue(), urn.toString(), auditStamp);
  }
 
-  private List<ChangeEvent> computeDiffs(Status baseStatus, Status targetStatus, String entityUrn,
-      AuditStamp auditStamp) {
+  private List<ChangeEvent> computeDiffs(
+      Status baseStatus, Status targetStatus, String entityUrn, AuditStamp auditStamp) {
     // If the new status is "removed", then return a soft-deletion event.
     if (isRemoved(targetStatus)) {
       return Collections.singletonList(
           ChangeEvent.builder()
-              .category(ChangeCategory.LIFECYCLE)
-              .operation(ChangeOperation.SOFT_DELETE)
-              .auditStamp(auditStamp)
-              .entityUrn(entityUrn).build());
+              .category(ChangeCategory.LIFECYCLE)
+              .operation(ChangeOperation.SOFT_DELETE)
+              .auditStamp(auditStamp)
+              .entityUrn(entityUrn)
+              .build());
     }
 
     // If the new status is "unremoved", then return a reinstatement event.
@@ -42,7 +45,8 @@ private List computeDiffs(Status baseStatus, Status targetStatus, S
               .category(ChangeCategory.LIFECYCLE)
               .operation(ChangeOperation.REINSTATE)
               .auditStamp(auditStamp)
-              .entityUrn(entityUrn).build());
+              .entityUrn(entityUrn)
+              .build());
     }
 
     return Collections.emptyList();
diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java
index 3e8f83a531b59..eec7680a56ecb 100644
--- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java
+++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/ElasticSearchTimeseriesAspectService.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.timeseries.elastic;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.codahale.metrics.Timer;
 import com.datahub.util.RecordUtils;
 import com.datahub.util.exception.ESQueryException;
@@ -35,7 +37,6 @@
 import com.linkedin.timeseries.DeleteAspectValuesResult;
 import com.linkedin.timeseries.GenericTable;
 import com.linkedin.timeseries.GroupingBucket;
-
 import com.linkedin.timeseries.TimeseriesIndexSizeResult;
 import com.linkedin.util.Pair;
 import java.io.IOException;
@@ -70,16 +71,21 @@
 import org.opensearch.search.sort.SortBuilders;
 import org.opensearch.search.sort.SortOrder;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
-public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectService, ElasticSearchIndexed {
+public class ElasticSearchTimeseriesAspectService
+    implements TimeseriesAspectService, ElasticSearchIndexed {
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    OBJECT_MAPPER
+        .getFactory()
.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final String TIMESTAMP_FIELD = "timestampMillis"; private static final String EVENT_FIELD = "event"; private static final Integer DEFAULT_LIMIT = 10000; @@ -92,9 +98,13 @@ public class ElasticSearchTimeseriesAspectService implements TimeseriesAspectSer private final ESAggregatedStatsDAO _esAggregatedStatsDAO; private final EntityRegistry _entityRegistry; - public ElasticSearchTimeseriesAspectService(@Nonnull RestHighLevelClient searchClient, - @Nonnull IndexConvention indexConvention, @Nonnull TimeseriesAspectIndexBuilders indexBuilders, - @Nonnull EntityRegistry entityRegistry, @Nonnull ESBulkProcessor bulkProcessor, int numRetries) { + public ElasticSearchTimeseriesAspectService( + @Nonnull RestHighLevelClient searchClient, + @Nonnull IndexConvention indexConvention, + @Nonnull TimeseriesAspectIndexBuilders indexBuilders, + @Nonnull EntityRegistry entityRegistry, + @Nonnull ESBulkProcessor bulkProcessor, + int numRetries) { _indexConvention = indexConvention; _indexBuilders = indexBuilders; _searchClient = searchClient; @@ -111,10 +121,14 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { Object event = docFields.get(EVENT_FIELD); GenericAspect genericAspect; try { - genericAspect = new GenericAspect().setValue( - ByteString.unsafeWrap(OBJECT_MAPPER.writeValueAsString(event).getBytes(StandardCharsets.UTF_8))); + genericAspect = + new GenericAspect() + .setValue( + ByteString.unsafeWrap( + OBJECT_MAPPER.writeValueAsString(event).getBytes(StandardCharsets.UTF_8))); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to deserialize event from the timeseries aspect index: " + e); + throw new RuntimeException( + "Failed to deserialize event from the timeseries aspect index: " + e); } genericAspect.setContentType("application/json"); envelopedAspect.setAspect(genericAspect); @@ -122,9 +136,11 @@ private static EnvelopedAspect parseDocument(@Nonnull SearchHit doc) { if (systemMetadata != null) { try { envelopedAspect.setSystemMetadata( - RecordUtils.toRecordTemplate(SystemMetadata.class, OBJECT_MAPPER.writeValueAsString(systemMetadata))); + RecordUtils.toRecordTemplate( + SystemMetadata.class, OBJECT_MAPPER.writeValueAsString(systemMetadata))); } catch (JsonProcessingException e) { - throw new RuntimeException("Failed to deserialize system metadata from the timeseries aspect index: " + e); + throw new RuntimeException( + "Failed to deserialize system metadata from the timeseries aspect index: " + e); } } @@ -141,7 +157,8 @@ public List buildReindexConfigs() { return _indexBuilders.buildReindexConfigs(); } - public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) + public String reindexAsync( + String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { return _indexBuilders.reindexAsync(index, filterQuery, options); } @@ -152,11 +169,14 @@ public void reindexAll() { } @Override - public void upsertDocument(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull String docId, + public void upsertDocument( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull String docId, @Nonnull JsonNode document) { String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final UpdateRequest updateRequest = new UpdateRequest( - indexName, docId) + final UpdateRequest updateRequest = + new 
UpdateRequest(indexName, docId) .detectNoop(false) .docAsUpsert(true) .doc(document.toString(), XContentType.JSON) @@ -169,21 +189,29 @@ public List getIndexSizes() { List res = new ArrayList<>(); try { String indicesPattern = _indexConvention.getAllTimeseriesAspectIndicesPattern(); - Response r = _searchClient.getLowLevelClient().performRequest(new Request("GET", "/" + indicesPattern + "/_stats")); + Response r = + _searchClient + .getLowLevelClient() + .performRequest(new Request("GET", "/" + indicesPattern + "/_stats")); JsonNode body = new ObjectMapper().readTree(r.getEntity().getContent()); - body.get("indices").fields().forEachRemaining(entry -> { - TimeseriesIndexSizeResult elemResult = new TimeseriesIndexSizeResult(); - elemResult.setIndexName(entry.getKey()); - Optional> indexEntityAndAspect = _indexConvention.getEntityAndAspectName(entry.getKey()); - if (indexEntityAndAspect.isPresent()) { - elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); - elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); - } - int sizeBytes = entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); - float sizeMb = (float) sizeBytes / 1000; - elemResult.setSizeMb(sizeMb); - res.add(elemResult); - }); + body.get("indices") + .fields() + .forEachRemaining( + entry -> { + TimeseriesIndexSizeResult elemResult = new TimeseriesIndexSizeResult(); + elemResult.setIndexName(entry.getKey()); + Optional> indexEntityAndAspect = + _indexConvention.getEntityAndAspectName(entry.getKey()); + if (indexEntityAndAspect.isPresent()) { + elemResult.setEntityName(indexEntityAndAspect.get().getFirst()); + elemResult.setAspectName(indexEntityAndAspect.get().getSecond()); + } + int sizeBytes = + entry.getValue().get("primaries").get("store").get("size_in_bytes").asInt(); + float sizeMb = (float) sizeBytes / 1000; + elemResult.setSizeMb(sizeMb); + res.add(elemResult); + }); return res; } catch (IOException e) { throw new RuntimeException(e); @@ -194,10 +222,10 @@ public List getIndexSizes() { public long countByFilter( @Nonnull final String entityName, @Nonnull final String aspectName, - @Nullable final Filter filter - ) { + @Nullable final Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); - final BoolQueryBuilder filterQueryBuilder = QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); CountRequest countRequest = new CountRequest(); countRequest.query(filterQueryBuilder); countRequest.indices(indexName); @@ -220,20 +248,25 @@ public List getAspectValues( @Nullable final Integer limit, @Nullable final Filter filter, @Nullable final SortCriterion sort) { - final BoolQueryBuilder filterQueryBuilder = QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); + final BoolQueryBuilder filterQueryBuilder = + QueryBuilders.boolQuery().must(ESUtils.buildFilterQuery(filter, true)); filterQueryBuilder.must(QueryBuilders.matchQuery("urn", urn.toString())); // NOTE: We are interested only in the un-exploded rows as only they carry the `event` payload. 
filterQueryBuilder.mustNot(QueryBuilders.termQuery(MappingsBuilder.IS_EXPLODED_FIELD, true)); if (startTimeMillis != null) { - Criterion startTimeCriterion = new Criterion().setField(TIMESTAMP_FIELD) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTimeMillis.toString()); + Criterion startTimeCriterion = + new Criterion() + .setField(TIMESTAMP_FIELD) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(startTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(startTimeCriterion, true)); } if (endTimeMillis != null) { - Criterion endTimeCriterion = new Criterion().setField(TIMESTAMP_FIELD) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTimeMillis.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(TIMESTAMP_FIELD) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(endTimeMillis.toString()); filterQueryBuilder.must(ESUtils.getQueryBuilderFromCriterion(endTimeCriterion, true)); } final SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); @@ -242,7 +275,8 @@ public List getAspectValues( if (sort != null) { final SortOrder esSortOrder = - (sort.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) ? SortOrder.ASC + (sort.getOrder() == com.linkedin.metadata.query.filter.SortOrder.ASCENDING) + ? SortOrder.ASC : SortOrder.DESC; searchSourceBuilder.sort(SortBuilders.fieldSort(sort.getField()).order(esSortOrder)); } else { @@ -258,8 +292,10 @@ public List getAspectValues( log.debug("Search request is: " + searchRequest); SearchHits hits; - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), "searchAspectValues_search").time()) { + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); hits = searchResponse.getHits(); } catch (Exception e) { log.error("Search query failed:", e); @@ -272,17 +308,23 @@ public List getAspectValues( @Override @Nonnull - public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter, + public GenericTable getAggregatedStats( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull AggregationSpec[] aggregationSpecs, + @Nullable Filter filter, @Nullable GroupingBucket[] groupingBuckets) { - return _esAggregatedStatsDAO.getAggregatedStats(entityName, aspectName, aggregationSpecs, filter, groupingBuckets); + return _esAggregatedStatsDAO.getAggregatedStats( + entityName, aspectName, aggregationSpecs, filter, groupingBuckets); } /** - * A generic delete by filter API which uses elasticsearch's deleteByQuery. - * NOTE: There is no need for the client to explicitly walk each scroll page with this approach. Elastic will synchronously - * delete all of the documents matching the query that is specified by the filter, and internally handles the batching logic - * by the scroll page size specified(i.e. the DEFAULT_LIMIT value of 10,000). + * A generic delete by filter API which uses elasticsearch's deleteByQuery. NOTE: There is no need + * for the client to explicitly walk each scroll page with this approach. 
Elastic will + * synchronously delete all of the documents matching the query that is specified by the filter, + * and internally handles the batching logic by the scroll page size specified(i.e. the + * DEFAULT_LIMIT value of 10,000). + * * @param entityName the name of the entity. * @param aspectName the name of the aspect. * @param filter the filter to be used for deletion of the documents on the index. @@ -290,14 +332,18 @@ public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull Stri */ @Nonnull @Override - public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull Filter filter) { + public DeleteAspectValuesResult deleteAspectValues( + @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); - final Optional result = _bulkProcessor - .deleteByQuery(filterQueryBuilder, false, DEFAULT_LIMIT, TimeValue.timeValueMinutes(10), indexName) - .map(response -> new DeleteAspectValuesResult().setNumDocsDeleted(response.getDeleted())); + final Optional result = + _bulkProcessor + .deleteByQuery( + filterQueryBuilder, false, DEFAULT_LIMIT, TimeValue.timeValueMinutes(10), indexName) + .map( + response -> + new DeleteAspectValuesResult().setNumDocsDeleted(response.getDeleted())); if (result.isPresent()) { return result.get(); @@ -309,14 +355,20 @@ public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @ @Nonnull @Override - public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String deleteAspectValuesAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); final int batchSize = options.getBatchSize() > 0 ? options.getBatchSize() : DEFAULT_LIMIT; - TimeValue timeout = options.getTimeoutSeconds() > 0 ? TimeValue.timeValueSeconds(options.getTimeoutSeconds()) : null; - final Optional result = _bulkProcessor - .deleteByQueryAsync(filterQueryBuilder, false, batchSize, timeout, indexName); + TimeValue timeout = + options.getTimeoutSeconds() > 0 + ? 
TimeValue.timeValueSeconds(options.getTimeoutSeconds()) + : null; + final Optional result = + _bulkProcessor.deleteByQueryAsync(filterQueryBuilder, false, batchSize, timeout, indexName); if (result.isPresent()) { return result.get().getTask(); @@ -327,7 +379,10 @@ public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull Strin } @Override - public String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + public String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) { final String indexName = _indexConvention.getTimeseriesAspectIndexName(entityName, aspectName); final BoolQueryBuilder filterQueryBuilder = ESUtils.buildFilterQuery(filter, true); @@ -350,10 +405,16 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId) for (Map.Entry entry : _entityRegistry.getEntitySpecs().entrySet()) { for (AspectSpec aspectSpec : entry.getValue().getAspectSpecs()) { if (aspectSpec.isTimeseries()) { - DeleteAspectValuesResult result = this.deleteAspectValues(entry.getKey(), aspectSpec.getName(), filter); - rollbackResult.setNumDocsDeleted(rollbackResult.getNumDocsDeleted() + result.getNumDocsDeleted()); - log.info("Number of timeseries docs deleted for entity:{}, aspect:{}, runId:{}={}", entry.getKey(), - aspectSpec.getName(), runId, result.getNumDocsDeleted()); + DeleteAspectValuesResult result = + this.deleteAspectValues(entry.getKey(), aspectSpec.getName(), filter); + rollbackResult.setNumDocsDeleted( + rollbackResult.getNumDocsDeleted() + result.getNumDocsDeleted()); + log.info( + "Number of timeseries docs deleted for entity:{}, aspect:{}, runId:{}={}", + entry.getKey(), + aspectSpec.getName(), + runId, + result.getNumDocsDeleted()); } } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java index 37a5dc304cf7f..5bb523c8a8c1e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java @@ -8,7 +8,6 @@ import java.util.Map; import javax.annotation.Nonnull; - public class MappingsBuilder { public static final String URN_FIELD = "urn"; @@ -24,13 +23,14 @@ public class MappingsBuilder { public static final String PARTITION_SPEC_TIME_PARTITION = "timePartition"; public static final String RUN_ID_FIELD = "runId"; - private MappingsBuilder() { - } + private MappingsBuilder() {} public static Map getMappings(@Nonnull final AspectSpec aspectSpec) { if (!aspectSpec.isTimeseries()) { throw new IllegalArgumentException( - String.format("Cannot apply timeseries field indexing for a non-timeseries aspect %s", aspectSpec.getName())); + String.format( + "Cannot apply timeseries field indexing for a non-timeseries aspect %s", + aspectSpec.getName())); } Map mappings = new HashMap<>(); @@ -41,16 +41,24 @@ public static Map getMappings(@Nonnull final AspectSpec aspectSp mappings.put(TIMESTAMP_FIELD, ImmutableMap.of("type", "date")); mappings.put(TIMESTAMP_MILLIS_FIELD, ImmutableMap.of("type", "date")); mappings.put(EVENT_GRANULARITY, ImmutableMap.of("type", "keyword")); - mappings.put(PARTITION_SPEC, ImmutableMap.of("properties", - ImmutableMap.of(PARTITION_SPEC_PARTITION, ImmutableMap.of("type", "keyword"), 
PARTITION_SPEC_TIME_PARTITION, - ImmutableMap.of("type", "keyword")))); + mappings.put( + PARTITION_SPEC, + ImmutableMap.of( + "properties", + ImmutableMap.of( + PARTITION_SPEC_PARTITION, + ImmutableMap.of("type", "keyword"), + PARTITION_SPEC_TIME_PARTITION, + ImmutableMap.of("type", "keyword")))); mappings.put(EVENT_FIELD, ImmutableMap.of("type", "object", "enabled", false)); mappings.put(SYSTEM_METADATA_FIELD, ImmutableMap.of("type", "object", "enabled", false)); mappings.put(IS_EXPLODED_FIELD, ImmutableMap.of("type", "boolean")); - aspectSpec.getTimeseriesFieldSpecs() + aspectSpec + .getTimeseriesFieldSpecs() .forEach(x -> mappings.put(x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); - aspectSpec.getTimeseriesFieldCollectionSpecs() + aspectSpec + .getTimeseriesFieldCollectionSpecs() .forEach(x -> mappings.put(x.getName(), getTimeseriesFieldCollectionSpecMapping(x))); return ImmutableMap.of("properties", mappings); @@ -59,11 +67,16 @@ public static Map getMappings(@Nonnull final AspectSpec aspectSp private static Map getTimeseriesFieldCollectionSpecMapping( TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec) { Map collectionMappings = new HashMap<>(); - collectionMappings.put(timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(), + collectionMappings.put( + timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(), getFieldMapping(DataSchema.Type.STRING)); - timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap() + timeseriesFieldCollectionSpec + .getTimeseriesFieldSpecMap() .values() - .forEach(x -> collectionMappings.put(x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); + .forEach( + x -> + collectionMappings.put( + x.getName(), getFieldMapping(x.getPegasusSchema().getType()))); return ImmutableMap.of("properties", collectionMappings); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java index e9518ed8c39fa..564bcb2a242cb 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/TimeseriesAspectIndexBuilders.java @@ -7,19 +7,17 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.timeseries.BatchWriteOperationsOptions; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import com.linkedin.util.Pair; import java.io.IOException; import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import com.linkedin.util.Pair; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.QueryBuilder; - @Slf4j @RequiredArgsConstructor public class TimeseriesAspectIndexBuilders implements ElasticSearchIndexed { @@ -38,7 +36,8 @@ public void reindexAll() { } } - public String reindexAsync(String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) + public String reindexAsync( + String index, @Nullable QueryBuilder filterQuery, BatchWriteOperationsOptions options) throws Exception { Optional> entityAndAspect = _indexConvention.getEntityAndAspectName(index); if (entityAndAspect.isEmpty()) { @@ -54,31 +53,42 @@ public String reindexAsync(String index, 
@Nullable QueryBuilder filterQuery, Bat } } if (!entitySpec.hasAspect(aspectName)) { - throw new IllegalArgumentException(String.format("Could not find aspect %s of entity %s", aspectName, entityName)); + throw new IllegalArgumentException( + String.format("Could not find aspect %s of entity %s", aspectName, entityName)); } - ReindexConfig config = _indexBuilder.buildReindexState(index, - MappingsBuilder.getMappings(_entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName)), - Collections.emptyMap()); + ReindexConfig config = + _indexBuilder.buildReindexState( + index, + MappingsBuilder.getMappings( + _entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName)), + Collections.emptyMap()); return _indexBuilder.reindexInPlaceAsync(index, filterQuery, options, config); } @Override public List buildReindexConfigs() { return _entityRegistry.getEntitySpecs().values().stream() - .flatMap(entitySpec -> entitySpec.getAspectSpecs().stream() + .flatMap( + entitySpec -> + entitySpec.getAspectSpecs().stream() .map(aspectSpec -> Pair.of(entitySpec, aspectSpec))) - .filter(pair -> pair.getSecond().isTimeseries()) - .map(pair -> { + .filter(pair -> pair.getSecond().isTimeseries()) + .map( + pair -> { try { return _indexBuilder.buildReindexState( - _indexConvention.getTimeseriesAspectIndexName(pair.getFirst().getName(), pair.getSecond().getName()), - MappingsBuilder.getMappings(pair.getSecond()), Collections.emptyMap()); + _indexConvention.getTimeseriesAspectIndexName( + pair.getFirst().getName(), pair.getSecond().getName()), + MappingsBuilder.getMappings(pair.getSecond()), + Collections.emptyMap()); } catch (IOException e) { - log.error("Issue while building timeseries field index for entity {} aspect {}", pair.getFirst().getName(), - pair.getSecond().getName()); + log.error( + "Issue while building timeseries field index for entity {} aspect {}", + pair.getFirst().getName(), + pair.getSecond().getName()); throw new RuntimeException(e); } - }).collect(Collectors.toList()); + }) + .collect(Collectors.toList()); } - } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java index 316d25d1f37f4..539e5dfbaa1d0 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/query/ESAggregatedStatsDAO.java @@ -49,7 +49,6 @@ import org.opensearch.search.aggregations.pipeline.ParsedBucketMetricValue; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j public class ESAggregatedStatsDAO { private static final String ES_AGGREGATION_PREFIX = "agg_"; @@ -66,7 +65,9 @@ public class ESAggregatedStatsDAO { private final RestHighLevelClient _searchClient; private final EntityRegistry _entityRegistry; - public ESAggregatedStatsDAO(@Nonnull IndexConvention indexConvention, @Nonnull RestHighLevelClient searchClient, + public ESAggregatedStatsDAO( + @Nonnull IndexConvention indexConvention, + @Nonnull RestHighLevelClient searchClient, @Nonnull EntityRegistry entityRegistry) { _indexConvention = indexConvention; _searchClient = searchClient; @@ -94,7 +95,8 @@ private static String getAggregationSpecAggDisplayName(final AggregationSpec agg prefix = "cardinality_"; break; default: - throw new IllegalArgumentException("Unknown AggregationSpec type" + aggregationSpec.getAggregationType()); + throw new IllegalArgumentException( 
+ "Unknown AggregationSpec type" + aggregationSpec.getAggregationType()); } return prefix + aggregationSpec.getFieldPath(); } @@ -103,12 +105,19 @@ private static String getGroupingBucketAggName(final GroupingBucket groupingBuck if (groupingBucket.getType() == GroupingBucketType.DATE_GROUPING_BUCKET) { return toEsAggName(ES_AGGREGATION_PREFIX + groupingBucket.getKey()); } - return toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + groupingBucket.getKey()); + return toEsAggName( + ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + groupingBucket.getKey()); } - private static void rowGenHelper(final Aggregations lowestAggs, final int curLevel, final int lastLevel, - final List rows, final Stack row, final ImmutableList groupingBuckets, - final ImmutableList aggregationSpecs, AspectSpec aspectSpec) { + private static void rowGenHelper( + final Aggregations lowestAggs, + final int curLevel, + final int lastLevel, + final List rows, + final Stack row, + final ImmutableList groupingBuckets, + final ImmutableList aggregationSpecs, + AspectSpec aspectSpec) { if (curLevel == lastLevel) { // (Base-case): We are at the lowest level of nested bucket aggregations. // Append member aggregation values to the row and add the row to the output. @@ -123,7 +132,7 @@ private static void rowGenHelper(final Aggregations lowestAggs, final int curLev row.pop(); } } else if (curLevel < lastLevel) { - //(Recursive-case): We are still processing the nested group-by multi-bucket aggregations. + // (Recursive-case): We are still processing the nested group-by multi-bucket aggregations. // For each bucket, add the key to the row and recur down for full row construction. GroupingBucket curGroupingBucket = groupingBuckets.get(curLevel); String curGroupingBucketAggName = getGroupingBucketAggName(curGroupingBucket); @@ -136,7 +145,14 @@ private static void rowGenHelper(final Aggregations lowestAggs, final int curLev row.push(b.getKeyAsString()); } // Recur down - rowGenHelper(b.getAggregations(), curLevel + 1, lastLevel, rows, row, groupingBuckets, aggregationSpecs, + rowGenHelper( + b.getAggregations(), + curLevel + 1, + lastLevel, + rows, + row, + groupingBuckets, + aggregationSpecs, aspectSpec); // Remove the row value we have added for this level. row.pop(); @@ -179,11 +195,12 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str if (fieldPath.equals(MappingsBuilder.EVENT_GRANULARITY)) { return DataSchema.Type.RECORD; } - + String[] memberParts = fieldPath.split("\\."); if (memberParts.length == 1) { // Search in the timeseriesFieldSpecs. 
- TimeseriesFieldSpec timeseriesFieldSpec = aspectSpec.getTimeseriesFieldSpecMap().get(memberParts[0]); + TimeseriesFieldSpec timeseriesFieldSpec = + aspectSpec.getTimeseriesFieldSpecMap().get(memberParts[0]); if (timeseriesFieldSpec != null) { return timeseriesFieldSpec.getPegasusSchema().getType(); } @@ -196,8 +213,8 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str } else if (memberParts.length == 2) { // Check if partitionSpec if (memberParts[0].equals(MappingsBuilder.PARTITION_SPEC)) { - if (memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_PARTITION) || memberParts[1].equals( - MappingsBuilder.PARTITION_SPEC_TIME_PARTITION)) { + if (memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_PARTITION) + || memberParts[1].equals(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION)) { return DataSchema.Type.STRING; } else { throw new IllegalArgumentException("Unknown partitionSpec member" + memberParts[1]); @@ -208,44 +225,53 @@ private static DataSchema.Type getTimeseriesFieldType(AspectSpec aspectSpec, Str TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec = aspectSpec.getTimeseriesFieldCollectionSpecMap().get(memberParts[0]); if (timeseriesFieldCollectionSpec != null) { - if (timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey().equals(memberParts[1])) { + if (timeseriesFieldCollectionSpec + .getTimeseriesFieldCollectionAnnotation() + .getKey() + .equals(memberParts[1])) { // Matched against the collection stat key. return DataSchema.Type.STRING; } - TimeseriesFieldSpec tsFieldSpec = timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap().get(memberParts[1]); + TimeseriesFieldSpec tsFieldSpec = + timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap().get(memberParts[1]); if (tsFieldSpec != null) { // Matched against a collection stat field. 
          return tsFieldSpec.getPegasusSchema().getType();
        }
      }
    }
-    throw new IllegalArgumentException("Unknown TimeseriesField or TimeseriesFieldCollection: " + fieldPath);
+    throw new IllegalArgumentException(
+        "Unknown TimeseriesField or TimeseriesFieldCollection: " + fieldPath);
  }

-  private static DataSchema.Type getGroupingBucketKeyType(@Nonnull AspectSpec aspectSpec,
-      @Nonnull GroupingBucket groupingBucket) {
+  private static DataSchema.Type getGroupingBucketKeyType(
+      @Nonnull AspectSpec aspectSpec, @Nonnull GroupingBucket groupingBucket) {
     return getTimeseriesFieldType(aspectSpec, groupingBucket.getKey());
   }

-  private static DataSchema.Type getAggregationSpecMemberType(@Nonnull AspectSpec aspectSpec,
-      @Nonnull AggregationSpec aggregationSpec) {
+  private static DataSchema.Type getAggregationSpecMemberType(
+      @Nonnull AspectSpec aspectSpec, @Nonnull AggregationSpec aggregationSpec) {
     return getTimeseriesFieldType(aspectSpec, aggregationSpec.getFieldPath());
   }

-  private static List<String> genColumnNames(GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) {
-    List<String> groupingBucketNames = Arrays.stream(groupingBuckets).map(t -> t.getKey()).collect(Collectors.toList());
+  private static List<String> genColumnNames(
+      GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) {
+    List<String> groupingBucketNames =
+        Arrays.stream(groupingBuckets).map(t -> t.getKey()).collect(Collectors.toList());

-    List<String> aggregationNames = Arrays.stream(aggregationSpecs)
-        .map(ESAggregatedStatsDAO::getAggregationSpecAggDisplayName)
-        .collect(Collectors.toList());
+    List<String> aggregationNames =
+        Arrays.stream(aggregationSpecs)
+            .map(ESAggregatedStatsDAO::getAggregationSpecAggDisplayName)
+            .collect(Collectors.toList());

     List<String> columnNames =
-        Stream.concat(groupingBucketNames.stream(), aggregationNames.stream()).collect(Collectors.toList());
+        Stream.concat(groupingBucketNames.stream(), aggregationNames.stream())
+            .collect(Collectors.toList());
     return columnNames;
   }

-  private static List<String> genColumnTypes(AspectSpec aspectSpec, GroupingBucket[] groupingBuckets,
-      AggregationSpec[] aggregationSpecs) {
+  private static List<String> genColumnTypes(
+      AspectSpec aspectSpec, GroupingBucket[] groupingBuckets, AggregationSpec[] aggregationSpecs) {
     List<String> columnTypes = new ArrayList<>();
     for (GroupingBucket g : groupingBuckets) {
       DataSchema.Type type = getGroupingBucketKeyType(aspectSpec, g);
@@ -282,14 +308,17 @@ private static List genColumnTypes(AspectSpec aspectSpec, GroupingBucket
           break;
         default:
           throw new IllegalArgumentException(
-              "Type generation not yet supported for aggregation type: " + aggregationSpec.getAggregationType());
+              "Type generation not yet supported for aggregation type: "
+                  + aggregationSpec.getAggregationType());
       }
     }
     return columnTypes;
   }

-  private static String extractAggregationValue(@Nonnull final Aggregations aggregations,
-      @Nonnull final AspectSpec aspectSpec, @Nonnull final AggregationSpec aggregationSpec) {
+  private static String extractAggregationValue(
+      @Nonnull final Aggregations aggregations,
+      @Nonnull final AspectSpec aspectSpec,
+      @Nonnull final AggregationSpec aggregationSpec) {
     String memberAggName = getAggregationSpecAggESName(aggregationSpec);
     Object memberAgg = aggregations.get(memberAggName);
     DataSchema.Type memberType = getAggregationSpecMemberType(aspectSpec, aggregationSpec);
@@ -309,36 +338,42 @@ private static String extractAggregationValue(@Nonnull final Aggregations aggreg
       case FLOAT:
         return String.valueOf(((ParsedSum) memberAgg).getValue());
       default:
-        throw new IllegalArgumentException("Unexpected type encountered for sum aggregation: " + memberType);
IllegalArgumentException("Unexpected type encountered for sum aggregation: " + memberType); + throw new IllegalArgumentException( + "Unexpected type encountered for sum aggregation: " + memberType); } } else if (memberAgg instanceof ParsedCardinality) { // This will always be a long value as string. return String.valueOf(((ParsedCardinality) memberAgg).getValue()); } else { - throw new UnsupportedOperationException("Member aggregations other than latest and sum not supported yet."); + throw new UnsupportedOperationException( + "Member aggregations other than latest and sum not supported yet."); } return defaultValue; } - private AspectSpec getTimeseriesAspectSpec(@Nonnull String entityName, @Nonnull String aspectName) { + private AspectSpec getTimeseriesAspectSpec( + @Nonnull String entityName, @Nonnull String aspectName) { EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityName); AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - new IllegalArgumentException(String.format("Unrecognized aspect name {} for entity {}", aspectName, entityName)); + new IllegalArgumentException( + String.format("Unrecognized aspect name {} for entity {}", aspectName, entityName)); } else if (!aspectSpec.isTimeseries()) { new IllegalArgumentException( - String.format("aspect name {} for entity {} is not a timeseries aspect", aspectName, entityName)); + String.format( + "aspect name {} for entity {} is not a timeseries aspect", aspectName, entityName)); } return aspectSpec; } - /** - * Get the aggregated metrics for the given dataset or column from a time series aspect. - */ + /** Get the aggregated metrics for the given dataset or column from a time series aspect. */ @Nonnull - public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName, - @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter, + public GenericTable getAggregatedStats( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull AggregationSpec[] aggregationSpecs, + @Nullable Filter filter, @Nullable GroupingBucket[] groupingBuckets) { // Setup the filter query builder using the input filter provided. @@ -371,51 +406,62 @@ public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull Stri log.debug("Search request is: " + searchRequest); try { - final SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); - return generateResponseFromElastic(searchResponse, groupingBuckets, aggregationSpecs, aspectSpec); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); + return generateResponseFromElastic( + searchResponse, groupingBuckets, aggregationSpecs, aspectSpec); } catch (Exception e) { log.error("Search query failed: " + e.getMessage()); throw new ESQueryException("Search query failed:", e); } } - private void addAggregationBuildersFromAggregationSpec(AspectSpec aspectSpec, AggregationBuilder baseAggregation, - AggregationSpec aggregationSpec) { + private void addAggregationBuildersFromAggregationSpec( + AspectSpec aspectSpec, AggregationBuilder baseAggregation, AggregationSpec aggregationSpec) { String fieldPath = aggregationSpec.getFieldPath(); String esFieldName = fieldPath; switch (aggregationSpec.getAggregationType()) { case LATEST: // Construct the terms aggregation with a max timestamp sub-aggregation. 
- String termsAggName = toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + fieldPath); - AggregationBuilder termsAgg = AggregationBuilders.terms(termsAggName) - .field(esFieldName) - .size(MAX_TERM_BUCKETS) - .subAggregation(AggregationBuilders.max(ES_AGG_MAX_TIMESTAMP).field(ES_FIELD_TIMESTAMP)); + String termsAggName = + toEsAggName(ES_AGGREGATION_PREFIX + ES_TERMS_AGGREGATION_PREFIX + fieldPath); + AggregationBuilder termsAgg = + AggregationBuilders.terms(termsAggName) + .field(esFieldName) + .size(MAX_TERM_BUCKETS) + .subAggregation( + AggregationBuilders.max(ES_AGG_MAX_TIMESTAMP).field(ES_FIELD_TIMESTAMP)); baseAggregation.subAggregation(termsAgg); // Construct the max_bucket pipeline aggregation MaxBucketPipelineAggregationBuilder maxBucketPipelineAgg = - PipelineAggregatorBuilders.maxBucket(getAggregationSpecAggESName(aggregationSpec), + PipelineAggregatorBuilders.maxBucket( + getAggregationSpecAggESName(aggregationSpec), termsAggName + ">" + ES_AGG_MAX_TIMESTAMP); baseAggregation.subAggregation(maxBucketPipelineAgg); break; case SUM: AggregationBuilder sumAgg = - AggregationBuilders.sum(getAggregationSpecAggESName(aggregationSpec)).field(esFieldName); + AggregationBuilders.sum(getAggregationSpecAggESName(aggregationSpec)) + .field(esFieldName); baseAggregation.subAggregation(sumAgg); break; case CARDINALITY: AggregationBuilder cardinalityAgg = - AggregationBuilders.cardinality(getAggregationSpecAggESName(aggregationSpec)).field(esFieldName); + AggregationBuilders.cardinality(getAggregationSpecAggESName(aggregationSpec)) + .field(esFieldName); baseAggregation.subAggregation(cardinalityAgg); break; default: - throw new IllegalStateException("Unexpected value: " + aggregationSpec.getAggregationType()); + throw new IllegalStateException( + "Unexpected value: " + aggregationSpec.getAggregationType()); } } - private Pair makeGroupingAggregationBuilder(AspectSpec aspectSpec, - @Nullable AggregationBuilder baseAggregationBuilder, @Nullable GroupingBucket[] groupingBuckets) { + private Pair makeGroupingAggregationBuilder( + AspectSpec aspectSpec, + @Nullable AggregationBuilder baseAggregationBuilder, + @Nullable GroupingBucket[] groupingBuckets) { AggregationBuilder firstAggregationBuilder = baseAggregationBuilder; AggregationBuilder lastAggregationBuilder = baseAggregationBuilder; @@ -427,18 +473,20 @@ private Pair makeGroupingAggregationBuil if (!curGroupingBucket.getKey().equals(ES_FIELD_TIMESTAMP)) { throw new IllegalArgumentException("Date Grouping bucket is not:" + ES_FIELD_TIMESTAMP); } - curAggregationBuilder = AggregationBuilders.dateHistogram(ES_AGG_TIMESTAMP) - .field(ES_FIELD_TIMESTAMP) - .calendarInterval(getHistogramInterval(curGroupingBucket.getTimeWindowSize())); + curAggregationBuilder = + AggregationBuilders.dateHistogram(ES_AGG_TIMESTAMP) + .field(ES_FIELD_TIMESTAMP) + .calendarInterval(getHistogramInterval(curGroupingBucket.getTimeWindowSize())); } else if (curGroupingBucket.getType() == GroupingBucketType.STRING_GROUPING_BUCKET) { // Process the string grouping bucket using the 'terms' aggregation. // The field can be Keyword, Numeric, ip, boolean, or binary. 
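String grouping buckets, handled next, target the keyword form of the field: analyzed text is not directly aggregatable, so helpers like ESUtils.toKeywordField conventionally append a ".keyword" multi-field suffix unless told the field is already keyword-typed. A rough sketch of that convention (illustrative, not ESUtils' exact logic):

public class KeywordFieldSketch {
  // Return the aggregatable form of a field: append the conventional ".keyword"
  // multi-field suffix unless the caller indicates the field is already keyword-typed.
  static String toKeywordField(String field, boolean skipSuffix) {
    return skipSuffix ? field : field + ".keyword";
  }
}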
String fieldName = ESUtils.toKeywordField(curGroupingBucket.getKey(), true); DataSchema.Type fieldType = getGroupingBucketKeyType(aspectSpec, curGroupingBucket); - curAggregationBuilder = AggregationBuilders.terms(getGroupingBucketAggName(curGroupingBucket)) - .field(fieldName) - .size(MAX_TERM_BUCKETS) - .order(BucketOrder.aggregation("_key", true)); + curAggregationBuilder = + AggregationBuilders.terms(getGroupingBucketAggName(curGroupingBucket)) + .field(fieldName) + .size(MAX_TERM_BUCKETS) + .order(BucketOrder.aggregation("_key", true)); } if (firstAggregationBuilder == null) { firstAggregationBuilder = curAggregationBuilder; @@ -453,8 +501,11 @@ private Pair makeGroupingAggregationBuil return Pair.of(firstAggregationBuilder, lastAggregationBuilder); } - private GenericTable generateResponseFromElastic(SearchResponse searchResponse, GroupingBucket[] groupingBuckets, - AggregationSpec[] aggregationSpecs, AspectSpec aspectSpec) { + private GenericTable generateResponseFromElastic( + SearchResponse searchResponse, + GroupingBucket[] groupingBuckets, + AggregationSpec[] aggregationSpecs, + AspectSpec aspectSpec) { GenericTable resultTable = new GenericTable(); // 1. Generate the column names. @@ -470,8 +521,15 @@ private GenericTable generateResponseFromElastic(SearchResponse searchResponse, Aggregations aggregations = searchResponse.getAggregations(); Stack rowAcc = new Stack<>(); - rowGenHelper(aggregations, 0, groupingBuckets.length, rows, rowAcc, - ImmutableList.copyOf(groupingBuckets), ImmutableList.copyOf(aggregationSpecs), aspectSpec); + rowGenHelper( + aggregations, + 0, + groupingBuckets.length, + rows, + rowAcc, + ImmutableList.copyOf(groupingBuckets), + ImmutableList.copyOf(aggregationSpecs), + aspectSpec); if (!rowAcc.isEmpty()) { throw new IllegalStateException("Expected stack to be empty."); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java index 4e41603ca1411..c0f66acaaca5a 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/timeseries/transformer/TimeseriesAspectTransformer.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.timeseries.transformer; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -31,25 +33,28 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; -import static com.linkedin.metadata.Constants.*; - - -/** - * Class that provides a utility function that transforms the timeseries aspect into a document - */ +/** Class that provides a utility function that transforms the timeseries aspect into a document */ @Slf4j public class TimeseriesAspectTransformer { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + 
.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
  }

-  private TimeseriesAspectTransformer() {
-  }
+  private TimeseriesAspectTransformer() {}

-  public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull final RecordTemplate timeseriesAspect,
-      @Nonnull final AspectSpec aspectSpec, @Nullable final SystemMetadata systemMetadata)
+  public static Map<String, JsonNode> transform(
+      @Nonnull final Urn urn,
+      @Nonnull final RecordTemplate timeseriesAspect,
+      @Nonnull final AspectSpec aspectSpec,
+      @Nullable final SystemMetadata systemMetadata)
       throws JsonProcessingException {
     ObjectNode commonDocument = getCommonDocument(urn, timeseriesAspect, systemMetadata);
     Map<String, JsonNode> finalDocuments = new HashMap<>();
@@ -58,9 +63,12 @@ public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull f
     ObjectNode document = JsonNodeFactory.instance.objectNode();
     document.setAll(commonDocument);
     document.set(MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(false));
-    document.set(MappingsBuilder.EVENT_FIELD, OBJECT_MAPPER.readTree(RecordUtils.toJsonString(timeseriesAspect)));
+    document.set(
+        MappingsBuilder.EVENT_FIELD,
+        OBJECT_MAPPER.readTree(RecordUtils.toJsonString(timeseriesAspect)));
     if (systemMetadata != null) {
-      document.set(MappingsBuilder.SYSTEM_METADATA_FIELD,
+      document.set(
+          MappingsBuilder.SYSTEM_METADATA_FIELD,
           OBJECT_MAPPER.readTree(RecordUtils.toJsonString(systemMetadata)));
     }
     final Map<TimeseriesFieldSpec, List<Object>> timeseriesFieldValueMap =
@@ -70,22 +78,30 @@ public static Map<String, JsonNode> transform(@Nonnull final Urn urn, @Nonnull f

     // Create new rows for the member collection fields.
     final Map<TimeseriesFieldCollectionSpec, List<Object>> timeseriesFieldCollectionValueMap =
-        FieldExtractor.extractFields(timeseriesAspect, aspectSpec.getTimeseriesFieldCollectionSpecs());
+        FieldExtractor.extractFields(
+            timeseriesAspect, aspectSpec.getTimeseriesFieldCollectionSpecs());
     timeseriesFieldCollectionValueMap.forEach(
-        (key, values) -> finalDocuments.putAll(getTimeseriesFieldCollectionDocuments(key, values, commonDocument)));
+        (key, values) ->
+            finalDocuments.putAll(
+                getTimeseriesFieldCollectionDocuments(key, values, commonDocument)));
     return finalDocuments;
   }

-  private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final RecordTemplate timeseriesAspect,
+  private static ObjectNode getCommonDocument(
+      @Nonnull final Urn urn,
+      final RecordTemplate timeseriesAspect,
       @Nullable final SystemMetadata systemMetadata) {
     if (!timeseriesAspect.data().containsKey(MappingsBuilder.TIMESTAMP_MILLIS_FIELD)) {
-      throw new IllegalArgumentException("Input timeseries aspect does not contain a timestampMillis field");
+      throw new IllegalArgumentException(
+          "Input timeseries aspect does not contain a timestampMillis field");
     }

     ObjectNode document = JsonNodeFactory.instance.objectNode();
     document.put(MappingsBuilder.URN_FIELD, urn.toString());
-    document.put(MappingsBuilder.TIMESTAMP_FIELD,
+    document.put(
+        MappingsBuilder.TIMESTAMP_FIELD,
         (Long) timeseriesAspect.data().get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD));
-    document.put(MappingsBuilder.TIMESTAMP_MILLIS_FIELD,
+    document.put(
+        MappingsBuilder.TIMESTAMP_MILLIS_FIELD,
         (Long) timeseriesAspect.data().get(MappingsBuilder.TIMESTAMP_MILLIS_FIELD));
     if (systemMetadata != null && systemMetadata.getRunId() != null) {
       // We need this as part of the common document for rollback support.
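The static initializer at the top of this file's diff raises Jackson's maximum-string-length guard before any timeseries document is parsed; since Jackson 2.15 that guard can be configured per mapper through StreamReadConstraints. A minimal standalone version of the same configuration (the helper name is illustrative):

import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;

public class MapperFactory {
  // Build a mapper whose parser accepts strings up to maxChars characters,
  // overriding Jackson's default string-length guard.
  static ObjectMapper newMapper(int maxChars) {
    ObjectMapper mapper = new ObjectMapper();
    mapper
        .getFactory()
        .setStreamReadConstraints(
            StreamReadConstraints.builder().maxStringLength(maxChars).build());
    return mapper;
  }
}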
@@ -94,7 +110,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
     Object eventGranularity = timeseriesAspect.data().get(MappingsBuilder.EVENT_GRANULARITY);
     if (eventGranularity != null) {
       try {
-        document.put(MappingsBuilder.EVENT_GRANULARITY, OBJECT_MAPPER.writeValueAsString(eventGranularity));
+        document.put(
+            MappingsBuilder.EVENT_GRANULARITY, OBJECT_MAPPER.writeValueAsString(eventGranularity));
       } catch (JsonProcessingException e) {
         throw new IllegalArgumentException("Failed to convert eventGranularity to Json string!", e);
       }
@@ -105,7 +122,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
       Object partition = partitionSpec.get(MappingsBuilder.PARTITION_SPEC_PARTITION);
       Object timePartition = partitionSpec.get(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION);
       if (partition != null && timePartition != null) {
-        throw new IllegalArgumentException("Both partition and timePartition cannot be specified in partitionSpec!");
+        throw new IllegalArgumentException(
+            "Both partition and timePartition cannot be specified in partitionSpec!");
       } else if (partition != null) {
         ObjectNode partitionDoc = JsonNodeFactory.instance.objectNode();
         partitionDoc.put(MappingsBuilder.PARTITION_SPEC_PARTITION, partition.toString());
@@ -113,14 +131,16 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
       } else if (timePartition != null) {
         ObjectNode timePartitionDoc = JsonNodeFactory.instance.objectNode();
         try {
-          timePartitionDoc.put(MappingsBuilder.PARTITION_SPEC_TIME_PARTITION,
+          timePartitionDoc.put(
+              MappingsBuilder.PARTITION_SPEC_TIME_PARTITION,
               OBJECT_MAPPER.writeValueAsString(timePartition));
         } catch (JsonProcessingException e) {
           throw new IllegalArgumentException("Failed to convert timePartition to Json string!", e);
         }
         document.set(MappingsBuilder.PARTITION_SPEC, timePartitionDoc);
       } else {
-        throw new IllegalArgumentException("Both partition and timePartition cannot be null in partitionSpec.");
+        throw new IllegalArgumentException(
+            "Both partition and timePartition cannot be null in partitionSpec.");
       }
     }
     String messageId = (String) timeseriesAspect.data().get(MappingsBuilder.MESSAGE_ID_FIELD);
@@ -131,8 +151,8 @@ private static ObjectNode getCommonDocument(@Nonnull final Urn urn, final Record
     return document;
   }

-  private static void setTimeseriesField(final ObjectNode document, final TimeseriesFieldSpec fieldSpec,
-      List<Object> valueList) {
+  private static void setTimeseriesField(
+      final ObjectNode document, final TimeseriesFieldSpec fieldSpec, List<Object> valueList) {
     if (valueList.size() == 0) {
       return;
     }
@@ -154,21 +174,26 @@ private static void setTimeseriesField(final ObjectNode document, final Timeseri
       case ARRAY:
         ArrayDataSchema dataSchema = (ArrayDataSchema) fieldSpec.getPegasusSchema();
         if (valueList.get(0) instanceof List) {
-          // This is the hack for non-stat-collection array fields. They will end up getting oddly serialized to a string otherwise.
+          // This is the hack for non-stat-collection array fields. They will end up getting oddly
+          // serialized to a string otherwise.
valueList = (List) valueList.get(0); } ArrayNode arrayNode = JsonNodeFactory.instance.arrayNode(valueList.size()); - valueList.stream().map(x -> { - if (dataSchema.getItems().getType() == DataSchema.Type.RECORD) { - try { - return OBJECT_MAPPER.writeValueAsString(x); - } catch (JsonProcessingException e) { - throw new IllegalArgumentException("Failed to convert collection element to Json string!", e); - } - } else { - return x.toString(); - } - }).forEach(arrayNode::add); + valueList.stream() + .map( + x -> { + if (dataSchema.getItems().getType() == DataSchema.Type.RECORD) { + try { + return OBJECT_MAPPER.writeValueAsString(x); + } catch (JsonProcessingException e) { + throw new IllegalArgumentException( + "Failed to convert collection element to Json string!", e); + } + } else { + return x.toString(); + } + }) + .forEach(arrayNode::add); valueNode = JsonNodeFactory.instance.textNode(arrayNode.toString()); break; case RECORD: @@ -189,15 +214,21 @@ private static void setTimeseriesField(final ObjectNode document, final Timeseri } private static Map getTimeseriesFieldCollectionDocuments( - final TimeseriesFieldCollectionSpec fieldSpec, final List values, final ObjectNode commonDocument) { + final TimeseriesFieldCollectionSpec fieldSpec, + final List values, + final ObjectNode commonDocument) { return values.stream() .map(value -> getTimeseriesFieldCollectionDocument(fieldSpec, value, commonDocument)) .collect( - Collectors.toMap(keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), Pair::getSecond)); + Collectors.toMap( + keyDocPair -> getDocId(keyDocPair.getSecond(), keyDocPair.getFirst()), + Pair::getSecond)); } private static Pair getTimeseriesFieldCollectionDocument( - final TimeseriesFieldCollectionSpec fieldSpec, final Object value, final ObjectNode timeseriesInfoDocument) { + final TimeseriesFieldCollectionSpec fieldSpec, + final Object value, + final ObjectNode timeseriesInfoDocument) { ObjectNode finalDocument = JsonNodeFactory.instance.objectNode(); finalDocument.setAll(timeseriesInfoDocument); RecordTemplate collectionComponent = (RecordTemplate) value; @@ -205,18 +236,24 @@ private static Pair getTimeseriesFieldCollectionDocument( Optional key = RecordUtils.getFieldValue(collectionComponent, fieldSpec.getKeyPath()); if (!key.isPresent()) { throw new IllegalArgumentException( - String.format("Key %s for timeseries collection field %s is missing", fieldSpec.getKeyPath(), - fieldSpec.getName())); + String.format( + "Key %s for timeseries collection field %s is missing", + fieldSpec.getKeyPath(), fieldSpec.getName())); } - componentDocument.set(fieldSpec.getTimeseriesFieldCollectionAnnotation().getKey(), + componentDocument.set( + fieldSpec.getTimeseriesFieldCollectionAnnotation().getKey(), JsonNodeFactory.instance.textNode(key.get().toString())); - Map> statFields = FieldExtractor.extractFields(collectionComponent, - new ArrayList<>(fieldSpec.getTimeseriesFieldSpecMap().values())); + Map> statFields = + FieldExtractor.extractFields( + collectionComponent, new ArrayList<>(fieldSpec.getTimeseriesFieldSpecMap().values())); statFields.forEach((k, v) -> setTimeseriesField(componentDocument, k, v)); finalDocument.set(fieldSpec.getName(), componentDocument); - finalDocument.set(MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(true)); - // Return the pair of component key and the document. We use the key later to build the unique docId. 
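The getDocId helper referenced in the comment above is outside this hunk; one plausible shape for it, purely illustrative and not necessarily the patch's actual scheme, derives a deterministic id from the entity urn plus the collection component key so that re-ingesting the same event updates the document in place rather than duplicating it:

import org.apache.commons.codec.digest.DigestUtils;

public class DocIdSketch {
  // Deterministic ES document id from the entity urn and the collection component key.
  static String docId(String urn, String componentKey) {
    return DigestUtils.md5Hex(urn + "_" + componentKey);
  }
}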
- return new Pair<>(fieldSpec.getTimeseriesFieldCollectionAnnotation().getCollectionName() + key.get(), + finalDocument.set( + MappingsBuilder.IS_EXPLODED_FIELD, JsonNodeFactory.instance.booleanNode(true)); + // Return the pair of component key and the document. We use the key later to build the unique + // docId. + return new Pair<>( + fieldSpec.getTimeseriesFieldCollectionAnnotation().getCollectionName() + key.get(), finalDocument); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java b/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java index a482cdeb659ca..7914d82748fa5 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/version/GitVersion.java @@ -6,7 +6,6 @@ import javax.annotation.Nonnull; import lombok.Value; - @Value public class GitVersion { String version; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java index e9183738c786c..9a64e397c9184 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectGenerationUtils.java @@ -15,11 +15,9 @@ import com.linkedin.mxe.SystemMetadata; import javax.annotation.Nonnull; - public class AspectGenerationUtils { - private AspectGenerationUtils() { - } + private AspectGenerationUtils() {} @Nonnull public static AuditStamp createAuditStamp() { @@ -37,7 +35,8 @@ public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull St } @Nonnull - public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull String runId, @Nonnull String lastRunId) { + public static SystemMetadata createSystemMetadata( + long lastObserved, @Nonnull String runId, @Nonnull String lastRunId) { SystemMetadata metadata = new SystemMetadata(); metadata.setLastObserved(lastObserved); metadata.setRunId(runId); @@ -47,7 +46,8 @@ public static SystemMetadata createSystemMetadata(long lastObserved, @Nonnull St @Nonnull public static CorpUserKey createCorpUserKey(Urn urn) { - return (CorpUserKey) EntityKeyUtils.convertUrnToEntityKeyInternal(urn, new CorpUserKey().schema()); + return (CorpUserKey) + EntityKeyUtils.convertUrnToEntityKeyInternal(urn, new CorpUserKey().schema()); } @Nonnull diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java index e95378a616d97..2113e5a04f3a2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectIngestionUtils.java @@ -14,19 +14,19 @@ import java.util.Map; import javax.annotation.Nonnull; - public class AspectIngestionUtils { - private AspectIngestionUtils() { - } + private AspectIngestionUtils() {} @Nonnull - public static Map ingestCorpUserKeyAspects(EntityService entityService, int aspectCount) { + public static Map ingestCorpUserKeyAspects( + EntityService entityService, int aspectCount) { return ingestCorpUserKeyAspects(entityService, aspectCount, 0); } @Nonnull - public static Map ingestCorpUserKeyAspects(EntityService entityService, int aspectCount, int startIndex) { + public static Map ingestCorpUserKeyAspects( + EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserKey()); Map aspects = new 
HashMap<>(); List items = new LinkedList<>(); @@ -34,24 +34,31 @@ public static Map ingestCorpUserKeyAspects(EntityService entit Urn urn = UrnUtils.getUrn(String.format("urn:li:corpuser:tester%d", i)); CorpUserKey aspect = AspectGenerationUtils.createCorpUserKey(urn); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() - .urn(urn) - .aspectName(aspectName) - .aspect(aspect) - .systemMetadata(AspectGenerationUtils.createSystemMetadata()) + items.add( + UpsertBatchItem.builder() + .urn(urn) + .aspectName(aspectName) + .aspect(aspect) + .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } @Nonnull - public static Map ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount) { + public static Map ingestCorpUserInfoAspects( + @Nonnull final EntityService entityService, int aspectCount) { return ingestCorpUserInfoAspects(entityService, aspectCount, 0); } @Nonnull - public static Map ingestCorpUserInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { + public static Map ingestCorpUserInfoAspects( + @Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); Map aspects = new HashMap<>(); List items = new LinkedList<>(); @@ -60,24 +67,31 @@ public static Map ingestCorpUserInfoAspects(@Nonnull final En String email = String.format("email%d@test.com", i); CorpUserInfo aspect = AspectGenerationUtils.createCorpUserInfo(email); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(aspect) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) .build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } @Nonnull - public static Map ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount) { + public static Map ingestChartInfoAspects( + @Nonnull final EntityService entityService, int aspectCount) { return ingestChartInfoAspects(entityService, aspectCount, 0); } @Nonnull - public static Map ingestChartInfoAspects(@Nonnull final EntityService entityService, int aspectCount, int startIndex) { + public static Map ingestChartInfoAspects( + @Nonnull final EntityService entityService, int aspectCount, int startIndex) { String aspectName = AspectGenerationUtils.getAspectName(new ChartInfo()); Map aspects = new HashMap<>(); List items = new LinkedList<>(); @@ -87,14 +101,19 @@ public static Map ingestChartInfoAspects(@Nonnull final EntitySe String description = String.format("Test description %d", i); ChartInfo aspect = AspectGenerationUtils.createChartInfo(title, description); aspects.put(urn, aspect); - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(aspectName) .aspect(aspect) .systemMetadata(AspectGenerationUtils.createSystemMetadata()) 
.build(entityService.getEntityRegistry())); } - entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), AspectGenerationUtils.createAuditStamp(), true, true); + entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), + AspectGenerationUtils.createAuditStamp(), + true, + true); return aspects; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java index 54fb2bc8b1f65..258b40cac6371 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/AspectUtilsTest.java @@ -1,11 +1,13 @@ package com.linkedin.metadata; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; + import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.dataset.DatasetProperties; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.TestEntityRegistry; @@ -23,19 +25,16 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class AspectUtilsTest { protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); protected final EntityRegistry _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - public AspectUtilsTest() throws EntityRegistryException { - } + public AspectUtilsTest() throws EntityRegistryException {} @Test public void testAdditionalChanges() { @@ -45,18 +44,21 @@ public void testAdditionalChanges() { EventProducer mockProducer = mock(EventProducer.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - EntityServiceImpl entityServiceImpl = new EntityServiceImpl(aspectDao, mockProducer, _testEntityRegistry, true, - null, preProcessHooks); + EntityServiceImpl entityServiceImpl = + new EntityServiceImpl( + aspectDao, mockProducer, _testEntityRegistry, true, null, preProcessHooks); MetadataChangeProposal proposal1 = new MetadataChangeProposal(); - proposal1.setEntityUrn(new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); + proposal1.setEntityUrn( + new DatasetUrn(new DataPlatformUrn("platform"), "name", FabricType.PROD)); proposal1.setAspectName("datasetProperties"); DatasetProperties datasetProperties = new DatasetProperties().setName("name"); proposal1.setAspect(GenericRecordUtils.serializeAspect(datasetProperties)); proposal1.setEntityType("dataset"); proposal1.setChangeType(ChangeType.PATCH); - List proposalList = AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); + List proposalList = + AspectUtils.getAdditionalChanges(proposal1, entityServiceImpl); // proposals for key aspect, browsePath, browsePathV2, dataPlatformInstance Assert.assertEquals(proposalList.size(), 4); Assert.assertEquals(proposalList.get(0).getChangeType(), ChangeType.UPSERT); diff --git 
a/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java index 5645573917f00..70b7f587f0c19 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/CassandraTestUtils.java @@ -1,16 +1,13 @@ package com.linkedin.metadata; +import static org.testng.Assert.assertEquals; + import com.datastax.driver.core.KeyspaceMetadata; import com.datastax.driver.core.Row; import com.datastax.driver.core.Session; import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.CqlSessionBuilder; import com.linkedin.metadata.entity.cassandra.CassandraAspect; -import org.testcontainers.containers.CassandraContainer; -import org.testcontainers.utility.DockerImageName; - -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; import java.net.InetSocketAddress; import java.time.Duration; import java.util.Arrays; @@ -18,32 +15,37 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; - -import static org.testng.Assert.assertEquals; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; +import org.testcontainers.containers.CassandraContainer; +import org.testcontainers.utility.DockerImageName; public class CassandraTestUtils { - private CassandraTestUtils() { - } + private CassandraTestUtils() {} private static final String KEYSPACE_NAME = "test"; private static final String IMAGE_NAME = "cassandra:3.11"; public static CassandraContainer setupContainer() { - final DockerImageName imageName = DockerImageName - .parse(IMAGE_NAME) - .asCompatibleSubstituteFor("cassandra"); + final DockerImageName imageName = + DockerImageName.parse(IMAGE_NAME).asCompatibleSubstituteFor("cassandra"); CassandraContainer container = new CassandraContainer(imageName); - container.withEnv("JVM_OPTS", "-Xms64M -Xmx96M") - .withStartupTimeout(Duration.ofMinutes(5)) // usually < 1min - .start(); + container + .withEnv("JVM_OPTS", "-Xms64M -Xmx96M") + .withStartupTimeout(Duration.ofMinutes(5)) // usually < 1min + .start(); try (Session session = container.getCluster().connect()) { - session.execute(String.format("CREATE KEYSPACE IF NOT EXISTS %s WITH replication = \n" - + "{'class':'SimpleStrategy','replication_factor':'1'};", KEYSPACE_NAME)); session.execute( - String.format("create table %s.%s (urn varchar, \n" + String.format( + "CREATE KEYSPACE IF NOT EXISTS %s WITH replication = \n" + + "{'class':'SimpleStrategy','replication_factor':'1'};", + KEYSPACE_NAME)); + session.execute( + String.format( + "create table %s.%s (urn varchar, \n" + "aspect varchar, \n" + "systemmetadata varchar, \n" + "version bigint, \n" @@ -54,14 +56,13 @@ public static CassandraContainer setupContainer() { + "entity varchar, \n" + "primary key ((urn), aspect, version)) \n" + "with clustering order by (aspect asc, version asc);", - KEYSPACE_NAME, - CassandraAspect.TABLE_NAME)); + KEYSPACE_NAME, CassandraAspect.TABLE_NAME)); List keyspaces = session.getCluster().getMetadata().getKeyspaces(); - List filteredKeyspaces = keyspaces - .stream() - .filter(km -> km.getName().equals(KEYSPACE_NAME)) - .collect(Collectors.toList()); + List filteredKeyspaces = + keyspaces.stream() + .filter(km -> km.getName().equals(KEYSPACE_NAME)) + .collect(Collectors.toList()); assertEquals(filteredKeyspaces.size(), 1); } @@ -72,20 +73,22 @@ public static CassandraContainer setupContainer() { public static CqlSession 
createTestSession(@Nonnull final CassandraContainer container) { Map sessionConfig = createTestServerConfig(container); int port = Integer.parseInt(sessionConfig.get("port")); - List addresses = Arrays.stream(sessionConfig.get("hosts").split(",")) - .map(host -> new InetSocketAddress(host, port)) - .collect(Collectors.toList()); + List addresses = + Arrays.stream(sessionConfig.get("hosts").split(",")) + .map(host -> new InetSocketAddress(host, port)) + .collect(Collectors.toList()); String dc = sessionConfig.get("datacenter"); String ks = sessionConfig.get("keyspace"); String username = sessionConfig.get("username"); String password = sessionConfig.get("password"); - CqlSessionBuilder csb = CqlSession.builder() - .addContactPoints(addresses) - .withLocalDatacenter(dc) - .withKeyspace(ks) - .withAuthCredentials(username, password); + CqlSessionBuilder csb = + CqlSession.builder() + .addContactPoints(addresses) + .withLocalDatacenter(dc) + .withKeyspace(ks) + .withAuthCredentials(username, password); if (sessionConfig.containsKey("useSsl") && sessionConfig.get("useSsl").equals("true")) { try { @@ -99,22 +102,29 @@ public static CqlSession createTestSession(@Nonnull final CassandraContainer con } @Nonnull - private static Map createTestServerConfig(@Nonnull final CassandraContainer container) { - return new HashMap() {{ - put("keyspace", KEYSPACE_NAME); - put("username", container.getUsername()); - put("password", container.getPassword()); - put("hosts", container.getHost()); - put("port", container.getMappedPort(9042).toString()); - put("datacenter", "datacenter1"); - put("useSsl", "false"); - }}; + private static Map createTestServerConfig( + @Nonnull final CassandraContainer container) { + return new HashMap() { + { + put("keyspace", KEYSPACE_NAME); + put("username", container.getUsername()); + put("password", container.getPassword()); + put("hosts", container.getHost()); + put("port", container.getMappedPort(9042).toString()); + put("datacenter", "datacenter1"); + put("useSsl", "false"); + } + }; } public static void purgeData(CassandraContainer container) { try (Session session = container.getCluster().connect()) { session.execute(String.format("TRUNCATE %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)); - List rs = session.execute(String.format("SELECT * FROM %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)).all(); + List rs = + session + .execute( + String.format("SELECT * FROM %s.%s;", KEYSPACE_NAME, CassandraAspect.TABLE_NAME)) + .all(); assertEquals(rs.size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java index 364ccd86d45fd..4ab421dab79dc 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/DockerTestUtils.java @@ -4,18 +4,20 @@ public class DockerTestUtils { - final private static int MIN_MEMORY_NEEDED_GB = 7; + private static final int MIN_MEMORY_NEEDED_GB = 7; - public static void checkContainerEngine(DockerClient dockerClient) { - final long dockerEngineMemoryBytes = dockerClient.infoCmd().exec().getMemTotal(); - final long dockerEngineMemoryGB = dockerEngineMemoryBytes / 1000 / 1000 / 1000; - if (dockerEngineMemoryGB < MIN_MEMORY_NEEDED_GB) { - final String error = String.format("Total Docker memory configured: %s GB (%d bytes) is below the minimum threshold " - + "of %d GB", dockerEngineMemoryGB, dockerEngineMemoryBytes, MIN_MEMORY_NEEDED_GB); - throw new 
IllegalStateException(error); - } + public static void checkContainerEngine(DockerClient dockerClient) { + final long dockerEngineMemoryBytes = dockerClient.infoCmd().exec().getMemTotal(); + final long dockerEngineMemoryGB = dockerEngineMemoryBytes / 1000 / 1000 / 1000; + if (dockerEngineMemoryGB < MIN_MEMORY_NEEDED_GB) { + final String error = + String.format( + "Total Docker memory configured: %s GB (%d bytes) is below the minimum threshold " + + "of %d GB", + dockerEngineMemoryGB, dockerEngineMemoryBytes, MIN_MEMORY_NEEDED_GB); + throw new IllegalStateException(error); } + } - private DockerTestUtils() { - } + private DockerTestUtils() {} } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java index c6eefede8a860..ed5c882ace23e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/EbeanTestUtils.java @@ -4,13 +4,11 @@ import io.ebean.DatabaseFactory; import io.ebean.config.DatabaseConfig; import io.ebean.datasource.DataSourceConfig; - import javax.annotation.Nonnull; public class EbeanTestUtils { - private EbeanTestUtils() { - } + private EbeanTestUtils() {} @Nonnull public static Database createTestServer(String instanceId) { @@ -22,7 +20,8 @@ private static DatabaseConfig createTestingH2ServerConfig(String instanceId) { DataSourceConfig dataSourceConfig = new DataSourceConfig(); dataSourceConfig.setUsername("tester"); dataSourceConfig.setPassword(""); - dataSourceConfig.setUrl(String.format("jdbc:h2:mem:%s;IGNORECASE=TRUE;mode=mysql;", instanceId)); + dataSourceConfig.setUrl( + String.format("jdbc:h2:mem:%s;IGNORECASE=TRUE;mode=mysql;", instanceId)); dataSourceConfig.setDriver("org.h2.Driver"); DatabaseConfig serverConfig = new DatabaseConfig(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java index 57c14608a7881..fd218add2a945 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/TestEntitySpecBuilder.java @@ -4,11 +4,9 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.EntitySpecBuilder; - public class TestEntitySpecBuilder { - private TestEntitySpecBuilder() { - } + private TestEntitySpecBuilder() {} public static EntitySpec getSpec() { return new EntitySpecBuilder().buildEntitySpec(new TestEntitySnapshot().schema()); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java b/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java index 4b1b8c89b030b..ea4bb69d942f9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/TestEntityUtil.java @@ -1,7 +1,5 @@ package com.linkedin.metadata; -import com.datahub.test.TestBrowsePaths; -import com.datahub.test.TestBrowsePathsV2; import com.datahub.test.BrowsePathEntry; import com.datahub.test.BrowsePathEntryArray; import com.datahub.test.KeyPartEnum; @@ -9,6 +7,8 @@ import com.datahub.test.SimpleNestedRecord1; import com.datahub.test.SimpleNestedRecord2; import com.datahub.test.SimpleNestedRecord2Array; +import com.datahub.test.TestBrowsePaths; +import com.datahub.test.TestBrowsePathsV2; import com.datahub.test.TestEntityAspect; import com.datahub.test.TestEntityAspectArray; import com.datahub.test.TestEntityInfo; @@ 
-21,10 +21,8 @@ import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringMap; - public class TestEntityUtil { - private TestEntityUtil() { - } + private TestEntityUtil() {} public static Urn getTestEntityUrn() { return new TestEntityUrn("key", "urn", "VALUE_1"); @@ -38,15 +36,39 @@ public static TestEntityInfo getTestEntityInfo(Urn urn) { TestEntityInfo testEntityInfo = new TestEntityInfo(); testEntityInfo.setTextField("test"); testEntityInfo.setTextArrayField(new StringArray(ImmutableList.of("testArray1", "testArray2"))); - testEntityInfo.setNestedRecordField(new SimpleNestedRecord1().setNestedIntegerField(1).setNestedForeignKey(urn)); - testEntityInfo.setNestedRecordArrayField(new SimpleNestedRecord2Array( - ImmutableList.of(new SimpleNestedRecord2().setNestedArrayStringField("nestedArray1"), - new SimpleNestedRecord2().setNestedArrayStringField("nestedArray2") - .setNestedArrayArrayField(new StringArray(ImmutableList.of("testNestedArray1", "testNestedArray2")))))); - testEntityInfo.setCustomProperties(new StringMap(ImmutableMap.of("key1", "value1", "key2", "value2", - "shortValue", "123", "longValue", "0123456789"))); - testEntityInfo.setEsObjectField(new StringMap(ImmutableMap.of("key1", "value1", "key2", "value2", - "shortValue", "123", "longValue", "0123456789"))); + testEntityInfo.setNestedRecordField( + new SimpleNestedRecord1().setNestedIntegerField(1).setNestedForeignKey(urn)); + testEntityInfo.setNestedRecordArrayField( + new SimpleNestedRecord2Array( + ImmutableList.of( + new SimpleNestedRecord2().setNestedArrayStringField("nestedArray1"), + new SimpleNestedRecord2() + .setNestedArrayStringField("nestedArray2") + .setNestedArrayArrayField( + new StringArray( + ImmutableList.of("testNestedArray1", "testNestedArray2")))))); + testEntityInfo.setCustomProperties( + new StringMap( + ImmutableMap.of( + "key1", + "value1", + "key2", + "value2", + "shortValue", + "123", + "longValue", + "0123456789"))); + testEntityInfo.setEsObjectField( + new StringMap( + ImmutableMap.of( + "key1", + "value1", + "key2", + "value2", + "shortValue", + "123", + "longValue", + "0123456789"))); return testEntityInfo; } @@ -55,7 +77,8 @@ public static TestEntitySnapshot getSnapshot() { Urn urn = getTestEntityUrn(); snapshot.setUrn(urn); - TestBrowsePaths browsePaths = new TestBrowsePaths().setPaths(new StringArray(ImmutableList.of("/a/b/c", "d/e/f"))); + TestBrowsePaths browsePaths = + new TestBrowsePaths().setPaths(new StringArray(ImmutableList.of("/a/b/c", "d/e/f"))); BrowsePathEntryArray browsePathV2Entries = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("levelOne"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("levelTwo"); @@ -64,10 +87,14 @@ public static TestEntitySnapshot getSnapshot() { TestBrowsePathsV2 browsePathsV2 = new TestBrowsePathsV2().setPath(browsePathV2Entries); SearchFeatures searchFeatures = new SearchFeatures().setFeature1(2).setFeature2(1); - TestEntityAspectArray aspects = new TestEntityAspectArray( - ImmutableList.of(TestEntityAspect.create(getTestEntityKey(urn)), - TestEntityAspect.create(getTestEntityInfo(urn)), TestEntityAspect.create(browsePaths), - TestEntityAspect.create(searchFeatures), TestEntityAspect.create(browsePathsV2))); + TestEntityAspectArray aspects = + new TestEntityAspectArray( + ImmutableList.of( + TestEntityAspect.create(getTestEntityKey(urn)), + TestEntityAspect.create(getTestEntityInfo(urn)), + TestEntityAspect.create(browsePaths), + TestEntityAspect.create(searchFeatures), + 
TestEntityAspect.create(browsePathsV2))); snapshot.setAspects(aspects); return snapshot; } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java index f52bc26b5c538..fba11f24f4c44 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/client/JavaEntityClientTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.client; +import static org.mockito.Mockito.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.codahale.metrics.Counter; import com.linkedin.data.template.RequiredFieldNotPresentException; import com.linkedin.entity.client.RestliEntityClient; @@ -12,124 +16,115 @@ import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.function.Supplier; import org.mockito.MockedStatic; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.util.function.Supplier; - -import static org.mockito.Mockito.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class JavaEntityClientTest { - private EntityService _entityService; - private DeleteEntityService _deleteEntityService; - private EntitySearchService _entitySearchService; - private CachingEntitySearchService _cachingEntitySearchService; - private SearchService _searchService; - private LineageSearchService _lineageSearchService; - private TimeseriesAspectService _timeseriesAspectService; - private EventProducer _eventProducer; - private RestliEntityClient _restliEntityClient; - private MockedStatic _metricUtils; - private Counter _counter; - - - @BeforeMethod - public void setupTest() { - _entityService = mock(EntityService.class); - _deleteEntityService = mock(DeleteEntityService.class); - _entitySearchService = mock(EntitySearchService.class); - _cachingEntitySearchService = mock(CachingEntitySearchService.class); - _searchService = mock(SearchService.class); - _lineageSearchService = mock(LineageSearchService.class); - _timeseriesAspectService = mock(TimeseriesAspectService.class); - _eventProducer = mock(EventProducer.class); - _restliEntityClient = mock(RestliEntityClient.class); - _metricUtils = mockStatic(MetricUtils.class); - _counter = mock(Counter.class); - when(MetricUtils.counter(any(), any())).thenReturn(_counter); - } - - @AfterMethod - public void closeTest() { - _metricUtils.close(); - } - - private JavaEntityClient getJavaEntityClient() { - return new JavaEntityClient( - _entityService, - _deleteEntityService, - _entitySearchService, - _cachingEntitySearchService, - _searchService, - _lineageSearchService, - _timeseriesAspectService, - _eventProducer, - _restliEntityClient); - } - - @Test - void testSuccessWithNoRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier mockSupplier = mock(Supplier.class); - - when(mockSupplier.get()).thenReturn(42); - - assertEquals(client.withRetry(mockSupplier, null), 42); - verify(mockSupplier, times(1)).get(); - _metricUtils.verify(() -> MetricUtils.counter(any(), any()), times(0)); - } - - @Test - void testSuccessAfterMultipleRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier mockSupplier = 
mock(Supplier.class); - Exception e = new IllegalArgumentException(); - - when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenReturn(42); - - assertEquals(client.withRetry(mockSupplier, "test"), 42); - verify(mockSupplier, times(4)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), - times(3) - ); - } - - @Test - void testThrowAfterMultipleRetries() { - JavaEntityClient client = getJavaEntityClient(); - Supplier mockSupplier = mock(Supplier.class); - Exception e = new IllegalArgumentException(); - - when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenThrow(e); - - assertThrows(IllegalArgumentException.class, () -> client.withRetry(mockSupplier, "test")); - verify(mockSupplier, times(4)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), - times(4) - ); - } - - @Test - void testThrowAfterNonRetryableException() { - JavaEntityClient client = getJavaEntityClient(); - Supplier mockSupplier = mock(Supplier.class); - Exception e = new RequiredFieldNotPresentException("test"); - - when(mockSupplier.get()).thenThrow(e); - - assertThrows(RequiredFieldNotPresentException.class, () -> client.withRetry(mockSupplier, null)); - verify(mockSupplier, times(1)).get(); - _metricUtils.verify( - () -> MetricUtils.counter(client.getClass(), "exception_" + e.getClass().getName()), - times(1) - ); - } -} \ No newline at end of file + private EntityService _entityService; + private DeleteEntityService _deleteEntityService; + private EntitySearchService _entitySearchService; + private CachingEntitySearchService _cachingEntitySearchService; + private SearchService _searchService; + private LineageSearchService _lineageSearchService; + private TimeseriesAspectService _timeseriesAspectService; + private EventProducer _eventProducer; + private RestliEntityClient _restliEntityClient; + private MockedStatic _metricUtils; + private Counter _counter; + + @BeforeMethod + public void setupTest() { + _entityService = mock(EntityService.class); + _deleteEntityService = mock(DeleteEntityService.class); + _entitySearchService = mock(EntitySearchService.class); + _cachingEntitySearchService = mock(CachingEntitySearchService.class); + _searchService = mock(SearchService.class); + _lineageSearchService = mock(LineageSearchService.class); + _timeseriesAspectService = mock(TimeseriesAspectService.class); + _eventProducer = mock(EventProducer.class); + _restliEntityClient = mock(RestliEntityClient.class); + _metricUtils = mockStatic(MetricUtils.class); + _counter = mock(Counter.class); + when(MetricUtils.counter(any(), any())).thenReturn(_counter); + } + + @AfterMethod + public void closeTest() { + _metricUtils.close(); + } + + private JavaEntityClient getJavaEntityClient() { + return new JavaEntityClient( + _entityService, + _deleteEntityService, + _entitySearchService, + _cachingEntitySearchService, + _searchService, + _lineageSearchService, + _timeseriesAspectService, + _eventProducer, + _restliEntityClient); + } + + @Test + void testSuccessWithNoRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier mockSupplier = mock(Supplier.class); + + when(mockSupplier.get()).thenReturn(42); + + assertEquals(client.withRetry(mockSupplier, null), 42); + verify(mockSupplier, times(1)).get(); + _metricUtils.verify(() -> MetricUtils.counter(any(), any()), times(0)); + } + + @Test + void testSuccessAfterMultipleRetries() { + JavaEntityClient client = 
getJavaEntityClient(); + Supplier mockSupplier = mock(Supplier.class); + Exception e = new IllegalArgumentException(); + + when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenReturn(42); + + assertEquals(client.withRetry(mockSupplier, "test"), 42); + verify(mockSupplier, times(4)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), + times(3)); + } + + @Test + void testThrowAfterMultipleRetries() { + JavaEntityClient client = getJavaEntityClient(); + Supplier mockSupplier = mock(Supplier.class); + Exception e = new IllegalArgumentException(); + + when(mockSupplier.get()).thenThrow(e).thenThrow(e).thenThrow(e).thenThrow(e); + + assertThrows(IllegalArgumentException.class, () -> client.withRetry(mockSupplier, "test")); + verify(mockSupplier, times(4)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "test_exception_" + e.getClass().getName()), + times(4)); + } + + @Test + void testThrowAfterNonRetryableException() { + JavaEntityClient client = getJavaEntityClient(); + Supplier mockSupplier = mock(Supplier.class); + Exception e = new RequiredFieldNotPresentException("test"); + + when(mockSupplier.get()).thenThrow(e); + + assertThrows( + RequiredFieldNotPresentException.class, () -> client.withRetry(mockSupplier, null)); + verify(mockSupplier, times(1)).get(); + _metricUtils.verify( + () -> MetricUtils.counter(client.getClass(), "exception_" + e.getClass().getName()), + times(1)); + } +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java index 10a73cbe532a2..e13c2d9fd1005 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/BulkListenerTest.java @@ -1,39 +1,39 @@ package com.linkedin.metadata.elasticsearch.update; -import com.linkedin.metadata.search.elasticsearch.update.BulkListener; -import org.opensearch.action.bulk.BulkRequest; -import org.opensearch.action.support.WriteRequest; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.ArgumentMatchers.any; -import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; + +import com.linkedin.metadata.search.elasticsearch.update.BulkListener; +import org.mockito.Mockito; +import org.opensearch.action.bulk.BulkRequest; +import org.opensearch.action.support.WriteRequest; +import org.testng.annotations.Test; public class BulkListenerTest { - @Test - public void testConstructor() { - BulkListener test = BulkListener.getInstance(); - assertNotNull(test); - assertEquals(test, BulkListener.getInstance()); - assertNotEquals(test, BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE)); - } + @Test + public void testConstructor() { + BulkListener test = BulkListener.getInstance(); + assertNotNull(test); + assertEquals(test, BulkListener.getInstance()); + assertNotEquals(test, BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE)); + } - @Test - public void testDefaultPolicy() { - BulkListener test = BulkListener.getInstance(); + @Test + public void 
testDefaultPolicy() { + BulkListener test = BulkListener.getInstance(); - BulkRequest mockRequest1 = Mockito.mock(BulkRequest.class); - test.beforeBulk(0L, mockRequest1); - verify(mockRequest1, times(0)).setRefreshPolicy(any(WriteRequest.RefreshPolicy.class)); + BulkRequest mockRequest1 = Mockito.mock(BulkRequest.class); + test.beforeBulk(0L, mockRequest1); + verify(mockRequest1, times(0)).setRefreshPolicy(any(WriteRequest.RefreshPolicy.class)); - BulkRequest mockRequest2 = Mockito.mock(BulkRequest.class); - test = BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE); - test.beforeBulk(0L, mockRequest2); - verify(mockRequest2, times(1)).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - } + BulkRequest mockRequest2 = Mockito.mock(BulkRequest.class); + test = BulkListener.getInstance(WriteRequest.RefreshPolicy.IMMEDIATE); + test.beforeBulk(0L, mockRequest2); + verify(mockRequest2, times(1)).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java index 2d84c9f3444de..94e57b80d8113 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/elasticsearch/update/ESBulkProcessorTest.java @@ -1,18 +1,18 @@ package com.linkedin.metadata.elasticsearch.update; +import static org.testng.Assert.assertNotNull; + import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; -import org.opensearch.client.RestHighLevelClient; import org.mockito.Mockito; +import org.opensearch.client.RestHighLevelClient; import org.testng.annotations.Test; -import static org.testng.Assert.assertNotNull; - public class ESBulkProcessorTest { - @Test - public void testESBulkProcessorBuilder() { - RestHighLevelClient mock = Mockito.mock(RestHighLevelClient.class); - ESBulkProcessor test = ESBulkProcessor.builder(mock).build(); - assertNotNull(test); - } + @Test + public void testESBulkProcessorBuilder() { + RestHighLevelClient mock = Mockito.mock(RestHighLevelClient.class); + ESBulkProcessor test = ESBulkProcessor.builder(mock).build(); + assertNotNull(test); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java index 6a331647583d2..6d464d9cd9a10 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/AspectMigrationsDaoTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.AspectIngestionUtils; @@ -18,11 +21,7 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - -abstract public class AspectMigrationsDaoTest { +public abstract class AspectMigrationsDaoTest { protected T _migrationsDao; @@ -37,8 +36,11 @@ abstract public class AspectMigrationsDaoTest { protected AspectMigrationsDaoTest() throws EntityRegistryException { _snapshotEntityRegistry = new TestEntityRegistry(); - _configEntityRegistry = new 
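// ---------------------------------------------------------------------------
// Sketch of the internals these asserts imply (field and constructor names are
// assumptions, not the real BulkListener code): getInstance() must hand back a
// cached default instance, a policy-bearing instance must be distinct, and
// beforeBulk must stamp a refresh policy only when one was configured.
//
//   private static final Map<WriteRequest.RefreshPolicy, BulkListener> INSTANCES =
//       new ConcurrentHashMap<>();
//   private static final BulkListener DEFAULT = new BulkListener(null);
//
//   public static BulkListener getInstance() {
//     return DEFAULT;
//   }
//
//   public static BulkListener getInstance(WriteRequest.RefreshPolicy policy) {
//     return INSTANCES.computeIfAbsent(policy, BulkListener::new);
//   }
//
//   @Override
//   public void beforeBulk(long executionId, BulkRequest request) {
//     if (refreshPolicy != null) { // null for the default instance
//       request.setRefreshPolicy(refreshPolicy);
//     }
//   }
// ---------------------------------------------------------------------------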
ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - _testEntityRegistry = new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); + _configEntityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + _testEntityRegistry = + new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); } @Test @@ -46,8 +48,10 @@ public void testListAllUrns() throws AssertionError { final int totalAspects = 30; final int pageSize = 25; final int lastPageSize = 5; - Map ingestedAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); - List ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); + Map ingestedAspects = + AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); + List ingestedUrns = + ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); List seenUrns = new ArrayList<>(); Iterable page1 = _migrationsDao.listAllUrns(0, pageSize); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java index 70161fe640707..d94de604bf44d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraAspectMigrationsDaoTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -15,15 +17,11 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class CassandraAspectMigrationsDaoTest extends AspectMigrationsDaoTest { private CassandraContainer _cassandraContainer; - public CassandraAspectMigrationsDaoTest() throws EntityRegistryException { - } + public CassandraAspectMigrationsDaoTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -49,8 +47,14 @@ private void configureComponents() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(dao, _mockProducer, _testEntityRegistry, true, _mockUpdateIndicesService, - preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + dao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); @@ -59,8 +63,8 @@ private void configureComponents() { /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. 
+ * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java index 50e562b76c4e6..74c81ff2e8602 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/CassandraEntityServiceTest.java @@ -1,6 +1,8 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; @@ -8,6 +10,7 @@ import com.linkedin.metadata.AspectGenerationUtils; import com.linkedin.metadata.AspectIngestionUtils; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -28,22 +31,20 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - /** - * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a Cassandra database. + * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a + * Cassandra database. * - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. + *
<p>
This class also contains all the test methods where realities of an underlying storage leak + * into the {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. */ -public class CassandraEntityServiceTest extends EntityServiceTest { +public class CassandraEntityServiceTest + extends EntityServiceTest { private CassandraContainer _cassandraContainer; - public CassandraEntityServiceTest() throws EntityRegistryException { - } + public CassandraEntityServiceTest() throws EntityRegistryException {} @BeforeClass public void setupContainer() { @@ -69,16 +70,22 @@ private void configureComponents() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new CassandraRetentionService(_entityServiceImpl, session, 1000); _entityServiceImpl.setRetentionService(_retentionService); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { @@ -99,7 +106,8 @@ public void testIngestListLatestAspects() throws AssertionError { final int expectedTotalPages = 4; final int expectedEntitiesInLastPage = 10; - Map writtenAspects = AspectIngestionUtils.ingestCorpUserInfoAspects(_entityServiceImpl, totalEntities); + Map writtenAspects = + AspectIngestionUtils.ingestCorpUserInfoAspects(_entityServiceImpl, totalEntities); Set writtenUrns = writtenAspects.keySet(); String entity = writtenUrns.stream().findFirst().get().getEntityType(); String aspect = AspectGenerationUtils.getAspectName(new CorpUserInfo()); @@ -111,7 +119,8 @@ public void testIngestListLatestAspects() throws AssertionError { int expectedEntityCount = isLastPage ? expectedEntitiesInLastPage : pageSize; int expectedNextStart = isLastPage ? 
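// ---------------------------------------------------------------------------
// The paging contract exercised here: getNextStart() is pageStart + pageSize
// until the final page, where it becomes -1. A caller can therefore drain all
// pages with a loop like this (sketch; the pageSize value is arbitrary):
//
//   int start = 0;
//   final int pageSize = 25;
//   final List<RecordTemplate> all = new ArrayList<>();
//   while (start != -1) {
//     ListResult<RecordTemplate> page =
//         _entityServiceImpl.listLatestAspects(entity, aspect, start, pageSize);
//     all.addAll(page.getValues());
//     start = page.getNextStart(); // -1 terminates the loop
//   }
// ---------------------------------------------------------------------------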
-1 : pageStart + pageSize; - ListResult page = _entityServiceImpl.listLatestAspects(entity, aspect, pageStart, pageSize); + ListResult page = + _entityServiceImpl.listLatestAspects(entity, aspect, pageStart, pageSize); // Check paging metadata works as expected assertEquals(page.getNextStart(), expectedNextStart); @@ -121,15 +130,26 @@ public void testIngestListLatestAspects() throws AssertionError { assertEquals(page.getValues().size(), expectedEntityCount); // Remember all URNs we've seen returned for later assertions - readUrns.addAll(page.getMetadata().getExtraInfos().stream().map(ExtraInfo::getUrn).collect(Collectors.toList())); + readUrns.addAll( + page.getMetadata().getExtraInfos().stream() + .map(ExtraInfo::getUrn) + .collect(Collectors.toList())); } assertEquals(readUrns.size(), writtenUrns.size()); - // Check that all URNs we've created were seen in some page or other (also check that none were seen more than once) - // We can't be strict on exact order of items in the responses because Cassandra query limitations get in the way here. + // Check that all URNs we've created were seen in some page or other (also check that none were + // seen more than once) + // We can't be strict on exact order of items in the responses because Cassandra query + // limitations get in the way here. for (Urn wUrn : writtenUrns) { - long matchingUrnCount = readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); - assertEquals(matchingUrnCount, 1L, String.format("Each URN should appear exactly once. %s appeared %d times.", wUrn, matchingUrnCount)); + long matchingUrnCount = + readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); + assertEquals( + matchingUrnCount, + 1L, + String.format( + "Each URN should appear exactly once. %s appeared %d times.", + wUrn, matchingUrnCount)); } } @@ -147,7 +167,8 @@ public void testIngestListUrns() throws AssertionError { final int expectedTotalPages = 4; final int expectedEntitiesInLastPage = 10; - Map writtenAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalEntities); + Map writtenAspects = + AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalEntities); Set writtenUrns = writtenAspects.keySet(); String entity = writtenUrns.stream().findFirst().get().getEntityType(); @@ -169,11 +190,19 @@ public void testIngestListUrns() throws AssertionError { } assertEquals(readUrns.size(), writtenUrns.size()); - // Check that all URNs we've created were seen in some page or other (also check that none were seen more than once) - // We can't be strict on exact order of items in the responses because Cassandra query limitations get in the way here. + // Check that all URNs we've created were seen in some page or other (also check that none were + // seen more than once) + // We can't be strict on exact order of items in the responses because Cassandra query + // limitations get in the way here. for (Urn wUrn : writtenUrns) { - long matchingUrnCount = readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); - assertEquals(matchingUrnCount, 1L, String.format("Each URN should appear exactly once. %s appeared %d times.", wUrn, matchingUrnCount)); + long matchingUrnCount = + readUrns.stream().filter(rUrn -> rUrn.toString().equals(wUrn.toString())).count(); + assertEquals( + matchingUrnCount, + 1L, + String.format( + "Each URN should appear exactly once. 
%s appeared %d times.", + wUrn, matchingUrnCount)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java index 98f9ce241b850..496744770dba8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityServiceTest.java @@ -1,6 +1,9 @@ package com.linkedin.metadata.entity; -import com.linkedin.metadata.config.PreProcessHooks; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.AssertJUnit.*; + import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -10,6 +13,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.GraphService; @@ -28,11 +32,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.AssertJUnit.*; - - public class DeleteEntityServiceTest { protected EbeanAspectDao _aspectDao; @@ -46,14 +45,21 @@ public class DeleteEntityServiceTest { protected EntityRegistry _entityRegistry; public DeleteEntityServiceTest() { - _entityRegistry = new ConfigEntityRegistry(Snapshot.class.getClassLoader() - .getResourceAsStream("entity-registry.yml")); + _entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); _aspectDao = mock(EbeanAspectDao.class); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, mock(EventProducer.class), _entityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + mock(EventProducer.class), + _entityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _deleteEntityService = new DeleteEntityService(_entityServiceImpl, _graphService); } @@ -66,11 +72,19 @@ public void testDeleteUniqueRefGeneratesValidMCP() { final Urn container = UrnUtils.getUrn("urn:li:container:d1006cf3-3ff9-48e3-85cd-26eb23775ab2"); final RelatedEntitiesResult mockRelatedEntities = - new RelatedEntitiesResult(0, 1, 1, ImmutableList.of(new RelatedEntity("IsPartOf", dataset.toString()))); - - Mockito.when(_graphService.findRelatedEntities(null, newFilter("urn", container.toString()), - null, EMPTY_FILTER, ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000)) + new RelatedEntitiesResult( + 0, 1, 1, ImmutableList.of(new RelatedEntity("IsPartOf", dataset.toString()))); + + Mockito.when( + _graphService.findRelatedEntities( + null, + newFilter("urn", container.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000)) .thenReturn(mockRelatedEntities); final EntityResponse entityResponse = new EntityResponse(); @@ -78,14 +92,16 @@ public void testDeleteUniqueRefGeneratesValidMCP() { 
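// ---------------------------------------------------------------------------
// Aside on the "each URN should appear exactly once" scans above: the nested
// stream filter is O(written x read). An equivalent single-pass form, should
// the page counts ever grow (sketch):
//
//   Map<String, Long> counts =
//       readUrns.stream()
//           .collect(Collectors.groupingBy(Urn::toString, Collectors.counting()));
//   for (Urn wUrn : writtenUrns) {
//     assertEquals((long) counts.getOrDefault(wUrn.toString(), 0L), 1L);
//   }
// ---------------------------------------------------------------------------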
entityResponse.setEntityName(dataset.getEntityType()); final Container containerAspect = new Container(); containerAspect.setContainer(container); - final EntityAspectIdentifier dbKey = new EntityAspectIdentifier(dataset.toString(), Constants.CONTAINER_ASPECT_NAME, 0); + final EntityAspectIdentifier dbKey = + new EntityAspectIdentifier(dataset.toString(), Constants.CONTAINER_ASPECT_NAME, 0); final EntityAspect dbValue = new EntityAspect(); dbValue.setUrn(dataset.toString()); dbValue.setVersion(0); dbValue.setAspect(Constants.CONTAINER_ASPECT_NAME); dbValue.setMetadata(RecordUtils.toJsonString(containerAspect)); - dbValue.setSystemMetadata(RecordUtils.toJsonString(SystemMetadataUtils.createDefaultSystemMetadata())); + dbValue.setSystemMetadata( + RecordUtils.toJsonString(SystemMetadataUtils.createDefaultSystemMetadata())); final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp(); dbValue.setCreatedBy(auditStamp.getActor().toString()); dbValue.setCreatedOn(new Timestamp(auditStamp.getTime())); @@ -93,15 +109,25 @@ public void testDeleteUniqueRefGeneratesValidMCP() { final Map dbEntries = Map.of(dbKey, dbValue); Mockito.when(_aspectDao.batchGet(Mockito.any())).thenReturn(dbEntries); - RollbackResult result = new RollbackResult(container, Constants.DATASET_ENTITY_NAME, - Constants.CONTAINER_ASPECT_NAME, containerAspect, null, null, null, - ChangeType.DELETE, false, 1); + RollbackResult result = + new RollbackResult( + container, + Constants.DATASET_ENTITY_NAME, + Constants.CONTAINER_ASPECT_NAME, + containerAspect, + null, + null, + null, + ChangeType.DELETE, + false, + 1); Mockito.when(_aspectDao.runInTransactionWithRetry(Mockito.any(), Mockito.anyInt())) .thenReturn(result); - final DeleteReferencesResponse response = _deleteEntityService.deleteReferencesTo(container, false); + final DeleteReferencesResponse response = + _deleteEntityService.deleteReferencesTo(container, false); assertEquals(1, (int) response.getTotal()); assertFalse(response.getRelatedAspects().isEmpty()); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java index 67c9bd0a9e014..943ad2967de42 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/DeleteEntityUtilsTest.java @@ -14,71 +14,62 @@ public class DeleteEntityUtilsTest extends TestCase { - /** - * Tests that Aspect Processor deletes the entire struct if it no longer has any fields - */ + /** Tests that Aspect Processor deletes the entire struct if it no longer has any fields */ @Test public void testEmptyStructRemoval() { final String value = "{\"key_a\": \"hello\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: optional string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a")); assertFalse(updatedAspect.data().containsKey("key_a")); 
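// ---------------------------------------------------------------------------
// In data terms, the case above is:
//
//   schema: record simple_record { key_a: optional string }
//   input : {"key_a": "hello"},  PathSpec("key_a"), value "hello"
//   output: {}                   (optional field removed, empty struct kept)
//
// while testNonOptionalFieldRemoval below shows the contrasting case: when the
// matched field is required, getAspectWithReferenceRemoved returns null to
// signal the value cannot be removed without violating the schema.
// ---------------------------------------------------------------------------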
assertTrue(updatedAspect.data().isEmpty()); } - /** - * Tests that Aspect Processor deletes & removes optional values from a struct. - */ + /** Tests that Aspect Processor deletes & removes optional values from a struct. */ @Test public void testOptionalFieldRemoval() { final String value = "{\"key_a\": \"hello\", \"key_b\": \"world\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a")); assertFalse(updatedAspect.data().containsKey("key_a")); assertTrue(updatedAspect.data().containsKey("key_b")); assertEquals("world", updatedAspect.data().get("key_b")); } - /** - * Tests that Aspect Processor does not delete a non-optional value from a struct. - */ + /** Tests that Aspect Processor does not delete a non-optional value from a struct. */ @Test public void testNonOptionalFieldRemoval() { final String value = "{\"key_a\": \"hello\", \"key_b\": \"world\"}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: string\n" + "key_b: string\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - assertNull(DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a"))); + assertNull( + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a"))); } /** - * Tests that Aspect Processor deletes a required value from a record referenced by another record. + * Tests that Aspect Processor deletes a required value from a record referenced by another + * record. 
*/ @Test public void testNestedFieldRemoval() { @@ -86,24 +77,21 @@ public void testNestedFieldRemoval() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_a")); assertFalse(updatedAspect.data().containsKey("key_c")); } /** - * Tests that Aspect Processor is able to delete an optional sub-field while preserving nested structs. + * Tests that Aspect Processor is able to delete an optional sub-field while preserving nested + * structs. */ @Test public void testOptionalNestedFieldRemoval() { @@ -111,18 +99,15 @@ public void testOptionalNestedFieldRemoval() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertNotSame(aspect.data().get("key_c"), updatedAspect.data().get("key_c")); @@ -133,8 +118,8 @@ public void testOptionalNestedFieldRemoval() { } /** - * Tests that the Aspect Processor will delete an entire struct if after removal of a field, it becomes empty & - * is optional at some higher level. + * Tests that the Aspect Processor will delete an entire struct if after removal of a field, it + * becomes empty & is optional at some higher level. 
*/ @Test public void testRemovalOptionalFieldWithNonOptionalSubfield() { @@ -142,18 +127,15 @@ public void testRemovalOptionalFieldWithNonOptionalSubfield() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record\n" + "}"); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("world", aspect, schema, - new PathSpec("key_c", "key_b")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "world", aspect, schema, new PathSpec("key_c", "key_b")); assertFalse(updatedAspect.data().containsKey("key_c")); } @@ -164,15 +146,14 @@ public void testRemovalFromSingleArray() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: array[string]\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: array[string]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_a")).size()); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a", "*")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a", "*")); assertTrue(updatedAspect.data().containsKey("key_a")); assertTrue(((DataList) updatedAspect.data().get("key_a")).isEmpty()); @@ -184,15 +165,14 @@ public void testRemovalFromMultipleArray() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: array[string]\n" - + "}"); + pdlSchemaParser.parse("record simple_record {\n" + "key_a: array[string]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_a")).size()); final DataSchema schema = pdlSchemaParser.lookupName("simple_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_a", "*")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_a", "*")); assertTrue(updatedAspect.data().containsKey("key_a")); assertEquals(1, ((DataList) updatedAspect.data().get("key_a")).size()); @@ -200,28 +180,27 @@ public void testRemovalFromMultipleArray() { } /** - * Tests that Aspect Processor is able to remove sub-field from array field while preserving nested structs. + * Tests that Aspect Processor is able to remove sub-field from array field while preserving + * nested structs. 
*/ @Test public void testRemovalNestedFieldFromArray() { - final String value = "{\"key_c\": [{\"key_a\": \"hello\", \"key_b\": \"world\"}, {\"key_b\": \"extra info\"}]}"; + final String value = + "{\"key_c\": [{\"key_a\": \"hello\", \"key_b\": \"world\"}, {\"key_b\": \"extra info\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(2, ((DataList) updatedAspect.data().get("key_c")).size()); @@ -229,99 +208,95 @@ public void testRemovalNestedFieldFromArray() { assertNotSame(aspect.data().get("key_c"), updatedAspect.data().get("key_c")); // key_a field from first element from key_c should have been removed - assertFalse(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_b")); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); - assertEquals("world", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).get("key_b")); - assertEquals("extra info", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); + assertFalse( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_b")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); + assertEquals( + "world", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).get("key_b")); + assertEquals( + "extra info", + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); } - /** - * Tests that Aspect Processor is able to remove element from array field. - */ + /** Tests that Aspect Processor is able to remove element from array field. 
*/ @Test public void testRemovalElementFromArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}, {\"key_b\": \"extra info\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(2, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(2, ((DataList) updatedAspect.data().get("key_c")).size()); // First element from key_c should have been emptied - assertFalse(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); + assertFalse( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).containsKey("key_a")); assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).isEmpty()); - assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); - assertEquals("extra info", ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); + assertTrue( + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).containsKey("key_b")); + assertEquals( + "extra info", + ((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(1)).get("key_b")); } - /** - * Tests that Aspect Processor removes array if empty when removing underlying structs - */ + /** Tests that Aspect Processor removes array if empty when removing underlying structs */ @Test public void testRemovalEmptyArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: optional string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: optional string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: array[simple_record]\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: array[simple_record]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); assertTrue(updatedAspect.data().containsKey("key_c")); assertEquals(1, ((DataList) updatedAspect.data().get("key_c")).size()); 
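// ---------------------------------------------------------------------------
// In data terms, the wildcard path in this test maps:
//
//   input : {"key_c": [{"key_a": "hello"}]},  PathSpec("key_c", "*", "key_a")
//   output: {"key_c": [{}]}
//
// i.e. "*" fans the removal out across the DataList; the element that held the
// value is emptied but not dropped, which is exactly what the surrounding
// asserts check.
// ---------------------------------------------------------------------------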
assertTrue(((DataMap) ((DataList) updatedAspect.data().get("key_c")).get(0)).isEmpty()); } - /** - * Tests that Aspect Processor removes optional array field from struct when it is empty - */ + /** Tests that Aspect Processor removes optional array field from struct when it is empty */ @Test public void testRemovalOptionalEmptyArray() { final String value = "{\"key_c\": [{\"key_a\": \"hello\"}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record {\n" - + "key_a: string\n" - + "key_b: optional string\n" - + "}"); + pdlSchemaParser.parse( + "record simple_record {\n" + "key_a: string\n" + "key_b: optional string\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional array[simple_record]\n" - + "}"); + pdlSchemaParser.parse( + "record complex_record {\n" + "key_c: optional array[simple_record]\n" + "}"); assertEquals(1, ((DataList) aspect.data().get("key_c")).size()); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "*", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "*", "key_a")); // contains an empty key_c assertTrue(updatedAspect.data().containsKey("key_c")); @@ -329,7 +304,8 @@ public void testRemovalOptionalEmptyArray() { } /** - * Tests that Aspect Processor removes nested structs more than 1 level deep from an optional field. + * Tests that Aspect Processor removes nested structs more than 1 level deep from an optional + * field. */ @Test public void testNestedNonOptionalSubFieldsOnOptionalField() { @@ -337,46 +313,50 @@ public void testNestedNonOptionalSubFieldsOnOptionalField() { final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final PdlSchemaParser pdlSchemaParser = new PdlSchemaParser(new DefaultDataSchemaResolver()); - pdlSchemaParser.parse("record simple_record_1 {\n" - + "key_a: string\n" - + "}"); + pdlSchemaParser.parse("record simple_record_1 {\n" + "key_a: string\n" + "}"); - pdlSchemaParser.parse("record simple_record_2 {\n" - + "key_b: simple_record_1\n" - + "}"); + pdlSchemaParser.parse("record simple_record_2 {\n" + "key_b: simple_record_1\n" + "}"); - pdlSchemaParser.parse("record complex_record {\n" - + "key_c: optional simple_record_2\n" - + "}"); + pdlSchemaParser.parse("record complex_record {\n" + "key_c: optional simple_record_2\n" + "}"); assertTrue(aspect.data().containsKey("key_c")); final DataSchema schema = pdlSchemaParser.lookupName("complex_record"); - final Aspect updatedAspect = DeleteEntityUtils.getAspectWithReferenceRemoved("hello", aspect, schema, - new PathSpec("key_c", "key_b", "key_a")); + final Aspect updatedAspect = + DeleteEntityUtils.getAspectWithReferenceRemoved( + "hello", aspect, schema, new PathSpec("key_c", "key_b", "key_a")); assertFalse(updatedAspect.data().containsKey("key_c")); } /** - * Tests that the aspect processor is able to remove fields that are deeply nested where the top-level field is - * optional. - * This example is based on the SchemaMetadata object. + * Tests that the aspect processor is able to remove fields that are deeply nested where the + * top-level field is optional. This example is based on the SchemaMetadata object. 
*/ @Test public void testSchemaMetadataDelete() { - final String value = "{\"fields\": [{\"globalTags\": {\"tags\": [{\"tag\": \"urn:li:tag:Dimension\"}]}}]}"; + final String value = + "{\"fields\": [{\"globalTags\": {\"tags\": [{\"tag\": \"urn:li:tag:Dimension\"}]}}]}"; final Aspect aspect = RecordUtils.toRecordTemplate(Aspect.class, value); final Aspect updatedAspect = - DeleteEntityUtils.getAspectWithReferenceRemoved("urn:li:tag:Dimension", aspect, SchemaMetadata.dataSchema(), + DeleteEntityUtils.getAspectWithReferenceRemoved( + "urn:li:tag:Dimension", + aspect, + SchemaMetadata.dataSchema(), new PathSpec("fields", "*", "globalTags", "tags", "*", "tag")); assertFalse(updatedAspect.data().toString().contains("urn:li:tag:Dimension")); assertTrue(updatedAspect.data().containsKey("fields")); // tags must be empty, not field assertEquals(1, ((DataList) updatedAspect.data().get("fields")).size()); - assertEquals(0, ((DataList) ((DataMap) ((DataMap) ((DataList) updatedAspect.data().get("fields")).get(0)) - .get("globalTags")).get("tags")).size()); + assertEquals( + 0, + ((DataList) + ((DataMap) + ((DataMap) ((DataList) updatedAspect.data().get("fields")).get(0)) + .get("globalTags")) + .get("tags")) + .size()); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java index 30d821662d377..2430ebb1f94be 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanAspectMigrationsDaoTest.java @@ -1,9 +1,13 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.AspectIngestionUtils; -import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; import com.linkedin.metadata.event.EventProducer; @@ -19,27 +23,28 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EbeanAspectMigrationsDaoTest extends AspectMigrationsDaoTest { - public EbeanAspectMigrationsDaoTest() throws EntityRegistryException { - } + public EbeanAspectMigrationsDaoTest() throws EntityRegistryException {} @BeforeMethod public void setupTest() { - Database server = EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); + Database server = + EbeanTestUtils.createTestServer(EbeanAspectMigrationsDaoTest.class.getSimpleName()); _mockProducer = mock(EventProducer.class); EbeanAspectDao dao = new EbeanAspectDao(server); dao.setConnectionValidated(true); _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(dao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + dao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + 
preProcessHooks); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); @@ -51,12 +56,15 @@ public void testStreamAspects() throws AssertionError { final int totalAspects = 30; Map ingestedAspects = AspectIngestionUtils.ingestCorpUserKeyAspects(_entityServiceImpl, totalAspects); - List ingestedUrns = ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); + List ingestedUrns = + ingestedAspects.keySet().stream().map(Urn::toString).collect(Collectors.toList()); - Stream aspectStream = _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME); + Stream aspectStream = + _migrationsDao.streamAspects(CORP_USER_ENTITY_NAME, CORP_USER_KEY_ASPECT_NAME); List aspectList = aspectStream.collect(Collectors.toList()); assertEquals(ingestedUrns.size(), aspectList.size()); - Set urnsFetched = aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet()); + Set urnsFetched = + aspectList.stream().map(EntityAspect::getUrn).collect(Collectors.toSet()); for (String urn : ingestedUrns) { assertTrue(urnsFetched.contains(urn)); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java index e8a7d8740d328..eeb014f7afdc2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java @@ -1,15 +1,19 @@ package com.linkedin.metadata.entity; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.AuditStamp; -import com.linkedin.metadata.Constants; -import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.DataTemplateUtil; import com.linkedin.data.template.RecordTemplate; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.AspectGenerationUtils; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.EbeanTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; @@ -27,11 +31,6 @@ import io.ebean.Transaction; import io.ebean.TxScope; import io.ebean.annotation.TxIsolation; -import org.apache.commons.lang3.tuple.Triple; -import org.testng.Assert; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collection; import java.util.List; @@ -41,23 +40,23 @@ import java.util.concurrent.LinkedBlockingQueue; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static org.mockito.Mockito.mock; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - +import org.apache.commons.lang3.tuple.Triple; +import org.testng.Assert; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; /** - * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a relational database. + * A class that knows how to configure {@link EntityServiceTest} to run integration tests against a + * relational database. 
* - * This class also contains all the test methods where realities of an underlying storage leak into the - * {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be - * great to address captured differences. + *
<p>
This class also contains all the test methods where realities of an underlying storage leak + * into the {@link EntityServiceImpl} in the form of subtle behavior differences. Ideally that + * should never happen, and it'd be great to address captured differences. */ -public class EbeanEntityServiceTest extends EntityServiceTest { +public class EbeanEntityServiceTest + extends EntityServiceTest { - public EbeanEntityServiceTest() throws EntityRegistryException { - } + public EbeanEntityServiceTest() throws EntityRegistryException {} @BeforeMethod public void setupTest() { @@ -69,16 +68,22 @@ public void setupTest() { _mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true, - _mockUpdateIndicesService, preProcessHooks); + _entityServiceImpl = + new EntityServiceImpl( + _aspectDao, + _mockProducer, + _testEntityRegistry, + true, + _mockUpdateIndicesService, + preProcessHooks); _retentionService = new EbeanRetentionService(_entityServiceImpl, server, 1000); _entityServiceImpl.setRetentionService(_retentionService); } /** * Ideally, all tests would be in the base class, so they're reused between all implementations. - * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test - * to make sure this class will always be discovered. + * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy + * test to make sure this class will always be discovered. */ @Test public void obligatoryTest() throws AssertionError { @@ -111,30 +116,32 @@ public void testIngestListLatestAspects() throws AssertionError { // Ingest CorpUserInfo Aspect #3 CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - List items = List.of( + List items = + List.of( UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects - ListResult batch1 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); + ListResult batch1 = + _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 0, 2); assertEquals(batch1.getNextStart(), 2); assertEquals(batch1.getPageSize(), 2); @@ -144,7 +151,8 @@ public void testIngestListLatestAspects() throws AssertionError { assertTrue(DataTemplateUtil.areEqual(writeAspect1, batch1.getValues().get(0))); 
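// ---------------------------------------------------------------------------
// Pattern used in these tests for multi-aspect writes: each (urn, aspect) pair
// becomes an UpsertBatchItem and the whole set is submitted as one
// AspectsBatchImpl, rather than looping over single-aspect ingest calls.
// Reduced to a single item (same API as above):
//
//   UpsertBatchItem item =
//       UpsertBatchItem.builder()
//           .urn(entityUrn1)
//           .aspectName(aspectName)
//           .aspect(writeAspect1)
//           .systemMetadata(metadata1)
//           .build(_testEntityRegistry);
//   _entityServiceImpl.ingestAspects(
//       AspectsBatchImpl.builder().items(List.of(item)).build(),
//       TEST_AUDIT_STAMP, true, true);
// ---------------------------------------------------------------------------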
assertTrue(DataTemplateUtil.areEqual(writeAspect2, batch1.getValues().get(1))); - ListResult batch2 = _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 2, 2); + ListResult batch2 = + _entityServiceImpl.listLatestAspects(entityUrn1.getEntityType(), aspectName, 2, 2); assertEquals(batch2.getValues().size(), 1); assertTrue(DataTemplateUtil.areEqual(writeAspect3, batch2.getValues().get(0))); } @@ -175,27 +183,28 @@ public void testIngestListUrns() throws AssertionError { // Ingest CorpUserInfo Aspect #3 RecordTemplate writeAspect3 = AspectGenerationUtils.createCorpUserKey(entityUrn3); - List items = List.of( + List items = + List.of( UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); // List aspects urns ListUrnsResult batch1 = _entityServiceImpl.listUrns(entityUrn1.getEntityType(), 0, 2); @@ -221,12 +230,13 @@ public void testIngestListUrns() throws AssertionError { public void testNestedTransactions() throws AssertionError { Database server = _aspectDao.getServer(); - try (Transaction transaction = server.beginTransaction(TxScope.requiresNew() - .setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction = + server.beginTransaction(TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction.setBatchMode(true); // Work 1 - try (Transaction transaction2 = server.beginTransaction(TxScope.requiresNew() - .setIsolation(TxIsolation.REPEATABLE_READ))) { + try (Transaction transaction2 = + server.beginTransaction( + TxScope.requiresNew().setIsolation(TxIsolation.REPEATABLE_READ))) { transaction2.setBatchMode(true); // Work 2 transaction2.commit(); @@ -240,20 +250,21 @@ public void testNestedTransactions() throws AssertionError { System.out.println("done"); } - @Test public void dataGeneratorThreadingTest() { DataGenerator dataGenerator = new DataGenerator(_entityServiceImpl); List aspects = List.of("status", "globalTags", "glossaryTerms"); - List> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); // Expected no duplicates aspects - List duplicates = testData.stream() + List duplicates = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.groupingBy(Triple::toString)) - .entrySet().stream() + .entrySet() + .stream() .filter(e -> e.getValue().size() > 1) .map(Map.Entry::getKey) 
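// ---------------------------------------------------------------------------
// The threading tests below coordinate workers through a LinkedBlockingQueue
// and shut them down by enqueueing an empty list, one per thread, as a poison
// pill. The consuming side of that protocol looks like this (sketch of the
// worker loop; ingest(...) stands in for the real proposal handling):
//
//   while (true) {
//     List<MetadataChangeProposal> mcps = queue.take(); // blocks until work arrives
//     if (mcps.isEmpty()) {
//       break; // empty list == shut down
//     }
//     ingest(mcps); // hypothetical helper
//   }
// ---------------------------------------------------------------------------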
.collect(Collectors.toList()); @@ -271,38 +282,48 @@ public void multiThreadingTest() { // Add data List aspects = List.of("status", "globalTags", "glossaryTerms"); - List> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); executeThreadingTest(_entityServiceImpl, testData, 15); // Expected aspects - Set> generatedAspectIds = testData.stream() + Set> generatedAspectIds = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.toSet()); // Actual inserts - Set> actualAspectIds = server.sqlQuery( - "select urn, aspect, version from metadata_aspect_v2").findList().stream() - .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + Set> actualAspectIds = + server.sqlQuery("select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map( + row -> + Triple.of( + row.getString("urn"), row.getString("aspect"), row.getLong("version"))) .collect(Collectors.toSet()); // Assert State - Set> additions = actualAspectIds.stream() + Set> additions = + actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + assertEquals( + additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); - Set> missing = generatedAspectIds.stream() + Set> missing = + generatedAspectIds.stream() .filter(id -> !actualAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + assertEquals( + missing.size(), + 0, + String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); } /** - * Don't blame multi-threading for what might not be a threading issue. - * Perform the multi-threading test with 1 thread. + * Don't blame multi-threading for what might not be a threading issue. Perform the + * multi-threading test with 1 thread. 
*/ @Test public void singleThreadingTest() { @@ -311,85 +332,106 @@ public void singleThreadingTest() { // Add data List aspects = List.of("status", "globalTags", "glossaryTerms"); - List> testData = dataGenerator.generateMCPs("dataset", 25, aspects) - .collect(Collectors.toList()); + List> testData = + dataGenerator.generateMCPs("dataset", 25, aspects).collect(Collectors.toList()); executeThreadingTest(_entityServiceImpl, testData, 1); // Expected aspects - Set> generatedAspectIds = testData.stream() + Set> generatedAspectIds = + testData.stream() .flatMap(Collection::stream) .map(mcp -> Triple.of(mcp.getEntityUrn().toString(), mcp.getAspectName(), 0L)) .collect(Collectors.toSet()); // Actual inserts - Set> actualAspectIds = server.sqlQuery( - "select urn, aspect, version from metadata_aspect_v2").findList().stream() - .map(row -> Triple.of(row.getString("urn"), row.getString("aspect"), row.getLong("version"))) + Set> actualAspectIds = + server.sqlQuery("select urn, aspect, version from metadata_aspect_v2").findList().stream() + .map( + row -> + Triple.of( + row.getString("urn"), row.getString("aspect"), row.getLong("version"))) .collect(Collectors.toSet()); // Assert State - Set> additions = actualAspectIds.stream() + Set> additions = + actualAspectIds.stream() .filter(id -> !generatedAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); + assertEquals( + additions.size(), 0, String.format("Expected no additional aspects. Found: %s", additions)); - Set> missing = generatedAspectIds.stream() + Set> missing = + generatedAspectIds.stream() .filter(id -> !actualAspectIds.contains(id)) .collect(Collectors.toSet()); - assertEquals(missing.size(), 0, String.format("Expected all generated aspects to be inserted. Missing: %s", missing)); + assertEquals( + missing.size(), + 0, + String.format("Expected all generated aspects to be inserted. 
Missing: %s", missing)); } - private static void executeThreadingTest(EntityServiceImpl entityService, List> testData, - int threadCount) { + private static void executeThreadingTest( + EntityServiceImpl entityService, + List> testData, + int threadCount) { Database server = ((EbeanAspectDao) entityService._aspectDao).getServer(); server.sqlUpdate("truncate metadata_aspect_v2"); - int count = Objects.requireNonNull(server.sqlQuery( - "select count(*) as cnt from metadata_aspect_v2").findOne()).getInteger("cnt"); + int count = + Objects.requireNonNull( + server.sqlQuery("select count(*) as cnt from metadata_aspect_v2").findOne()) + .getInteger("cnt"); assertEquals(count, 0, "Expected exactly 0 rows at the start."); // Create ingest proposals in parallel, mimic the smoke-test ingestion - final LinkedBlockingQueue> queue = new LinkedBlockingQueue<>(threadCount * 2); + final LinkedBlockingQueue> queue = + new LinkedBlockingQueue<>(threadCount * 2); // Spin up workers - List writeThreads = IntStream.range(0, threadCount) + List writeThreads = + IntStream.range(0, threadCount) .mapToObj(threadId -> new Thread(new MultiThreadTestWorker(queue, entityService))) .collect(Collectors.toList()); writeThreads.forEach(Thread::start); - testData.forEach(mcps -> { - try { - queue.put(mcps); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + testData.forEach( + mcps -> { + try { + queue.put(mcps); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); // Terminate workers with empty mcp - IntStream.range(0, threadCount).forEach(threadId -> { - try { - queue.put(List.of()); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + IntStream.range(0, threadCount) + .forEach( + threadId -> { + try { + queue.put(List.of()); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); // Wait for threads to finish - writeThreads.forEach(thread -> { - try { - thread.join(10000); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }); + writeThreads.forEach( + thread -> { + try { + thread.join(10000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); } private static class MultiThreadTestWorker implements Runnable { private final EntityServiceImpl entityService; private final LinkedBlockingQueue> queue; - public MultiThreadTestWorker(LinkedBlockingQueue> queue, EntityServiceImpl entityService) { + public MultiThreadTestWorker( + LinkedBlockingQueue> queue, EntityServiceImpl entityService) { this.queue = queue; this.entityService = entityService; } @@ -404,9 +446,8 @@ public void run() { final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)); auditStamp.setTime(System.currentTimeMillis()); - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(mcps, entityService.getEntityRegistry()) - .build(); + AspectsBatchImpl batch = + AspectsBatchImpl.builder().mcps(mcps, entityService.getEntityRegistry()).build(); entityService.ingestProposal(batch, auditStamp, false); } } catch (InterruptedException | URISyntaxException ie) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index f205adc128ed2..f03811da35ea8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -1,5 
+1,9 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -68,1417 +72,1586 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - /** * A class to test {@link EntityServiceImpl} * - * This class is generic to allow same integration tests to be reused to test all supported storage backends. - * If you're adding another storage backend - you should create a new test class that extends this one providing - * hard implementations of {@link AspectDao} and {@link RetentionService} and implements {@code @BeforeMethod} etc - * to set up and tear down state. + *
<p>
This class is generic to allow same integration tests to be reused to test all supported + * storage backends. If you're adding another storage backend - you should create a new test class + * that extends this one providing hard implementations of {@link AspectDao} and {@link + * RetentionService} and implements {@code @BeforeMethod} etc to set up and tear down state. * - * If you realise that a feature you want to test, sadly, has divergent behaviours between different storage implementations, - * that you can't rectify - you should make the test method abstract and implement it in all implementations of this class. + *
<p>
If you realise that a feature you want to test, sadly, has divergent behaviours between + * different storage implementations, that you can't rectify - you should make the test method + * abstract and implement it in all implementations of this class. * * @param {@link AspectDao} implementation. * @param {@link RetentionService} implementation. */ -abstract public class EntityServiceTest { - - protected EntityServiceImpl _entityServiceImpl; - protected T_AD _aspectDao; - protected T_RS _retentionService; - - protected static final AuditStamp TEST_AUDIT_STAMP = AspectGenerationUtils.createAuditStamp(); - protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); - protected final EntityRegistry _configEntityRegistry = - new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - protected final EntityRegistry _testEntityRegistry = - new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); - protected EventProducer _mockProducer; - protected UpdateIndicesService _mockUpdateIndicesService; - - protected EntityServiceTest() throws EntityRegistryException { - } - - // This test had to be split out because Cassandra relational databases have different result ordering restrictions - @Test - abstract public void testIngestListLatestAspects() throws Exception; - - // This test had to be split out because Cassandra relational databases have different result ordering restrictions - @Test - abstract public void testIngestListUrns() throws Exception; - - // This test had to be split out because Cassandra doesn't support nested transactions - @Test - abstract public void testNestedTransactions() throws Exception; - - @Test - public void testIngestGetEntity() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - // 1. Ingest Entity - _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); - - // 2. Retrieve Entity - com.linkedin.entity.Entity readEntity = _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); - - // 3. Compare Entity Objects - assertEquals(readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. 
- assertTrue(DataTemplateUtil.areEqual(writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), +public abstract class EntityServiceTest { + + protected EntityServiceImpl _entityServiceImpl; + protected T_AD _aspectDao; + protected T_RS _retentionService; + + protected static final AuditStamp TEST_AUDIT_STAMP = AspectGenerationUtils.createAuditStamp(); + protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry(); + protected final EntityRegistry _configEntityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + protected final EntityRegistry _testEntityRegistry = + new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry); + protected EventProducer _mockProducer; + protected UpdateIndicesService _mockUpdateIndicesService; + + protected EntityServiceTest() throws EntityRegistryException {} + + // This test had to be split out because Cassandra relational databases have different result + // ordering restrictions + @Test + public abstract void testIngestListLatestAspects() throws Exception; + + // This test had to be split out because Cassandra relational databases have different result + // ordering restrictions + @Test + public abstract void testIngestListUrns() throws Exception; + + // This test had to be split out because Cassandra doesn't support nested transactions + @Test + public abstract void testNestedTransactions() throws Exception; + + @Test + public void testIngestGetEntity() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + // 1. Ingest Entity + _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); + + // 2. Retrieve Entity + com.linkedin.entity.Entity readEntity = + _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); + + // 3. Compare Entity Objects + assertEquals( + readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey = new CorpUserKey(); - expectedKey.setUsername("test"); - assertTrue(DataTemplateUtil.areEqual(expectedKey, - readEntity.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testAddKey() throws Exception { - // Test Writing a CorpUser Key - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - // 1. Ingest Entity - _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); - - // 2. 
Retrieve Entity - com.linkedin.entity.Entity readEntity = _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); - - // 3. Compare Entity Objects - assertEquals(readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. - assertTrue(DataTemplateUtil.areEqual(writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), + CorpUserKey expectedKey = new CorpUserKey(); + expectedKey.setUsername("test"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey, + readEntity + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testAddKey() throws Exception { + // Test Writing a CorpUser Key + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + com.linkedin.entity.Entity writeEntity = createCorpUserEntity(entityUrn, "tester@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + // 1. Ingest Entity + _entityServiceImpl.ingestEntity(writeEntity, TEST_AUDIT_STAMP, metadata1); + + // 2. Retrieve Entity + com.linkedin.entity.Entity readEntity = + _entityServiceImpl.getEntity(entityUrn, Collections.emptySet()); + + // 3. Compare Entity Objects + assertEquals( + readEntity.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey = new CorpUserKey(); - expectedKey.setUsername("test"); - assertTrue(DataTemplateUtil.areEqual(expectedKey, - readEntity.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestGetEntities() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. 
Retrieve Entities - Map readEntities = - _entityServiceImpl.getEntities(ImmutableSet.of(entityUrn1, entityUrn2), Collections.emptySet()); - - // 3. Compare Entity Objects - - // Entity 1 - com.linkedin.entity.Entity readEntity1 = readEntities.get(entityUrn1); - assertEquals(readEntity1.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. - assertTrue(DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0), + CorpUserKey expectedKey = new CorpUserKey(); + expectedKey.setUsername("test"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey, + readEntity + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntities() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map readEntities = + _entityServiceImpl.getEntities( + ImmutableSet.of(entityUrn1, entityUrn2), Collections.emptySet()); + + // 3. Compare Entity Objects + + // Entity 1 + com.linkedin.entity.Entity readEntity1 = readEntities.get(entityUrn1); + assertEquals( + readEntity1.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0), readEntity1.getValue().getCorpUserSnapshot().getAspects().get(1))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, - readEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - // Entity 2 - com.linkedin.entity.Entity readEntity2 = readEntities.get(entityUrn2); - assertEquals(readEntity2.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. 
- Optional writer2UserInfo = writeEntity2.getValue().getCorpUserSnapshot().getAspects() - .stream().filter(CorpUserAspect::isCorpUserInfo).findAny(); - Optional reader2UserInfo = writeEntity2.getValue().getCorpUserSnapshot().getAspects() - .stream().filter(CorpUserAspect::isCorpUserInfo).findAny(); - - assertTrue(writer2UserInfo.isPresent(), "Writer2 user info exists"); - assertTrue(reader2UserInfo.isPresent(), "Reader2 user info exists"); - assertTrue(DataTemplateUtil.areEqual(writer2UserInfo.get(), reader2UserInfo.get()), "UserInfo's are the same"); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, - readEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserKey())); // Key + Info aspect. - - ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), - mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), - mclCaptor.capture()); - mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - } + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey1, + readEntity1 + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. + + // Entity 2 + com.linkedin.entity.Entity readEntity2 = readEntities.get(entityUrn2); + assertEquals( + readEntity2.getValue().getCorpUserSnapshot().getAspects().size(), 2); // Key + Info aspect. + Optional writer2UserInfo = + writeEntity2.getValue().getCorpUserSnapshot().getAspects().stream() + .filter(CorpUserAspect::isCorpUserInfo) + .findAny(); + Optional reader2UserInfo = + writeEntity2.getValue().getCorpUserSnapshot().getAspects().stream() + .filter(CorpUserAspect::isCorpUserInfo) + .findAny(); + + assertTrue(writer2UserInfo.isPresent(), "Writer2 user info exists"); + assertTrue(reader2UserInfo.isPresent(), "Reader2 user info exists"); + assertTrue( + DataTemplateUtil.areEqual(writer2UserInfo.get(), reader2UserInfo.get()), + "UserInfo's are the same"); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + assertTrue( + DataTemplateUtil.areEqual( + expectedKey2, + readEntity2 + .getValue() + .getCorpUserSnapshot() + .getAspects() + .get(0) + .getCorpUserKey())); // Key + Info aspect. 
+ + ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), mclCaptor.capture()); + mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntitiesV2() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + String aspectName = "corpUserInfo"; + String keyName = "corpUserKey"; + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map readEntities = + _entityServiceImpl.getEntitiesV2( + "corpuser", ImmutableSet.of(entityUrn1, entityUrn2), ImmutableSet.of(aspectName)); + + // 3. Compare Entity Objects + + // Entity 1 + EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); + assertEquals(readEntityResponse1.getAspects().size(), 2); // Key + Info aspect. + EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); + assertEquals(envelopedAspect1.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect1.getValue().data()))); + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); + + // Entity 2 + EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); + assertEquals(readEntityResponse2.getAspects().size(), 2); // Key + Info aspect. 
+ EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); + assertEquals(envelopedAspect2.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect2.getValue().data()))); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetEntitiesVersionedV2() throws Exception { + // Test Writing a CorpUser Entity + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); + VersionedUrn versionedUrn1 = + new VersionedUrn().setUrn(entityUrn1).setVersionStamp("corpUserInfo:0"); + com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); + + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); + VersionedUrn versionedUrn2 = new VersionedUrn().setUrn(entityUrn2); + com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); + + String aspectName = "corpUserInfo"; + String keyName = "corpUserKey"; + + // 1. Ingest Entities + _entityServiceImpl.ingestEntities( + ImmutableList.of(writeEntity1, writeEntity2), + TEST_AUDIT_STAMP, + ImmutableList.of(metadata1, metadata2)); + + // 2. Retrieve Entities + Map readEntities = + _entityServiceImpl.getEntitiesVersionedV2( + ImmutableSet.of(versionedUrn1, versionedUrn2), ImmutableSet.of(aspectName)); + + // 3. Compare Entity Objects + + // Entity 1 + EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); + assertEquals(2, readEntityResponse1.getAspects().size()); // Key + Info aspect. + EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); + assertEquals(envelopedAspect1.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect1.getValue().data()))); + CorpUserKey expectedKey1 = new CorpUserKey(); + expectedKey1.setUsername("tester1"); + EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); + + // Entity 2 + EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); + assertEquals(2, readEntityResponse2.getAspects().size()); // Key + Info aspect. 
+ EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); + assertEquals(envelopedAspect2.getName(), aspectName); + assertTrue( + DataTemplateUtil.areEqual( + writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), + new CorpUserInfo(envelopedAspect2.getValue().data()))); + CorpUserKey expectedKey2 = new CorpUserKey(); + expectedKey2.setUsername("tester2"); + EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); + assertTrue( + DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn1), Mockito.any(), Mockito.any()); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn2), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestAspectsGetLatestAspects() throws Exception { + + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + List> pairToIngest = new ArrayList<>(); + + Status writeAspect1 = new Status().setRemoved(false); + String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); + pairToIngest.add(getAspectRecordPair(writeAspect1, Status.class)); + + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName2 = AspectGenerationUtils.getAspectName(writeAspect2); + pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + Map latestAspects = + _entityServiceImpl.getLatestAspectsForUrn( + entityUrn, new HashSet<>(Arrays.asList(aspectName1, aspectName2))); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testReingestAspectsGetLatestAspects() throws Exception { + + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + List> pairToIngest = new ArrayList<>(); + + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + writeAspect1.setCustomProperties(new StringMap()); + String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); + pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); + + GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); + + initialChangeLog.setAspect(aspect); + initialChangeLog.setSystemMetadata(metadata1); - @Test - public void testIngestGetEntitiesV2() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - com.linkedin.entity.Entity writeEntity1 = 
createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - String aspectName = "corpUserInfo"; - String keyName = "corpUserKey"; - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map readEntities = - _entityServiceImpl.getEntitiesV2("corpuser", ImmutableSet.of(entityUrn1, entityUrn2), ImmutableSet.of(aspectName)); - - // 3. Compare Entity Objects - - // Entity 1 - EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); - assertEquals(readEntityResponse1.getAspects().size(), 2); // Key + Info aspect. - EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); - assertEquals(envelopedAspect1.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect1.getValue().data()))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); - - // Entity 2 - EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); - assertEquals(readEntityResponse2.getAspects().size(), 2); // Key + Info aspect. 
- EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); - assertEquals(envelopedAspect2.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect2.getValue().data()))); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), - Mockito.any(), Mockito.any()); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(aspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(aspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); - @Test - public void testIngestGetEntitiesVersionedV2() throws Exception { - // Test Writing a CorpUser Entity - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:tester1"); - VersionedUrn versionedUrn1 = new VersionedUrn().setUrn(entityUrn1).setVersionStamp("corpUserInfo:0"); - com.linkedin.entity.Entity writeEntity1 = createCorpUserEntity(entityUrn1, "tester@test.com"); - - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:tester2"); - VersionedUrn versionedUrn2 = new VersionedUrn().setUrn(entityUrn2); - com.linkedin.entity.Entity writeEntity2 = createCorpUserEntity(entityUrn2, "tester2@test.com"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1625792690, "run-123"); - - String aspectName = "corpUserInfo"; - String keyName = "corpUserKey"; - - // 1. Ingest Entities - _entityServiceImpl.ingestEntities(ImmutableList.of(writeEntity1, writeEntity2), TEST_AUDIT_STAMP, - ImmutableList.of(metadata1, metadata2)); - - // 2. Retrieve Entities - Map readEntities = - _entityServiceImpl.getEntitiesVersionedV2(ImmutableSet.of(versionedUrn1, versionedUrn2), ImmutableSet.of(aspectName)); - - // 3. Compare Entity Objects - - // Entity 1 - EntityResponse readEntityResponse1 = readEntities.get(entityUrn1); - assertEquals(2, readEntityResponse1.getAspects().size()); // Key + Info aspect. 
- EnvelopedAspect envelopedAspect1 = readEntityResponse1.getAspects().get(aspectName); - assertEquals(envelopedAspect1.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity1.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect1.getValue().data()))); - CorpUserKey expectedKey1 = new CorpUserKey(); - expectedKey1.setUsername("tester1"); - EnvelopedAspect envelopedKey1 = readEntityResponse1.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey1, new CorpUserKey(envelopedKey1.getValue().data()))); - - // Entity 2 - EntityResponse readEntityResponse2 = readEntities.get(entityUrn2); - assertEquals(2, readEntityResponse2.getAspects().size()); // Key + Info aspect. - EnvelopedAspect envelopedAspect2 = readEntityResponse2.getAspects().get(aspectName); - assertEquals(envelopedAspect2.getName(), aspectName); - assertTrue( - DataTemplateUtil.areEqual(writeEntity2.getValue().getCorpUserSnapshot().getAspects().get(0).getCorpUserInfo(), - new CorpUserInfo(envelopedAspect2.getValue().data()))); - CorpUserKey expectedKey2 = new CorpUserKey(); - expectedKey2.setUsername("tester2"); - EnvelopedAspect envelopedKey2 = readEntityResponse2.getAspects().get(keyName); - assertTrue(DataTemplateUtil.areEqual(expectedKey2, new CorpUserKey(envelopedKey2.getValue().data()))); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn1), - Mockito.any(), Mockito.any()); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn2), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestAspectsGetLatestAspects() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - List> pairToIngest = new ArrayList<>(); - - Status writeAspect1 = new Status().setRemoved(false); - String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); - pairToIngest.add(getAspectRecordPair(writeAspect1, Status.class)); - - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName2 = AspectGenerationUtils.getAspectName(writeAspect2); - pairToIngest.add(getAspectRecordPair(writeAspect2, CorpUserInfo.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - Map latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(Arrays.asList(aspectName1, aspectName2)) - ); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); - assertTrue(DataTemplateUtil.areEqual(writeAspect2, latestAspects.get(aspectName2))); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testReingestAspectsGetLatestAspects() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - List> pairToIngest = new ArrayList<>(); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1 = 
AspectGenerationUtils.createCorpUserInfo("email@test.com"); - writeAspect1.setCustomProperties(new StringMap()); - String aspectName1 = AspectGenerationUtils.getAspectName(writeAspect1); - pairToIngest.add(getAspectRecordPair(writeAspect1, CorpUserInfo.class)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + verifyNoMoreInteractions(_mockProducer); + } - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); + @Test + public void testReingestLineageAspect() throws Exception { - GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - initialChangeLog.setAspect(aspect); - initialChangeLog.setSystemMetadata(metadata1); + List> pairToIngest = new ArrayList<>(); - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(aspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(aspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - Map latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, latestAspects.get(aspectName1))); + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); + initialChangeLog.setAspect(aspect); + initialChangeLog.setSystemMetadata(metadata1); - - 
verifyNoMoreInteractions(_mockProducer); + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(aspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(aspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); + + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); + + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testReingestLineageProposal() throws Exception { + + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); + + List> pairToIngest = new ArrayList<>(); + + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + MetadataChangeProposal mcp1 = new MetadataChangeProposal(); + mcp1.setEntityType(entityUrn.getEntityType()); + GenericAspect genericAspect = GenericRecordUtils.serializeAspect(upstreamLineage); + mcp1.setAspect(genericAspect); + mcp1.setEntityUrn(entityUrn); + mcp1.setChangeType(ChangeType.UPSERT); + mcp1.setSystemMetadata(metadata1); + mcp1.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); + + _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); + + final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); + initialChangeLog.setEntityType(entityUrn.getEntityType()); + initialChangeLog.setEntityUrn(entityUrn); + initialChangeLog.setChangeType(ChangeType.UPSERT); + initialChangeLog.setAspectName(aspectName1); + initialChangeLog.setCreated(TEST_AUDIT_STAMP); + + initialChangeLog.setAspect(genericAspect); + initialChangeLog.setSystemMetadata(metadata1); + + final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); + restateChangeLog.setEntityType(entityUrn.getEntityType()); + restateChangeLog.setEntityUrn(entityUrn); + restateChangeLog.setChangeType(ChangeType.RESTATE); + restateChangeLog.setAspectName(aspectName1); + restateChangeLog.setCreated(TEST_AUDIT_STAMP); + restateChangeLog.setAspect(genericAspect); + restateChangeLog.setSystemMetadata(metadata1); + restateChangeLog.setPreviousAspectValue(genericAspect); + restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); + + Map latestAspects = + _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(List.of(aspectName1))); + assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); + + verify(_mockProducer, 
times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(initialChangeLog)); + + // Mockito detects the previous invocation and throws an error in verifying the second call + // unless invocations are cleared + clearInvocations(_mockProducer); + + _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.any(), Mockito.eq(restateChangeLog)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestTimeseriesAspect() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProfile datasetProfile = new DatasetProfile(); + datasetProfile.setRowCount(1000); + datasetProfile.setColumnCount(15); + datasetProfile.setTimestampMillis(0L); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProfile"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + } + + @Test + public void testAsyncProposalVersioned() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProperties datasetProperties = new DatasetProperties(); + datasetProperties.setName("Foo Bar"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProperties"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); + genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); + verify(_mockProducer, times(0)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + verify(_mockProducer, times(1)) + .produceMetadataChangeProposal(Mockito.eq(entityUrn), Mockito.eq(gmce)); + } + + @Test + public void testAsyncProposalTimeseries() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + DatasetProfile datasetProfile = new DatasetProfile(); + datasetProfile.setRowCount(1000); + datasetProfile.setColumnCount(15); + datasetProfile.setTimestampMillis(0L); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("datasetProfile"); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); + 
genericAspect.setContentType("application/json"); + gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + verify(_mockProducer, times(0)) + .produceMetadataChangeProposal(Mockito.eq(entityUrn), Mockito.eq(gmce)); + } + + @Test + public void testUpdateGetAspect() throws AssertionError { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + AspectSpec corpUserInfoSpec = + _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #1 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + // Ingest CorpUserInfo Aspect #2 + writeAspect.setEmail("newemail@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #2 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); + + RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testGetAspectAtVersion() throws AssertionError { + // Test Writing a CorpUser Entity + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + AspectSpec corpUserInfoSpec = + _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Validate retrieval of CorpUserInfo Aspect #1 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); + writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); + writtenVersionedAspect1.setVersion(0); + + VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + // Validate retrieval of CorpUserInfo Aspect #2 + _entityServiceImpl.ingestAspects( + entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); + + VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); + 
writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); + writtenVersionedAspect2.setVersion(0); + + VersionedAspect readAspectVersion2 = + _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); + assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); + verify(_mockProducer, times(2)) + .produceMetadataChangeLog( + Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), Mockito.any()); + + readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); + assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testRollbackAspect() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); + Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Ingest CorpUserInfo Aspect #3 + CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since this run has been overwritten + AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); + rollbackOverwrittenAspect.setRunId("run-123"); + rollbackOverwrittenAspect.setAspectName(aspectName); + rollbackOverwrittenAspect.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-123", true); + + // assert nothing was deleted + RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); + + RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); + + // this should delete the most recent aspect + AspectRowSummary rollbackRecentAspect = new AspectRowSummary(); + rollbackRecentAspect.setRunId("run-456"); + 
rollbackRecentAspect.setAspectName(aspectName); + rollbackRecentAspect.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackRecentAspect), "run-456", true); + + // assert the new most recent aspect is the original one + RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readNewRecentAspect)); + } + + @Test + public void testRollbackKey() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(keyAspectName) + .aspect(writeKey1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since the key should have been written in the first run + AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); + rollbackKeyWithWrongRunId.setRunId("run-456"); + rollbackKeyWithWrongRunId.setAspectName("corpUserKey"); + rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithWrongRunId), "run-456", true); + + // assert nothing was deleted + RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); + + RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); + + // this should delete the most recent aspect + AspectRowSummary rollbackKeyWithCorrectRunId = new AspectRowSummary(); + rollbackKeyWithCorrectRunId.setRunId("run-123"); + rollbackKeyWithCorrectRunId.setAspectName("corpUserKey"); + rollbackKeyWithCorrectRunId.setUrn(entityUrn1.toString()); + + _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithCorrectRunId), "run-123", true); + + // assert the new most recent aspect is null + RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); + } + + @Test + public void testRollbackUrn() throws AssertionError { + Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); + Urn entityUrn3 = 
UrnUtils.getUrn("urn:li:corpuser:test3"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + // Ingest CorpUserInfo Aspect #3 + CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); + + // Ingest CorpUserInfo Aspect #1 Overwrite + CorpUserInfo writeAspect1Overwrite = + AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); + + List items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(keyAspectName) + .aspect(writeKey1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn2) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn3) + .aspectName(aspectName) + .aspect(writeAspect3) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn1) + .aspectName(aspectName) + .aspect(writeAspect1Overwrite) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // this should no-op since the key should have been written in the furst run + AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); + rollbackKeyWithWrongRunId.setRunId("run-456"); + rollbackKeyWithWrongRunId.setAspectName("CorpUserKey"); + rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); + + // this should delete all related aspects + _entityServiceImpl.deleteUrn(UrnUtils.getUrn("urn:li:corpuser:test1")); + + // assert the new most recent aspect is null + RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); + assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); + + RecordTemplate deletedKeyAspect = _entityServiceImpl.getAspect(entityUrn1, "corpUserKey", 0); + assertTrue(DataTemplateUtil.areEqual(null, deletedKeyAspect)); + } + + @Test + public void testIngestGetLatestAspect() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + List items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + 
// Validate retrieval of CorpUserInfo Aspect #1 + RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + + reset(_mockProducer); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); + EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); + + assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNotNull(mcl.getPreviousAspectValue()); + assertNotNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestGetLatestEnvelopedAspect() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + EnvelopedAspect readAspect1 = + _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); + assertTrue( + DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); + + 
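// A second version is written under run-456 below; both the latest enveloped value and the per-version system metadata are then checked. + 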
// Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + EnvelopedAspect readAspect2 = + _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); + EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); + EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); + + assertTrue( + DataTemplateUtil.areEqual(writeAspect2, new CorpUserInfo(readAspect2.getValue().data()))); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); + + verify(_mockProducer, times(2)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), Mockito.any()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testIngestSameAspect() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); + + // Ingest CorpUserInfo Aspect #1 + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); + SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); + SystemMetadata metadata3 = + AspectGenerationUtils.createSystemMetadata(1635792689, "run-123", "run-456"); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #1 + RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "corpuser"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.UPSERT); + + verifyNoMoreInteractions(_mockProducer); + + reset(_mockProducer); + + // Ingest CorpUserInfo Aspect #2 + CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect2) + .systemMetadata(metadata2) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + // Validate retrieval of CorpUserInfo Aspect #2 + RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); + EntityAspect readAspectDao2 = + _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); + + assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); + 
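// Ingesting an identical aspect should not create a new version; the stored system metadata is merged instead, which metadata3 (listing both runs) captures. + 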
assertFalse( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); + assertFalse( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata1)); + + assertTrue( + DataTemplateUtil.areEqual( + EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata3)); + + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + + verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testRetention() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); + CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); + + String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); + // Ingest Status Aspect + Status writeAspect2 = new Status().setRemoved(true); + Status writeAspect2a = new Status().setRemoved(false); + Status writeAspect2b = new Status().setRemoved(true); + + List<UpsertBatchItem> items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1b) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2a) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2b) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); + + _retentionService.setRetention( + null, + null, + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(2)))); + _retentionService.setRetention( + "corpuser", + "status", + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(4)))); + + // Ingest CorpUserInfo Aspect again + CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); + // Ingest Status Aspect again + Status writeAspect2c = new Status().setRemoved(false); + + items = + List.of( + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName) + .aspect(writeAspect1c) + .systemMetadata(metadata1) + .build(_testEntityRegistry), + UpsertBatchItem.builder() + .urn(entityUrn) + .aspectName(aspectName2) + .aspect(writeAspect2c) + .systemMetadata(metadata1) + .build(_testEntityRegistry)); + _entityServiceImpl.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); + + 
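// Under the global maxVersions=2 policy the oldest corpUserInfo version (version 1) should now be pruned, while status keeps version 1 under its corpuser/status maxVersions=4 policy. + 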
assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); + + // Reset retention policies + _retentionService.setRetention( + null, + null, + new DataHubRetentionConfig() + .setRetention( + new Retention().setVersion(new VersionBasedRetention().setMaxVersions(1)))); + _retentionService.deleteRetention("corpuser", "status"); + // Invoke batch apply + _retentionService.batchApplyRetention(null, null); + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10) + .getTotalCount(), + 1); + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10) + .getTotalCount(), + 1); + } + + @Test + public void testIngestAspectIfNotPresent() throws AssertionError { + Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); + + // Ingest CorpUserInfo Aspect + CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); + CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1); + CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1); + + String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); + // Ingest Status Aspect + Status writeAspect2 = new Status().setRemoved(true); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName2, writeAspect2, TEST_AUDIT_STAMP, metadata1); + Status writeAspect2a = new Status().setRemoved(false); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1); + Status writeAspect2b = new Status().setRemoved(true); + _entityServiceImpl.ingestAspectIfNotPresent( + entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1); + + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 0), writeAspect1); + assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 0), writeAspect2); + + assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); + assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1)); + + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10) + .getTotalCount(), + 1); + assertEquals( + _entityServiceImpl + .listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10) + .getTotalCount(), + 1); + } + + /** + * Equivalence checks against mocks fail when using the object directly, because converting a + * RecordTemplate to and from JSON reorders its fields. This simulates pulling the historical + * SystemMetadata from the previous call. 
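+ * (e.g. the same SystemMetadata may come back with a different field order after a JSON round trip, even though it is semantically unchanged).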
+ */ + protected <T extends RecordTemplate> T simulatePullFromDB(T aspect, Class<T> clazz) + throws Exception { + final ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + return RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + } + + @Test + public void testRestoreIndices() throws Exception { + if (this instanceof EbeanEntityServiceTest) { + String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; + Urn entityUrn = UrnUtils.getUrn(urnStr); + List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + + final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); + String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); + pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); + + SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); + + _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); + + clearInvocations(_mockProducer); + + RestoreIndicesArgs args = new RestoreIndicesArgs(); + args.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); + args.setBatchSize(1); + args.setStart(0); + args.setBatchDelayMs(1L); + args.setNumThreads(1); + args.setUrn(urnStr); + _entityServiceImpl.restoreIndices(args, obj -> {}); + + ArgumentCaptor<MetadataChangeLog> mclCaptor = + ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); + MetadataChangeLog mcl = mclCaptor.getValue(); + assertEquals(mcl.getEntityType(), "dataset"); + assertNull(mcl.getPreviousAspectValue()); + assertNull(mcl.getPreviousSystemMetadata()); + assertEquals(mcl.getChangeType(), ChangeType.RESTATE); + assertEquals(mcl.getSystemMetadata().getProperties().get(FORCE_INDEXING_KEY), "true"); } - - @Test - public void testReingestLineageAspect() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); - - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); - - GenericAspect aspect = GenericRecordUtils.serializeAspect(pairToIngest.get(0).getSecond()); - - initialChangeLog.setAspect(aspect); - initialChangeLog.setSystemMetadata(metadata1); - - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - 
restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - restateChangeLog.setAspect(aspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(aspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); - - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); - - - verifyNoMoreInteractions(_mockProducer); + } + + @Test + public void testValidateUrn() throws Exception { + // Valid URN + Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); + EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); + + // URN with trailing whitespace + Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); + Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); } - @Test - public void testReingestLineageProposal() throws Exception { - - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); - - List<Pair<String, RecordTemplate>> pairToIngest = new ArrayList<>(); + // Urn purely too long + String stringTooLong = "a".repeat(510); - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - MetadataChangeProposal mcp1 = new MetadataChangeProposal(); - mcp1.setEntityType(entityUrn.getEntityType()); - GenericAspect genericAspect = GenericRecordUtils.serializeAspect(upstreamLineage); - mcp1.setAspect(genericAspect); - mcp1.setEntityUrn(entityUrn); - mcp1.setChangeType(ChangeType.UPSERT); - mcp1.setSystemMetadata(metadata1); - mcp1.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); - - _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); - - final MetadataChangeLog initialChangeLog = new MetadataChangeLog(); - initialChangeLog.setEntityType(entityUrn.getEntityType()); - initialChangeLog.setEntityUrn(entityUrn); - initialChangeLog.setChangeType(ChangeType.UPSERT); - initialChangeLog.setAspectName(aspectName1); - initialChangeLog.setCreated(TEST_AUDIT_STAMP); - - initialChangeLog.setAspect(genericAspect); - initialChangeLog.setSystemMetadata(metadata1); - - final MetadataChangeLog restateChangeLog = new MetadataChangeLog(); - restateChangeLog.setEntityType(entityUrn.getEntityType()); - restateChangeLog.setEntityUrn(entityUrn); - restateChangeLog.setChangeType(ChangeType.RESTATE); - restateChangeLog.setAspectName(aspectName1); - restateChangeLog.setCreated(TEST_AUDIT_STAMP); - 
restateChangeLog.setAspect(genericAspect); - restateChangeLog.setSystemMetadata(metadata1); - restateChangeLog.setPreviousAspectValue(genericAspect); - restateChangeLog.setPreviousSystemMetadata(simulatePullFromDB(metadata1, SystemMetadata.class)); - - Map<String, RecordTemplate> latestAspects = _entityServiceImpl.getLatestAspectsForUrn( - entityUrn, - new HashSet<>(List.of(aspectName1)) - ); - assertTrue(DataTemplateUtil.areEqual(upstreamLineage, latestAspects.get(aspectName1))); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(initialChangeLog)); - - // Mockito detects the previous invocation and throws an error in verifying the second call unless invocations are cleared - clearInvocations(_mockProducer); - - _entityServiceImpl.ingestProposal(mcp1, TEST_AUDIT_STAMP, false); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.eq(restateChangeLog)); - - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestTimeseriesAspect() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProfile datasetProfile = new DatasetProfile(); - datasetProfile.setRowCount(1000); - datasetProfile.setColumnCount(15); - datasetProfile.setTimestampMillis(0L); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProfile"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); - } - - @Test - public void testAsyncProposalVersioned() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProperties datasetProperties = new DatasetProperties(); - datasetProperties.setName("Foo Bar"); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProperties"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); - verify(_mockProducer, times(0)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - verify(_mockProducer, times(1)).produceMetadataChangeProposal(Mockito.eq(entityUrn), - Mockito.eq(gmce)); - } - - - @Test - public void testAsyncProposalTimeseries() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - DatasetProfile datasetProfile = new DatasetProfile(); - datasetProfile.setRowCount(1000); - datasetProfile.setColumnCount(15); - datasetProfile.setTimestampMillis(0L); - MetadataChangeProposal gmce = new 
MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("datasetProfile"); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetProfileSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProfile); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetProfileSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, true); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - verify(_mockProducer, times(0)).produceMetadataChangeProposal(Mockito.eq(entityUrn), - Mockito.eq(gmce)); - } - - @Test - public void testUpdateGetAspect() throws AssertionError { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); - - RecordTemplate readAspect1 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect1)); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - // Ingest CorpUserInfo Aspect #2 - writeAspect.setEmail("newemail@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect)), TEST_AUDIT_STAMP, null); - - RecordTemplate readAspect2 = _entityServiceImpl.getAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect, readAspect2)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testGetAspectAtVersion() throws AssertionError { - // Test Writing a CorpUser Entity - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - AspectSpec corpUserInfoSpec = _testEntityRegistry.getEntitySpec("corpuser").getAspectSpec("corpUserInfo"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Validate retrieval of CorpUserInfo Aspect #1 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect1)), TEST_AUDIT_STAMP, null); - - VersionedAspect writtenVersionedAspect1 = new VersionedAspect(); - writtenVersionedAspect1.setAspect(Aspect.create(writeAspect1)); - writtenVersionedAspect1.setVersion(0); - - VersionedAspect readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), 
Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - - // Validate retrieval of CorpUserInfo Aspect #2 - _entityServiceImpl.ingestAspects(entityUrn, List.of(Pair.of(aspectName, writeAspect2)), TEST_AUDIT_STAMP, null); - - VersionedAspect writtenVersionedAspect2 = new VersionedAspect(); - writtenVersionedAspect2.setAspect(Aspect.create(writeAspect2)); - writtenVersionedAspect2.setVersion(0); - - VersionedAspect readAspectVersion2 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, 0); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspectVersion2)); - assertTrue(DataTemplateUtil.areEqual(writtenVersionedAspect2, readAspectVersion2)); - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.eq(corpUserInfoSpec), - Mockito.any()); - - readAspect1 = _entityServiceImpl.getVersionedAspect(entityUrn, aspectName, -1); - assertFalse(DataTemplateUtil.areEqual(writtenVersionedAspect1, readAspect1)); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testRollbackAspect() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Ingest CorpUserInfo Aspect #3 - CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since this run has been overwritten - AspectRowSummary rollbackOverwrittenAspect = new AspectRowSummary(); - rollbackOverwrittenAspect.setRunId("run-123"); - rollbackOverwrittenAspect.setAspectName(aspectName); - rollbackOverwrittenAspect.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-123", true); - - // assert nothing was deleted - RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); - 
assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); - - RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); - - // this should delete the most recent aspect - AspectRowSummary rollbackRecentAspect = new AspectRowSummary(); - rollbackRecentAspect.setRunId("run-456"); - rollbackRecentAspect.setAspectName(aspectName); - rollbackRecentAspect.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackOverwrittenAspect), "run-456", true); - - // assert the new most recent aspect is the original one - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readNewRecentAspect)); - } - - @Test - public void testRollbackKey() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(keyAspectName) - .aspect(writeKey1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since the key should have been written in the furst run - AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); - rollbackKeyWithWrongRunId.setRunId("run-456"); - rollbackKeyWithWrongRunId.setAspectName("corpUserKey"); - rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithWrongRunId), "run-456", true); - - // assert nothing was deleted - RecordTemplate readAspectOriginal = _entityServiceImpl.getAspect(entityUrn1, aspectName, 1); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspectOriginal)); - - RecordTemplate readAspectOverwrite = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(writeAspect1Overwrite, readAspectOverwrite)); - - // this should delete the most recent aspect - AspectRowSummary rollbackKeyWithCorrectRunId = new AspectRowSummary(); - rollbackKeyWithCorrectRunId.setRunId("run-123"); - rollbackKeyWithCorrectRunId.setAspectName("corpUserKey"); - rollbackKeyWithCorrectRunId.setUrn(entityUrn1.toString()); - - _entityServiceImpl.rollbackRun(ImmutableList.of(rollbackKeyWithCorrectRunId), "run-123", true); - - // assert the new most 
recent aspect is null - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); - } - - @Test - public void testRollbackUrn() throws AssertionError { - Urn entityUrn1 = UrnUtils.getUrn("urn:li:corpuser:test1"); - Urn entityUrn2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - Urn entityUrn3 = UrnUtils.getUrn("urn:li:corpuser:test3"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - String keyAspectName = _entityServiceImpl.getKeyAspectName(entityUrn1); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - RecordTemplate writeKey1 = EntityUtils.buildKeyAspect(_testEntityRegistry, entityUrn1); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - // Ingest CorpUserInfo Aspect #3 - CorpUserInfo writeAspect3 = AspectGenerationUtils.createCorpUserInfo("email3@test.com"); - - // Ingest CorpUserInfo Aspect #1 Overwrite - CorpUserInfo writeAspect1Overwrite = AspectGenerationUtils.createCorpUserInfo("email1.overwrite@test.com"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(keyAspectName) - .aspect(writeKey1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn2) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn3) - .aspectName(aspectName) - .aspect(writeAspect3) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn1) - .aspectName(aspectName) - .aspect(writeAspect1Overwrite) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // this should no-op since the key should have been written in the furst run - AspectRowSummary rollbackKeyWithWrongRunId = new AspectRowSummary(); - rollbackKeyWithWrongRunId.setRunId("run-456"); - rollbackKeyWithWrongRunId.setAspectName("CorpUserKey"); - rollbackKeyWithWrongRunId.setUrn(entityUrn1.toString()); - - // this should delete all related aspects - _entityServiceImpl.deleteUrn(UrnUtils.getUrn("urn:li:corpuser:test1")); - - // assert the new most recent aspect is null - RecordTemplate readNewRecentAspect = _entityServiceImpl.getAspect(entityUrn1, aspectName, 0); - assertTrue(DataTemplateUtil.areEqual(null, readNewRecentAspect)); - - RecordTemplate deletedKeyAspect = _entityServiceImpl.getAspect(entityUrn1, "corpUserKey", 0); - assertTrue(DataTemplateUtil.areEqual(null, deletedKeyAspect)); + Urn testUrnTooLong = new Urn("li", "corpuser", new TupleKey(stringTooLong)); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); + Assert.fail("Should have raised IllegalArgumentException for URN too long"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); } 
- @Test - public void testIngestGetLatestAspect() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); - - ArgumentCaptor<MetadataChangeLog> mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - - reset(_mockProducer); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNotNull(mcl.getPreviousAspectValue()); - assertNotNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); + // Urn too long when URL encoded + StringBuilder buildStringTooLongWhenEncoded = new StringBuilder(); + StringBuilder buildStringSameLengthWhenEncoded = new StringBuilder(); + for (int i = 0; i < 200; i++) { + buildStringTooLongWhenEncoded.append('>'); + buildStringSameLengthWhenEncoded.append('a'); } - - @Test - public void testIngestGetLatestEnvelopedAspect() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = 
AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - - List items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - EnvelopedAspect readAspect1 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, new CorpUserInfo(readAspect1.getValue().data()))); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email2@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - EnvelopedAspect readAspect2 = _entityServiceImpl.getLatestEnvelopedAspect("corpuser", entityUrn, aspectName); - EntityAspect readAspectDao1 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 1); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, 0); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, new CorpUserInfo(readAspect2.getValue().data()))); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao1.getSystemMetadata()), metadata1)); - - verify(_mockProducer, times(2)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), Mockito.any()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testIngestSameAspect() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test"); - - // Ingest CorpUserInfo Aspect #1 - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - String aspectName = AspectGenerationUtils.getAspectName(writeAspect1); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(1625792689, "run-123"); - SystemMetadata metadata2 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-456"); - SystemMetadata metadata3 = AspectGenerationUtils.createSystemMetadata(1635792689, "run-123", "run-456"); - - List items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #1 - RecordTemplate readAspect1 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - assertTrue(DataTemplateUtil.areEqual(writeAspect1, readAspect1)); - - ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = 
mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "corpuser"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.UPSERT); - - verifyNoMoreInteractions(_mockProducer); - - reset(_mockProducer); - - // Ingest CorpUserInfo Aspect #2 - CorpUserInfo writeAspect2 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect2) - .systemMetadata(metadata2) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - // Validate retrieval of CorpUserInfo Aspect #2 - RecordTemplate readAspect2 = _entityServiceImpl.getLatestAspect(entityUrn, aspectName); - EntityAspect readAspectDao2 = _aspectDao.getAspect(entityUrn.toString(), aspectName, ASPECT_LATEST_VERSION); - - assertTrue(DataTemplateUtil.areEqual(writeAspect2, readAspect2)); - assertFalse(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata2)); - assertFalse(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata1)); - - assertTrue(DataTemplateUtil.areEqual(EntityUtils.parseSystemMetadata(readAspectDao2.getSystemMetadata()), metadata3)); - - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - - verifyNoMoreInteractions(_mockProducer); - } - - @Test - public void testRetention() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - - String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); - // Ingest Status Aspect - Status writeAspect2 = new Status().setRemoved(true); - Status writeAspect2a = new Status().setRemoved(false); - Status writeAspect2b = new Status().setRemoved(true); - - List<UpsertBatchItem> items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1a) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1b) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2a) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2b) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - 
assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName, 1), writeAspect1); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); - - _retentionService.setRetention(null, null, new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(2)))); - _retentionService.setRetention("corpuser", "status", new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(4)))); - - // Ingest CorpUserInfo Aspect again - CorpUserInfo writeAspect1c = AspectGenerationUtils.createCorpUserInfo("email_c@test.com"); - // Ingest Status Aspect again - Status writeAspect2c = new Status().setRemoved(false); - - items = List.of( - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName) - .aspect(writeAspect1c) - .systemMetadata(metadata1) - .build(_testEntityRegistry), - UpsertBatchItem.builder() - .urn(entityUrn) - .aspectName(aspectName2) - .aspect(writeAspect2c) - .systemMetadata(metadata1) - .build(_testEntityRegistry) - ); - _entityServiceImpl.ingestAspects(AspectsBatchImpl.builder().items(items).build(), TEST_AUDIT_STAMP, true, true); - - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1), writeAspect2); - - // Reset retention policies - _retentionService.setRetention(null, null, new DataHubRetentionConfig().setRetention( - new Retention().setVersion(new VersionBasedRetention().setMaxVersions(1)))); - _retentionService.deleteRetention("corpuser", "status"); - // Invoke batch apply - _retentionService.batchApplyRetention(null, null); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10).getTotalCount(), 1); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10).getTotalCount(), 1); - } - - @Test - public void testIngestAspectIfNotPresent() throws AssertionError { - Urn entityUrn = UrnUtils.getUrn("urn:li:corpuser:test1"); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - String aspectName = AspectGenerationUtils.getAspectName(new CorpUserInfo()); - - // Ingest CorpUserInfo Aspect - CorpUserInfo writeAspect1 = AspectGenerationUtils.createCorpUserInfo("email@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1a = AspectGenerationUtils.createCorpUserInfo("email_a@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1a, TEST_AUDIT_STAMP, metadata1); - CorpUserInfo writeAspect1b = AspectGenerationUtils.createCorpUserInfo("email_b@test.com"); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName, writeAspect1b, TEST_AUDIT_STAMP, metadata1); - - String aspectName2 = AspectGenerationUtils.getAspectName(new Status()); - // Ingest Status Aspect - Status writeAspect2 = new Status().setRemoved(true); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2, TEST_AUDIT_STAMP, metadata1); - Status writeAspect2a = new Status().setRemoved(false); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2a, TEST_AUDIT_STAMP, metadata1); - Status writeAspect2b = new Status().setRemoved(true); - _entityServiceImpl.ingestAspectIfNotPresent(entityUrn, aspectName2, writeAspect2b, TEST_AUDIT_STAMP, metadata1); - - assertEquals(_entityServiceImpl.getAspect(entityUrn, 
aspectName, 0), writeAspect1); - assertEquals(_entityServiceImpl.getAspect(entityUrn, aspectName2, 0), writeAspect2); - - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName, 1)); - assertNull(_entityServiceImpl.getAspect(entityUrn, aspectName2, 1)); - - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName, 0, 10).getTotalCount(), 1); - assertEquals(_entityServiceImpl.listLatestAspects(entityUrn.getEntityType(), aspectName2, 0, 10).getTotalCount(), 1); + Urn testUrnTooLongWhenEncoded = + new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); + Urn testUrnSameLengthWhenEncoded = + new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); + // Same length when encoded should be allowed, the encoded one should not be + EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); + Assert.fail("Should have raised IllegalArgumentException for URN too long"); + } catch (IllegalArgumentException e) { + assertEquals( + e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); } - /** - * Equivalence for mocks fails when directly using the object as when converting from RecordTemplate from JSON it - * reorders the fields. This simulates pulling the historical SystemMetadata from the previous call. - */ - protected T simulatePullFromDB(T aspect, Class clazz) throws Exception { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - return RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + // Urn containing disallowed character + Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); + Urn testUrnSpecialCharInvalid = new Urn("li", "corpUser", new TupleKey("bob␟")); + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); + try { + EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); + Assert.fail( + "Should have raised IllegalArgumentException for URN containing the illegal char"); + } catch (IllegalArgumentException e) { + assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); } - - @Test - public void testRestoreIndices() throws Exception { - if (this instanceof EbeanEntityServiceTest) { - String urnStr = "urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset_unique,PROD)"; - Urn entityUrn = UrnUtils.getUrn(urnStr); - List> pairToIngest = new ArrayList<>(); - - final UpstreamLineage upstreamLineage = AspectGenerationUtils.createUpstreamLineage(); - String aspectName1 = AspectGenerationUtils.getAspectName(upstreamLineage); - pairToIngest.add(getAspectRecordPair(upstreamLineage, UpstreamLineage.class)); - - SystemMetadata metadata1 = AspectGenerationUtils.createSystemMetadata(); - - _entityServiceImpl.ingestAspects(entityUrn, pairToIngest, TEST_AUDIT_STAMP, metadata1); - - clearInvocations(_mockProducer); - - RestoreIndicesArgs args = new RestoreIndicesArgs(); - args.setAspectName(UPSTREAM_LINEAGE_ASPECT_NAME); - args.setBatchSize(1); - args.setStart(0); - args.setBatchDelayMs(1L); - args.setNumThreads(1); - args.setUrn(urnStr); - 
_entityServiceImpl.restoreIndices(args, obj -> { - }); - - ArgumentCaptor mclCaptor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog( - Mockito.eq(entityUrn), Mockito.any(), mclCaptor.capture()); - MetadataChangeLog mcl = mclCaptor.getValue(); - assertEquals(mcl.getEntityType(), "dataset"); - assertNull(mcl.getPreviousAspectValue()); - assertNull(mcl.getPreviousSystemMetadata()); - assertEquals(mcl.getChangeType(), ChangeType.RESTATE); - assertEquals(mcl.getSystemMetadata().getProperties().get(FORCE_INDEXING_KEY), "true"); - } - } - - @Test - public void testValidateUrn() throws Exception { - // Valid URN - Urn validTestUrn = new Urn("li", "corpuser", new TupleKey("testKey")); - EntityUtils.validateUrn(_testEntityRegistry, validTestUrn); - - // URN with trailing whitespace - Urn testUrnWithTrailingWhitespace = new Urn("li", "corpuser", new TupleKey("testKey ")); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnWithTrailingWhitespace); - Assert.fail("Should have raised IllegalArgumentException for URN with trailing whitespace"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN with leading or trailing whitespace"); - } - - // Urn purely too long - String stringTooLong = "a".repeat(510); - - Urn testUrnTooLong = new Urn("li", "corpuser", new TupleKey(stringTooLong)); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLong); - Assert.fail("Should have raised IllegalArgumentException for URN too long"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); - } - - // Urn too long when URL encoded - StringBuilder buildStringTooLongWhenEncoded = new StringBuilder(); - StringBuilder buildStringSameLengthWhenEncoded = new StringBuilder(); - for (int i = 0; i < 200; i++) { - buildStringTooLongWhenEncoded.append('>'); - buildStringSameLengthWhenEncoded.append('a'); - } - Urn testUrnTooLongWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringTooLongWhenEncoded.toString())); - Urn testUrnSameLengthWhenEncoded = new Urn("li", "corpUser", new TupleKey(buildStringSameLengthWhenEncoded.toString())); - // Same length when encoded should be allowed, the encoded one should not be - EntityUtils.validateUrn(_testEntityRegistry, testUrnSameLengthWhenEncoded); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnTooLongWhenEncoded); - Assert.fail("Should have raised IllegalArgumentException for URN too long"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: cannot provide an URN longer than 512 bytes (when URL encoded)"); - } - - // Urn containing disallowed character - Urn testUrnSpecialCharValid = new Urn("li", "corpUser", new TupleKey("bob␇")); - Urn testUrnSpecialCharInvalid = new Urn("li", "corpUser", new TupleKey("bob␟")); - EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharValid); - try { - EntityUtils.validateUrn(_testEntityRegistry, testUrnSpecialCharInvalid); - Assert.fail("Should have raised IllegalArgumentException for URN containing the illegal char"); - } catch (IllegalArgumentException e) { - assertEquals(e.getMessage(), "Error: URN cannot contain ␟ character"); - } - - Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); - try { - EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); - Assert.fail("Should have raised IllegalArgumentException for URN with 
mismatched parens"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("mismatched paren nesting")); - } - - Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); - try { - EntityUtils.validateUrn(_testEntityRegistry, invalidType); - Assert.fail("Should have raised IllegalArgumentException for URN with non-existent entity type"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); - } - - Urn validFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); - EntityUtils.validateUrn(_testEntityRegistry, validFabricType); - - Urn invalidFabricType = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); - try { - EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); - Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains(invalidFabricType.toString())); - } - - Urn urnEndingInComma = new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); - try { - EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); - Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains(urnEndingInComma.toString())); - } + Urn urnWithMismatchedParens = new Urn("li", "corpuser", new TupleKey("test(Key")); + try { + EntityUtils.validateUrn(_testEntityRegistry, urnWithMismatchedParens); + Assert.fail("Should have raised IllegalArgumentException for URN with mismatched parens"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("mismatched paren nesting")); } - @Test - public void testUIPreProcessedProposal() throws Exception { - Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); - EditableDatasetProperties datasetProperties = new EditableDatasetProperties(); - datasetProperties.setDescription("Foo Bar"); - MetadataChangeProposal gmce = new MetadataChangeProposal(); - gmce.setEntityUrn(entityUrn); - gmce.setChangeType(ChangeType.UPSERT); - gmce.setEntityType("dataset"); - gmce.setAspectName("editableDatasetProperties"); - SystemMetadata systemMetadata = new SystemMetadata(); - StringMap properties = new StringMap(); - properties.put(APP_SOURCE, UI_SOURCE); - systemMetadata.setProperties(properties); - gmce.setSystemMetadata(systemMetadata); - JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); - GenericAspect genericAspect = new GenericAspect(); - genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); - genericAspect.setContentType("application/json"); - gmce.setAspect(genericAspect); - _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); - ArgumentCaptor captor = ArgumentCaptor.forClass(MetadataChangeLog.class); - verify(_mockProducer, times(1)).produceMetadataChangeLog(Mockito.eq(entityUrn), - Mockito.any(), captor.capture()); - assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + Urn invalidType = new Urn("li", "fakeMadeUpType", new TupleKey("testKey")); + try { + EntityUtils.validateUrn(_testEntityRegistry, invalidType); + Assert.fail( + "Should have raised IllegalArgumentException for URN with non-existent 
entity type"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains("Failed to find entity with name fakeMadeUpType")); } - @Nonnull - protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) throws Exception { - CorpuserUrn corpuserUrn = CorpuserUrn.createFromUrn(entityUrn); - com.linkedin.entity.Entity entity = new com.linkedin.entity.Entity(); - Snapshot snapshot = new Snapshot(); - CorpUserSnapshot corpUserSnapshot = new CorpUserSnapshot(); - List userAspects = new ArrayList<>(); - userAspects.add(CorpUserAspect.create(AspectGenerationUtils.createCorpUserInfo(email))); - corpUserSnapshot.setAspects(new CorpUserAspectArray(userAspects)); - corpUserSnapshot.setUrn(corpuserUrn); - snapshot.setCorpUserSnapshot(corpUserSnapshot); - entity.setValue(snapshot); - return entity; + Urn validFabricType = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD")); + EntityUtils.validateUrn(_testEntityRegistry, validFabricType); + + Urn invalidFabricType = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "prod")); + try { + EntityUtils.validateUrn(_testEntityRegistry, invalidFabricType); + Assert.fail("Should have raised IllegalArgumentException for URN with invalid fabric type"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains(invalidFabricType.toString())); } - protected Pair getAspectRecordPair(T aspect, Class clazz) - throws Exception { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - objectMapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); - RecordTemplate recordTemplate = RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); - return new Pair<>(AspectGenerationUtils.getAspectName(aspect), recordTemplate); + Urn urnEndingInComma = + new Urn("li", "dataset", new TupleKey("urn:li:dataPlatform:foo", "bar", "PROD", "")); + try { + EntityUtils.validateUrn(_testEntityRegistry, urnEndingInComma); + Assert.fail("Should have raised IllegalArgumentException for URN ending in comma"); + } catch (IllegalArgumentException e) { + assertTrue(e.getMessage().contains(urnEndingInComma.toString())); } + } + + @Test + public void testUIPreProcessedProposal() throws Exception { + Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); + EditableDatasetProperties datasetProperties = new EditableDatasetProperties(); + datasetProperties.setDescription("Foo Bar"); + MetadataChangeProposal gmce = new MetadataChangeProposal(); + gmce.setEntityUrn(entityUrn); + gmce.setChangeType(ChangeType.UPSERT); + gmce.setEntityType("dataset"); + gmce.setAspectName("editableDatasetProperties"); + SystemMetadata systemMetadata = new SystemMetadata(); + StringMap properties = new StringMap(); + properties.put(APP_SOURCE, UI_SOURCE); + systemMetadata.setProperties(properties); + gmce.setSystemMetadata(systemMetadata); + JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); + byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + GenericAspect genericAspect = new GenericAspect(); + genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); + genericAspect.setContentType("application/json"); + 
gmce.setAspect(genericAspect); + _entityServiceImpl.ingestProposal(gmce, TEST_AUDIT_STAMP, false); + ArgumentCaptor captor = ArgumentCaptor.forClass(MetadataChangeLog.class); + verify(_mockProducer, times(1)) + .produceMetadataChangeLog(Mockito.eq(entityUrn), Mockito.any(), captor.capture()); + assertEquals(UI_SOURCE, captor.getValue().getSystemMetadata().getProperties().get(APP_SOURCE)); + } + + @Nonnull + protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) + throws Exception { + CorpuserUrn corpuserUrn = CorpuserUrn.createFromUrn(entityUrn); + com.linkedin.entity.Entity entity = new com.linkedin.entity.Entity(); + Snapshot snapshot = new Snapshot(); + CorpUserSnapshot corpUserSnapshot = new CorpUserSnapshot(); + List userAspects = new ArrayList<>(); + userAspects.add(CorpUserAspect.create(AspectGenerationUtils.createCorpUserInfo(email))); + corpUserSnapshot.setAspects(new CorpUserAspectArray(userAspects)); + corpUserSnapshot.setUrn(corpuserUrn); + snapshot.setCorpUserSnapshot(corpUserSnapshot); + entity.setValue(snapshot); + return entity; + } + + protected Pair getAspectRecordPair( + T aspect, Class clazz) throws Exception { + final ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + objectMapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + RecordTemplate recordTemplate = + RecordUtils.toRecordTemplate(clazz, objectMapper.writeValueAsString(aspect)); + return new Pair<>(AspectGenerationUtils.getAspectName(aspect), recordTemplate); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java index e90ffd8a4bcb7..680d4079851eb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/TestEntityRegistry.java @@ -14,16 +14,15 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class TestEntityRegistry implements EntityRegistry { private final Map entityNameToSpec; public TestEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) - .buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); } @Nonnull diff --git a/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java index c7ab24e87a873..a98386f6f871b 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/extractor/AspectExtractorTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.extractor; +import static org.testng.AssertJUnit.assertEquals; + import com.datahub.test.TestEntityAspect; import com.datahub.test.TestEntityAspectArray; import com.datahub.test.TestEntityInfo; @@ -12,9 +14,6 @@ import 
java.util.Map; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - - public class AspectExtractorTest { @Test public void testExtractor() { @@ -23,7 +22,8 @@ public void testExtractor() { TestEntityKey testEntityKey = TestEntityUtil.getTestEntityKey(urn); TestEntityInfo testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); snapshot.setAspects( - new TestEntityAspectArray(TestEntityAspect.create(testEntityKey), TestEntityAspect.create(testEntityInfo))); + new TestEntityAspectArray( + TestEntityAspect.create(testEntityKey), TestEntityAspect.create(testEntityInfo))); Map result = AspectExtractor.extractAspectRecords(snapshot); assertEquals(result.size(), 2); assertEquals(result.get("testEntityKey"), testEntityKey); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java index cbc4825a3b557..1adb5d1ab3952 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/extractor/FieldExtractorTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.extractor; +import static org.testng.Assert.assertEquals; + import com.datahub.test.TestEntityInfo; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntitySpecBuilder; @@ -15,65 +17,91 @@ import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; - - public class FieldExtractorTest { @Test public void testExtractor() { EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); AspectSpec testEntityInfoSpec = testEntitySpec.getAspectSpec("testEntityInfo"); - Map nameToSpec = testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); + Map nameToSpec = + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); TestEntityInfo testEntityInfo = new TestEntityInfo(); Map> result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); - assertEquals(result, testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); + assertEquals( + result, + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); Urn urn = TestEntityUtil.getTestEntityUrn(); testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); - result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); + result = + FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); assertEquals(result.get(nameToSpec.get("textFieldOverride")), ImmutableList.of("test")); assertEquals(result.get(nameToSpec.get("foreignKey")), ImmutableList.of()); assertEquals(result.get(nameToSpec.get("nestedForeignKey")), ImmutableList.of(urn)); - assertEquals(result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); + assertEquals( + result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); assertEquals(result.get(nameToSpec.get("nestedIntegerField")), ImmutableList.of(1)); - 
assertEquals(result.get(nameToSpec.get("nestedArrayStringField")), ImmutableList.of("nestedArray1", "nestedArray2")); - assertEquals(result.get(nameToSpec.get("nestedArrayArrayField")), ImmutableList.of("testNestedArray1", "testNestedArray2")); - assertEquals(result.get(nameToSpec.get("customProperties")), ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); - assertEquals(result.get(nameToSpec.get("esObjectField")), ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); + assertEquals( + result.get(nameToSpec.get("nestedArrayStringField")), + ImmutableList.of("nestedArray1", "nestedArray2")); + assertEquals( + result.get(nameToSpec.get("nestedArrayArrayField")), + ImmutableList.of("testNestedArray1", "testNestedArray2")); + assertEquals( + result.get(nameToSpec.get("customProperties")), + ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); + assertEquals( + result.get(nameToSpec.get("esObjectField")), + ImmutableList.of("key1=value1", "key2=value2", "shortValue=123", "longValue=0123456789")); } @Test public void testExtractorMaxValueLength() { EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec(); AspectSpec testEntityInfoSpec = testEntitySpec.getAspectSpec("testEntityInfo"); - Map nameToSpec = testEntityInfoSpec.getSearchableFieldSpecs() - .stream() - .collect(Collectors.toMap(spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); + Map nameToSpec = + testEntityInfoSpec.getSearchableFieldSpecs().stream() + .collect( + Collectors.toMap( + spec -> spec.getSearchableAnnotation().getFieldName(), Function.identity())); TestEntityInfo testEntityInfo = new TestEntityInfo(); Map> result = - FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); - assertEquals(result, testEntityInfoSpec.getSearchableFieldSpecs() - .stream() + FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs()); + assertEquals( + result, + testEntityInfoSpec.getSearchableFieldSpecs().stream() .collect(Collectors.toMap(Function.identity(), spec -> ImmutableList.of()))); Urn urn = TestEntityUtil.getTestEntityUrn(); testEntityInfo = TestEntityUtil.getTestEntityInfo(urn); - result = FieldExtractor.extractFields(testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs(), 1); + result = + FieldExtractor.extractFields( + testEntityInfo, testEntityInfoSpec.getSearchableFieldSpecs(), 1); assertEquals(result.get(nameToSpec.get("textFieldOverride")), ImmutableList.of("test")); assertEquals(result.get(nameToSpec.get("foreignKey")), ImmutableList.of()); assertEquals(result.get(nameToSpec.get("nestedForeignKey")), ImmutableList.of(urn)); - assertEquals(result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); + assertEquals( + result.get(nameToSpec.get("textArrayField")), ImmutableList.of("testArray1", "testArray2")); assertEquals(result.get(nameToSpec.get("nestedIntegerField")), ImmutableList.of(1)); - assertEquals(result.get(nameToSpec.get("nestedArrayStringField")), ImmutableList.of("nestedArray1", "nestedArray2")); - assertEquals(result.get(nameToSpec.get("nestedArrayArrayField")), ImmutableList.of("testNestedArray1", "testNestedArray2")); - assertEquals(result.get(nameToSpec.get("customProperties")), ImmutableList.of(), "Expected no matching values because of value limit of 1"); - assertEquals(result.get(nameToSpec.get("esObjectField")), ImmutableList.of(), "Expected no matching values 
because of value limit of 1"); + assertEquals( + result.get(nameToSpec.get("nestedArrayStringField")), + ImmutableList.of("nestedArray1", "nestedArray2")); + assertEquals( + result.get(nameToSpec.get("nestedArrayArrayField")), + ImmutableList.of("testNestedArray1", "testNestedArray2")); + assertEquals( + result.get(nameToSpec.get("customProperties")), + ImmutableList.of(), + "Expected no matching values because of value limit of 1"); + assertEquals( + result.get(nameToSpec.get("esObjectField")), + ImmutableList.of(), + "Expected no matching values because of value limit of 1"); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java index 38a20ef4b7a9b..2af1eeb46f2ba 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/EdgeTest.java @@ -1,17 +1,20 @@ package com.linkedin.metadata.graph; +import static org.testng.Assert.*; + import com.linkedin.common.urn.UrnUtils; import java.util.Collections; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EdgeTest { - private static final String SOURCE_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:foo,source1,PROD)"; - private static final String SOURCE_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:foo,source2,PROD)"; - private static final String DESTINATION_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:foo,destination1,PROD)"; - private static final String DESTINATION_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:foo,destination2,PROD)"; + private static final String SOURCE_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:foo,source1,PROD)"; + private static final String SOURCE_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:foo,source2,PROD)"; + private static final String DESTINATION_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:foo,destination1,PROD)"; + private static final String DESTINATION_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:foo,destination2,PROD)"; private static final String DOWNSTREAM_RELATIONSHIP_TYPE = "DownstreamOf"; private static final Long TIMESTAMP_1 = 1L; private static final Long TIMESTAMP_2 = 2L; @@ -21,39 +24,43 @@ public class EdgeTest { @Test public void testEdgeEquals() { // First edge - final Edge edge1 = new Edge( - UrnUtils.getUrn(SOURCE_URN_1), - UrnUtils.getUrn(DESTINATION_URN_1), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_2), - Collections.emptyMap()); + final Edge edge1 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_1), + UrnUtils.getUrn(DESTINATION_URN_1), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_2), + Collections.emptyMap()); - // Second edge has same source, destination, and relationship type as edge1, and should be considered the same edge. + // Second edge has same source, destination, and relationship type as edge1, and should be + // considered the same edge. // All other fields are different. 
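      // [Editor's note] The assertions below pin down the Edge equality contract this
      // test relies on: two edges are equal iff they share the (source, destination,
      // relationshipType) triple; timestamps, actors, and properties are ignored.
      // A minimal sketch of an equals/hashCode pair consistent with that contract --
      // field names are assumed from the constructor argument order above, and this
      // is not taken from the actual Edge implementation:
      //
      //   @Override
      //   public boolean equals(Object o) {
      //     if (this == o) return true;
      //     if (!(o instanceof Edge)) return false;
      //     Edge other = (Edge) o;
      //     return source.equals(other.source)
      //         && destination.equals(other.destination)
      //         && relationshipType.equals(other.relationshipType);
      //   }
      //
      //   @Override
      //   public int hashCode() {
      //     return java.util.Objects.hash(source, destination, relationshipType);
      //   }
      //
      // Such a definition yields exactly what this test asserts: edge1.equals(edge2)
      // despite differing timestamps/actors, and !edge1.equals(edge3).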
- final Edge edge2 = new Edge( - UrnUtils.getUrn(SOURCE_URN_1), - UrnUtils.getUrn(DESTINATION_URN_1), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_2, - UrnUtils.getUrn(ACTOR_URN_2), - TIMESTAMP_2, - UrnUtils.getUrn(ACTOR_URN_2), - Collections.emptyMap()); + final Edge edge2 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_1), + UrnUtils.getUrn(DESTINATION_URN_1), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_2, + UrnUtils.getUrn(ACTOR_URN_2), + TIMESTAMP_2, + UrnUtils.getUrn(ACTOR_URN_2), + Collections.emptyMap()); assertEquals(edge1, edge2); // Third edge has different source and destination as edge1, and thus is not the same edge. - final Edge edge3 = new Edge( - UrnUtils.getUrn(SOURCE_URN_2), - UrnUtils.getUrn(DESTINATION_URN_2), - DOWNSTREAM_RELATIONSHIP_TYPE, - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - TIMESTAMP_1, - UrnUtils.getUrn(ACTOR_URN_1), - Collections.emptyMap()); + final Edge edge3 = + new Edge( + UrnUtils.getUrn(SOURCE_URN_2), + UrnUtils.getUrn(DESTINATION_URN_2), + DOWNSTREAM_RELATIONSHIP_TYPE, + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + TIMESTAMP_1, + UrnUtils.getUrn(ACTOR_URN_1), + Collections.emptyMap()); assertNotEquals(edge1, edge3); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java index 12cd24ae9986d..3a51344d5779d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/GraphServiceTestBase.java @@ -1,5 +1,14 @@ package com.linkedin.metadata.graph; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.DataFlowUrn; import com.linkedin.common.urn.DataJobUrn; @@ -27,64 +36,56 @@ import java.util.stream.IntStream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - - /** - * Base class for testing any GraphService implementation. - * Derive the test class from this base and get your GraphService implementation - * tested with all these tests. + * Base class for testing any GraphService implementation. Derive the test class from this base and + * get your GraphService implementation tested with all these tests. * - * You can add implementation specific tests in derived classes, or add general tests - * here and have all existing implementations tested in the same way. + *
<p>You can add implementation specific tests in derived classes, or add general tests here and + * have all existing implementations tested in the same way. * - * The `getPopulatedGraphService` method calls `GraphService.addEdge` to provide a populated Graph. - * Feel free to add a test to your test implementation that calls `getPopulatedGraphService` and - * asserts the state of the graph in an implementation specific way. + *
<p>The `getPopulatedGraphService` method calls `GraphService.addEdge` to provide a populated + * Graph. Feel free to add a test to your test implementation that calls `getPopulatedGraphService` + * and asserts the state of the graph in an implementation specific way. */ -abstract public class GraphServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class GraphServiceTestBase extends AbstractTestNGSpringContextTests { private static class RelatedEntityComparator implements Comparator<RelatedEntity> { @Override public int compare(RelatedEntity left, RelatedEntity right) { - int cmp = left.relationshipType.compareTo(right.relationshipType); - if (cmp != 0) { - return cmp; - } - return left.urn.compareTo(right.urn); + int cmp = left.relationshipType.compareTo(right.relationshipType); + if (cmp != 0) { + return cmp; + } + return left.urn.compareTo(right.urn); } } - protected static final RelatedEntityComparator RELATED_ENTITY_COMPARATOR = new RelatedEntityComparator(); + protected static final RelatedEntityComparator RELATED_ENTITY_COMPARATOR = + new RelatedEntityComparator(); - /** - * Some test URN types. - */ + /** Some test URN types. */ protected static String datasetType = "dataset"; + protected static String userType = "user"; - /** - * Some test datasets. - */ - protected static String datasetOneUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; - protected static String datasetTwoUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; - protected static String datasetThreeUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; - protected static String datasetFourUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; - protected static String datasetFiveUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; + /** Some test datasets. */ + protected static String datasetOneUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; + + protected static String datasetTwoUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; + protected static String datasetThreeUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; + protected static String datasetFourUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; + protected static String datasetFiveUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; protected static Urn datasetOneUrn = createFromString(datasetOneUrnString); protected static Urn datasetTwoUrn = createFromString(datasetTwoUrnString); @@ -94,73 +95,85 @@ public int compare(RelatedEntity left, RelatedEntity right) { protected static String unknownUrnString = "urn:li:unknown:(urn:li:unknown:Unknown)"; - /** - * Some dataset owners. - */ - protected static String userOneUrnString = "urn:li:" + userType + ":(urn:li:user:system,Ingress,PROD)"; - protected static String userTwoUrnString = "urn:li:" + userType + ":(urn:li:user:individual,UserA,DEV)"; + /** Some dataset owners. 
*/ + protected static String userOneUrnString = + "urn:li:" + userType + ":(urn:li:user:system,Ingress,PROD)"; + + protected static String userTwoUrnString = + "urn:li:" + userType + ":(urn:li:user:individual,UserA,DEV)"; protected static Urn userOneUrn = createFromString(userOneUrnString); protected static Urn userTwoUrn = createFromString(userTwoUrnString); protected static Urn unknownUrn = createFromString(unknownUrnString); - /** - * Some data jobs - */ - protected static Urn dataJobOneUrn = new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job1"); - protected static Urn dataJobTwoUrn = new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job2"); + /** Some data jobs */ + protected static Urn dataJobOneUrn = + new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job1"); - /** - * Some test relationships. - */ + protected static Urn dataJobTwoUrn = + new DataJobUrn(new DataFlowUrn("orchestrator", "flow", "cluster"), "job2"); + + /** Some test relationships. */ protected static String downstreamOf = "DownstreamOf"; + protected static String hasOwner = "HasOwner"; protected static String knowsUser = "KnowsUser"; protected static String produces = "Produces"; protected static String consumes = "Consumes"; - protected static Set allRelationshipTypes = new HashSet<>(Arrays.asList(downstreamOf, hasOwner, knowsUser)); - - /** - * Some expected related entities. - */ - protected static RelatedEntity downstreamOfDatasetOneRelatedEntity = new RelatedEntity(downstreamOf, datasetOneUrnString); - protected static RelatedEntity downstreamOfDatasetTwoRelatedEntity = new RelatedEntity(downstreamOf, datasetTwoUrnString); - protected static RelatedEntity downstreamOfDatasetThreeRelatedEntity = new RelatedEntity(downstreamOf, datasetThreeUrnString); - protected static RelatedEntity downstreamOfDatasetFourRelatedEntity = new RelatedEntity(downstreamOf, datasetFourUrnString); - - protected static RelatedEntity hasOwnerDatasetOneRelatedEntity = new RelatedEntity(hasOwner, datasetOneUrnString); - protected static RelatedEntity hasOwnerDatasetTwoRelatedEntity = new RelatedEntity(hasOwner, datasetTwoUrnString); - protected static RelatedEntity hasOwnerDatasetThreeRelatedEntity = new RelatedEntity(hasOwner, datasetThreeUrnString); - protected static RelatedEntity hasOwnerDatasetFourRelatedEntity = new RelatedEntity(hasOwner, datasetFourUrnString); - protected static RelatedEntity hasOwnerUserOneRelatedEntity = new RelatedEntity(hasOwner, userOneUrnString); - protected static RelatedEntity hasOwnerUserTwoRelatedEntity = new RelatedEntity(hasOwner, userTwoUrnString); - - protected static RelatedEntity knowsUserOneRelatedEntity = new RelatedEntity(knowsUser, userOneUrnString); - protected static RelatedEntity knowsUserTwoRelatedEntity = new RelatedEntity(knowsUser, userTwoUrnString); - - /** - * Some relationship filters. - */ - protected static RelationshipFilter outgoingRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING); - protected static RelationshipFilter incomingRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING); - protected static RelationshipFilter undirectedRelationships = newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.UNDIRECTED); - - /** - * Any source and destination type value. - */ + protected static Set allRelationshipTypes = + new HashSet<>(Arrays.asList(downstreamOf, hasOwner, knowsUser)); + + /** Some expected related entities. 
*/ + protected static RelatedEntity downstreamOfDatasetOneRelatedEntity = + new RelatedEntity(downstreamOf, datasetOneUrnString); + + protected static RelatedEntity downstreamOfDatasetTwoRelatedEntity = + new RelatedEntity(downstreamOf, datasetTwoUrnString); + protected static RelatedEntity downstreamOfDatasetThreeRelatedEntity = + new RelatedEntity(downstreamOf, datasetThreeUrnString); + protected static RelatedEntity downstreamOfDatasetFourRelatedEntity = + new RelatedEntity(downstreamOf, datasetFourUrnString); + + protected static RelatedEntity hasOwnerDatasetOneRelatedEntity = + new RelatedEntity(hasOwner, datasetOneUrnString); + protected static RelatedEntity hasOwnerDatasetTwoRelatedEntity = + new RelatedEntity(hasOwner, datasetTwoUrnString); + protected static RelatedEntity hasOwnerDatasetThreeRelatedEntity = + new RelatedEntity(hasOwner, datasetThreeUrnString); + protected static RelatedEntity hasOwnerDatasetFourRelatedEntity = + new RelatedEntity(hasOwner, datasetFourUrnString); + protected static RelatedEntity hasOwnerUserOneRelatedEntity = + new RelatedEntity(hasOwner, userOneUrnString); + protected static RelatedEntity hasOwnerUserTwoRelatedEntity = + new RelatedEntity(hasOwner, userTwoUrnString); + + protected static RelatedEntity knowsUserOneRelatedEntity = + new RelatedEntity(knowsUser, userOneUrnString); + protected static RelatedEntity knowsUserTwoRelatedEntity = + new RelatedEntity(knowsUser, userTwoUrnString); + + /** Some relationship filters. */ + protected static RelationshipFilter outgoingRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING); + + protected static RelationshipFilter incomingRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING); + protected static RelationshipFilter undirectedRelationships = + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.UNDIRECTED); + + /** Any source and destination type value. */ protected static @Nullable List anyType = null; - /** - * Timeout used to test concurrent ops in doTestConcurrentOp. - */ + /** Timeout used to test concurrent ops in doTestConcurrentOp. */ protected Duration getTestConcurrentOpTimeout() { - return Duration.ofMinutes(1); + return Duration.ofMinutes(1); } @BeforeMethod public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -176,26 +189,26 @@ public void testStaticUrns() { } /** - * Provides the current GraphService instance to test. This is being called by the test method - * at most once. The serviced graph should be empty. + * Provides the current GraphService instance to test. This is being called by the test method at + * most once. The serviced graph should be empty. * * @return the GraphService instance to test * @throws Exception on failure */ @Nonnull - abstract protected GraphService getGraphService() throws Exception; + protected abstract GraphService getGraphService() throws Exception; /** - * Allows the specific GraphService test implementation to wait for GraphService writes to - * be synced / become available to reads. + * Allows the specific GraphService test implementation to wait for GraphService writes to be + * synced / become available to reads. 
* * @throws Exception on failure */ - abstract protected void syncAfterWrite() throws Exception; + protected abstract void syncAfterWrite() throws Exception; /** - * Calls getGraphService to retrieve the test GraphService and populates it - * with edges via `GraphService.addEdge`. + * Calls getGraphService to retrieve the test GraphService and populates it with edges via + * `GraphService.addEdge`. * * @return test GraphService * @throws Exception on failure @@ -203,19 +216,17 @@ public void testStaticUrns() { protected GraphService getPopulatedGraphService() throws Exception { GraphService service = getGraphService(); - List edges = Arrays.asList( + List edges = + Arrays.asList( new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetTwoUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetThreeUrn, userTwoUrn, hasOwner, null, null, null, null, null), new Edge(datasetFourUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null), - new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null) - ); + new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null)); edges.forEach(service::addEdge); syncAfterWrite(); @@ -226,27 +237,24 @@ protected GraphService getPopulatedGraphService() throws Exception { protected GraphService getLineagePopulatedGraphService() throws Exception { GraphService service = getGraphService(); - List edges = Arrays.asList( + List edges = + Arrays.asList( new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null), new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetTwoUrn, userOneUrn, hasOwner, null, null, null, null, null), new Edge(datasetThreeUrn, userTwoUrn, hasOwner, null, null, null, null, null), new Edge(datasetFourUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null), new Edge(userTwoUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(dataJobOneUrn, datasetOneUrn, consumes, null, null, null, null, null), new Edge(dataJobOneUrn, datasetTwoUrn, consumes, null, null, null, null, null), new Edge(dataJobOneUrn, datasetThreeUrn, produces, null, null, null, null, null), new Edge(dataJobOneUrn, datasetFourUrn, produces, null, null, null, null, null), new Edge(dataJobTwoUrn, datasetOneUrn, consumes, null, null, null, null, null), new Edge(dataJobTwoUrn, datasetTwoUrn, consumes, null, null, null, null, null), - new Edge(dataJobTwoUrn, dataJobOneUrn, downstreamOf, null, null, null, null, null) - ); + new Edge(dataJobTwoUrn, dataJobOneUrn, downstreamOf, null, null, null, null, null)); edges.forEach(service::addEdge); syncAfterWrite(); @@ -254,8 +262,7 @@ protected GraphService getLineagePopulatedGraphService() throws Exception { return service; } - protected static @Nullable - Urn createFromString(@Nonnull String rawUrn) { + protected static @Nullable Urn createFromString(@Nonnull String rawUrn) { try { return 
Urn.createFromString(rawUrn); } catch (URISyntaxException e) { @@ -264,10 +271,12 @@ Urn createFromString(@Nonnull String rawUrn) { } protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, List expected) { - assertEqualsAnyOrder(actual, new RelatedEntitiesResult(0, expected.size(), expected.size(), expected)); + assertEqualsAnyOrder( + actual, new RelatedEntitiesResult(0, expected.size(), expected.size(), expected)); } - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { assertEquals(actual.start, expected.start); assertEquals(actual.count, expected.count); assertEquals(actual.total, expected.total); @@ -276,141 +285,156 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie protected void assertEqualsAnyOrder(List actual, List expected) { assertEquals( - actual.stream().sorted().collect(Collectors.toList()), - expected.stream().sorted().collect(Collectors.toList()) - ); + actual.stream().sorted().collect(Collectors.toList()), + expected.stream().sorted().collect(Collectors.toList())); } - protected void assertEqualsAnyOrder(List actual, List expected, Comparator comparator) { + protected void assertEqualsAnyOrder( + List actual, List expected, Comparator comparator) { assertEquals( - actual.stream().sorted(comparator).collect(Collectors.toList()), - expected.stream().sorted(comparator).collect(Collectors.toList()) - ); + actual.stream().sorted(comparator).collect(Collectors.toList()), + expected.stream().sorted(comparator).collect(Collectors.toList())); } @DataProvider(name = "AddEdgeTests") public Object[][] getAddEdgeTests() { - return new Object[][]{ - new Object[]{ - Arrays.asList(), - Arrays.asList(), - Arrays.asList() - }, - new Object[]{ - Arrays.asList(new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)), - Arrays.asList(downstreamOfDatasetTwoRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity) - }, - new Object[]{ - Arrays.asList( - new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetTwoUrn, datasetThreeUrn, downstreamOf, null, null, null, null, null) - ), - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity), - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[]{ - Arrays.asList( - new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), - new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), - new Edge(datasetTwoUrn, userTwoUrn, hasOwner, null, null, null, null, null), - new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null) - ), - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserTwoRelatedEntity - ), - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, - hasOwnerDatasetOneRelatedEntity, - hasOwnerDatasetTwoRelatedEntity, - knowsUserOneRelatedEntity - ) - }, - new Object[]{ - Arrays.asList( - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), - new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null) - ), - Arrays.asList(knowsUserOneRelatedEntity), - Arrays.asList(knowsUserOneRelatedEntity) - } + return new Object[][] { + new Object[] 
{Arrays.asList(), Arrays.asList(), Arrays.asList()}, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)), + Arrays.asList(downstreamOfDatasetTwoRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), + new Edge(datasetTwoUrn, datasetThreeUrn, downstreamOf, null, null, null, null, null)), + Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity), + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(datasetOneUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null), + new Edge(datasetOneUrn, userOneUrn, hasOwner, null, null, null, null, null), + new Edge(datasetTwoUrn, userTwoUrn, hasOwner, null, null, null, null, null), + new Edge(userOneUrn, userTwoUrn, knowsUser, null, null, null, null, null)), + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, + hasOwnerUserTwoRelatedEntity, + knowsUserTwoRelatedEntity), + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + knowsUserOneRelatedEntity) + }, + new Object[] { + Arrays.asList( + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null), + new Edge(userOneUrn, userOneUrn, knowsUser, null, null, null, null, null)), + Arrays.asList(knowsUserOneRelatedEntity), + Arrays.asList(knowsUserOneRelatedEntity) + } }; } @Test(dataProvider = "AddEdgeTests") - public void testAddEdge(List edges, List expectedOutgoing, List expectedIncoming) throws Exception { - GraphService service = getGraphService(); - - edges.forEach(service::addEdge); - syncAfterWrite(); - - RelatedEntitiesResult relatedOutgoing = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships, - 0, 100 - ); - assertEqualsAnyOrder(relatedOutgoing, expectedOutgoing); - - RelatedEntitiesResult relatedIncoming = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships, - 0, 100 - ); - assertEqualsAnyOrder(relatedIncoming, expectedIncoming); + public void testAddEdge( + List edges, List expectedOutgoing, List expectedIncoming) + throws Exception { + GraphService service = getGraphService(); + + edges.forEach(service::addEdge); + syncAfterWrite(); + + RelatedEntitiesResult relatedOutgoing = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder(relatedOutgoing, expectedOutgoing); + + RelatedEntitiesResult relatedIncoming = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder(relatedIncoming, expectedIncoming); } @Test public void testPopulatedGraphService() throws Exception { - GraphService service = getPopulatedGraphService(); - - RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - 
Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); - assertEqualsAnyOrder( - relatedOutgoingEntitiesBeforeRemove, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - RelatedEntitiesResult relatedIncomingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships, - 0, 100); - assertEqualsAnyOrder( - relatedIncomingEntitiesBeforeRemove, - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity, - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); + GraphService service = getPopulatedGraphService(); + + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedOutgoingEntitiesBeforeRemove, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, + knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity)); + RelatedEntitiesResult relatedIncomingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedIncomingEntitiesBeforeRemove, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, + hasOwnerDatasetFourRelatedEntity, + knowsUserOneRelatedEntity, + knowsUserTwoRelatedEntity)); } @Test public void testPopulatedGraphServiceGetLineage() throws Exception { GraphService service = getLineagePopulatedGraphService(); - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 0); assertEquals(upstreamLineage.getRelationships().size(), 0); - EntityLineageResult downstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); + EntityLineageResult downstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 3); assertEquals(downstreamLineage.getRelationships().size(), 3); - Map relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + Map relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetTwoUrn)); assertEquals(relationships.get(datasetTwoUrn).getType(), downstreamOf); assertTrue(relationships.containsKey(dataJobOneUrn)); @@ -421,22 +445,25 @@ public void 
testPopulatedGraphServiceGetLineage() throws Exception { upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 2); assertEquals(upstreamLineage.getRelationships().size(), 2); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetTwoUrn)); assertEquals(relationships.get(datasetTwoUrn).getType(), downstreamOf); assertTrue(relationships.containsKey(dataJobOneUrn)); assertEquals(relationships.get(dataJobOneUrn).getType(), produces); - downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); + downstreamLineage = + service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 0); assertEquals(downstreamLineage.getRelationships().size(), 0); upstreamLineage = service.getLineage(dataJobOneUrn, LineageDirection.UPSTREAM, 0, 1000, 1); assertEquals(upstreamLineage.getTotal().intValue(), 2); assertEquals(upstreamLineage.getRelationships().size(), 2); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetOneUrn)); assertEquals(relationships.get(datasetOneUrn).getType(), consumes); assertTrue(relationships.containsKey(datasetTwoUrn)); @@ -445,8 +472,9 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { downstreamLineage = service.getLineage(dataJobOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 1); assertEquals(downstreamLineage.getTotal().intValue(), 3); assertEquals(downstreamLineage.getRelationships().size(), 3); - relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); + relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); assertTrue(relationships.containsKey(datasetThreeUrn)); assertEquals(relationships.get(datasetThreeUrn).getType(), produces); assertTrue(relationships.containsKey(datasetFourUrn)); @@ -458,458 +486,425 @@ public void testPopulatedGraphServiceGetLineage() throws Exception { @DataProvider(name = "FindRelatedEntitiesSourceEntityFilterTests") public Object[][] getFindRelatedEntitiesSourceEntityFilterTests() { return new Object[][] { - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - newFilter("urn", datasetTwoUrnString), - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - - new Object[] { - newFilter("urn", datasetTwoUrnString), - 
Arrays.asList(hasOwner),
-            outgoingRelationships,
-            Arrays.asList(hasOwnerUserOneRelatedEntity)
-        },
-        new Object[] {
-            newFilter("urn", datasetTwoUrnString),
-            Arrays.asList(hasOwner),
-            incomingRelationships,
-            Arrays.asList()
-        },
-        new Object[] {
-            newFilter("urn", datasetTwoUrnString),
-            Arrays.asList(hasOwner),
-            undirectedRelationships,
-            Arrays.asList(hasOwnerUserOneRelatedEntity)
-        },
-
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(hasOwner),
-            outgoingRelationships,
-            Arrays.asList()
-        },
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(hasOwner),
-            incomingRelationships,
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity)
-        },
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(hasOwner),
-            undirectedRelationships,
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity)
-        }
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(
+            downstreamOfDatasetOneRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity,
+            downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(hasOwner),
+        outgoingRelationships,
+        Arrays.asList(hasOwnerUserOneRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(hasOwner),
+        incomingRelationships,
+        Arrays.asList()
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(hasOwner),
+        undirectedRelationships,
+        Arrays.asList(hasOwnerUserOneRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(hasOwner),
+        outgoingRelationships,
+        Arrays.asList()
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(hasOwner),
+        incomingRelationships,
+        Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(hasOwner),
+        undirectedRelationships,
+        Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity)
+      }
    };
  }

  @Test(dataProvider = "FindRelatedEntitiesSourceEntityFilterTests")
-  public void testFindRelatedEntitiesSourceEntityFilter(Filter sourceEntityFilter,
-      List<String> relationshipTypes,
-      RelationshipFilter relationships,
-      List<RelatedEntity> expectedRelatedEntities) throws Exception {
+  public void testFindRelatedEntitiesSourceEntityFilter(
+      Filter sourceEntityFilter,
+      List<String> relationshipTypes,
+      RelationshipFilter relationships,
+      List<RelatedEntity> expectedRelatedEntities)
+      throws Exception {
    doTestFindRelatedEntities(
-        sourceEntityFilter,
-        EMPTY_FILTER,
-        relationshipTypes,
-        relationships,
-        expectedRelatedEntities
-    );
+        sourceEntityFilter,
+        EMPTY_FILTER,
+        relationshipTypes,
+        relationships,
+        expectedRelatedEntities);
  }

  @DataProvider(name = "FindRelatedEntitiesDestinationEntityFilterTests")
  public Object[][] getFindRelatedEntitiesDestinationEntityFilterTests() {
    return new Object[][] {
-        new Object[] {
-            newFilter("urn", datasetTwoUrnString),
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList(downstreamOfDatasetTwoRelatedEntity)
-        },
-        new Object[] {
-            newFilter("urn", datasetTwoUrnString),
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList(downstreamOfDatasetTwoRelatedEntity)
-        },
-        new Object[] {
-            newFilter("urn", datasetTwoUrnString),
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList(downstreamOfDatasetTwoRelatedEntity)
-        },
-
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList()
-        },
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList()
-        },
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList()
-        },
-
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(hasOwner),
-            outgoingRelationships,
-            Arrays.asList(hasOwnerUserOneRelatedEntity)
-        },
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(hasOwner),
-            incomingRelationships,
-            Arrays.asList()
-        },
-        new Object[] {
-            newFilter("urn", userOneUrnString),
-            Arrays.asList(hasOwner),
-            undirectedRelationships,
-            Arrays.asList(hasOwnerUserOneRelatedEntity)
-        }
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(downstreamOfDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", datasetTwoUrnString),
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(downstreamOfDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList()
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList()
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList()
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(hasOwner),
+        outgoingRelationships,
+        Arrays.asList(hasOwnerUserOneRelatedEntity)
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(hasOwner),
+        incomingRelationships,
+        Arrays.asList()
+      },
+      new Object[] {
+        newFilter("urn", userOneUrnString),
+        Arrays.asList(hasOwner),
+        undirectedRelationships,
+        Arrays.asList(hasOwnerUserOneRelatedEntity)
+      }
    };
  }

  @Test(dataProvider = "FindRelatedEntitiesDestinationEntityFilterTests")
-  public void testFindRelatedEntitiesDestinationEntityFilter(Filter destinationEntityFilter,
-      List<String> relationshipTypes,
-      RelationshipFilter relationships,
-      List<RelatedEntity> expectedRelatedEntities) throws Exception {
+  public void testFindRelatedEntitiesDestinationEntityFilter(
+      Filter destinationEntityFilter,
+      List<String> relationshipTypes,
+      RelationshipFilter relationships,
+      List<RelatedEntity> expectedRelatedEntities)
+      throws Exception {
    doTestFindRelatedEntities(
-        EMPTY_FILTER,
-        destinationEntityFilter,
-        relationshipTypes,
-        relationships,
-        expectedRelatedEntities
-    );
+        EMPTY_FILTER,
+        destinationEntityFilter,
+        relationshipTypes,
+        relationships,
+        expectedRelatedEntities);
  }

  private void doTestFindRelatedEntities(
-      final Filter sourceEntityFilter,
-      final Filter destinationEntityFilter,
-      List<String> relationshipTypes,
-      final RelationshipFilter relationshipFilter,
-      List<RelatedEntity> expectedRelatedEntities
-  ) throws Exception {
+      final Filter sourceEntityFilter,
+      final Filter destinationEntityFilter,
+      List<String> relationshipTypes,
+      final RelationshipFilter relationshipFilter,
+      List<RelatedEntity> expectedRelatedEntities)
+      throws Exception {
    GraphService service = getPopulatedGraphService();
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        anyType, sourceEntityFilter,
-        anyType, destinationEntityFilter,
-        relationshipTypes, relationshipFilter,
-        0, 10
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            anyType,
+            sourceEntityFilter,
+            anyType,
+            destinationEntityFilter,
+            relationshipTypes,
+            relationshipFilter,
+            0,
+            10);
    assertEqualsAnyOrder(relatedEntities, expectedRelatedEntities);
  }

  @DataProvider(name = "FindRelatedEntitiesSourceTypeTests")
  public Object[][] getFindRelatedEntitiesSourceTypeTests() {
-    return new Object[][]{
-        new Object[] {
-            null,
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)
-        },
-        new Object[] {
-            null,
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
-        },
-        new Object[] {
-            null,
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList(
-                downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
-                downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity
-            )
-        },
-
-        // "" used to be any type before v0.9.0, which is now encoded by null
-        new Object[] {
-            "",
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Collections.emptyList()
-        },
-        new Object[] {
-            "",
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Collections.emptyList()
-        },
-        new Object[] {
-            "",
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Collections.emptyList()
-        },
-
-        new Object[]{
-            datasetType,
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)
-        },
-        new Object[]{
-            datasetType,
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
-        },
-        new Object[]{
-            datasetType,
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList(
-                downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
-                downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity
-            )
-        },
-
-        new Object[]{
-            userType,
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList()
-        },
-        new Object[]{
-            userType,
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList()
-        },
-        new Object[]{
-            userType,
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList()
-        },
-
-        new Object[]{
-            userType,
-            Arrays.asList(hasOwner),
-            outgoingRelationships,
-            Arrays.asList()
-        },
-        new Object[]{
-            userType,
-            Arrays.asList(hasOwner),
-            incomingRelationships,
-            Arrays.asList(
-                hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity,
-                hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity
-            )
-        },
-        new Object[]{
-            userType,
-            Arrays.asList(hasOwner),
-            undirectedRelationships,
-            Arrays.asList(
-                hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity,
-                hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity
-            )
-        }
+    return new Object[][] {
+      new Object[] {
+        null,
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        null,
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(
+            downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity,
+            downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        null,
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(
+            downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+
+      // "" used to be any type before v0.9.0, which is now encoded by null
+      new Object[] {
+        "", Arrays.asList(downstreamOf), outgoingRelationships, Collections.emptyList()
+      },
+      new Object[] {
+        "", Arrays.asList(downstreamOf), incomingRelationships, Collections.emptyList()
+      },
+      new Object[] {
+        "", Arrays.asList(downstreamOf), undirectedRelationships, Collections.emptyList()
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(
+            downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity,
+            downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(
+            downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {userType, Arrays.asList(downstreamOf), outgoingRelationships, Arrays.asList()},
+      new Object[] {userType, Arrays.asList(downstreamOf), incomingRelationships, Arrays.asList()},
+      new Object[] {
+        userType, Arrays.asList(downstreamOf), undirectedRelationships, Arrays.asList()
+      },
+      new Object[] {userType, Arrays.asList(hasOwner), outgoingRelationships, Arrays.asList()},
+      new Object[] {
+        userType,
+        Arrays.asList(hasOwner),
+        incomingRelationships,
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity,
+            hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        userType,
+        Arrays.asList(hasOwner),
+        undirectedRelationships,
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity,
+            hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity)
+      }
    };
  }

  @Test(dataProvider = "FindRelatedEntitiesSourceTypeTests")
-  public void testFindRelatedEntitiesSourceType(String entityTypeFilter,
-      List<String> relationshipTypes,
-      RelationshipFilter relationships,
-      List<RelatedEntity> expectedRelatedEntities) throws Exception {
+  public void testFindRelatedEntitiesSourceType(
+      String entityTypeFilter,
+      List<String> relationshipTypes,
+      RelationshipFilter relationships,
+      List<RelatedEntity> expectedRelatedEntities)
+      throws Exception {
    doTestFindRelatedEntities(
-        entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null,
-        anyType,
-        relationshipTypes,
-        relationships,
-        expectedRelatedEntities
-    );
+        entityTypeFilter != null ?
ImmutableList.of(entityTypeFilter) : null, + anyType, + relationshipTypes, + relationships, + expectedRelatedEntities); } @DataProvider(name = "FindRelatedEntitiesDestinationTypeTests") public Object[][] getFindRelatedEntitiesDestinationTypeTests() { return new Object[][] { - new Object[] { - null, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - null, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[] { - "", - Arrays.asList(downstreamOf), - outgoingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - incomingRelationships, - Collections.emptyList() - }, - new Object[] { - "", - Arrays.asList(downstreamOf), - undirectedRelationships, - Collections.emptyList() - }, - - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - outgoingRelationships, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - }, - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - incomingRelationships, - Arrays.asList(downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity) - }, - new Object[] { - datasetType, - Arrays.asList(downstreamOf), - undirectedRelationships, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity - ) - }, - - new Object[] { - datasetType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList() - }, - new Object[] { - datasetType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - new Object[] { - datasetType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList( - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, - hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity - ) - }, - - new Object[] { - userType, - Arrays.asList(hasOwner), - outgoingRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) - }, - new Object[] { - userType, - Arrays.asList(hasOwner), - incomingRelationships, - Arrays.asList() - }, - new Object[] { - userType, - Arrays.asList(hasOwner), - undirectedRelationships, - Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity) - } + new Object[] { + null, + Arrays.asList(downstreamOf), + outgoingRelationships, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + incomingRelationships, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity) + }, + new Object[] { + null, + Arrays.asList(downstreamOf), + undirectedRelationships, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, 
+        downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        "", Arrays.asList(downstreamOf), outgoingRelationships, Collections.emptyList()
+      },
+      new Object[] {
+        "", Arrays.asList(downstreamOf), incomingRelationships, Collections.emptyList()
+      },
+      new Object[] {
+        "", Arrays.asList(downstreamOf), undirectedRelationships, Collections.emptyList()
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(
+            downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity,
+            downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(
+            downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity,
+            downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {datasetType, Arrays.asList(hasOwner), outgoingRelationships, Arrays.asList()},
+      new Object[] {
+        datasetType,
+        Arrays.asList(hasOwner),
+        incomingRelationships,
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity,
+            hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        datasetType,
+        Arrays.asList(hasOwner),
+        undirectedRelationships,
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity,
+            hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        userType,
+        Arrays.asList(hasOwner),
+        outgoingRelationships,
+        Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity)
+      },
+      new Object[] {userType, Arrays.asList(hasOwner), incomingRelationships, Arrays.asList()},
+      new Object[] {
+        userType,
+        Arrays.asList(hasOwner),
+        undirectedRelationships,
+        Arrays.asList(hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity)
+      }
    };
  }

  @Test(dataProvider = "FindRelatedEntitiesDestinationTypeTests")
-  public void testFindRelatedEntitiesDestinationType(String entityTypeFilter,
-      List<String> relationshipTypes,
-      RelationshipFilter relationships,
-      List<RelatedEntity> expectedRelatedEntities) throws Exception {
+  public void testFindRelatedEntitiesDestinationType(
+      String entityTypeFilter,
+      List<String> relationshipTypes,
+      RelationshipFilter relationships,
+      List<RelatedEntity> expectedRelatedEntities)
+      throws Exception {
    doTestFindRelatedEntities(
-        anyType,
-        entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null,
-        relationshipTypes,
-        relationships,
-        expectedRelatedEntities
-    );
+        anyType,
+        entityTypeFilter != null ? ImmutableList.of(entityTypeFilter) : null,
+        relationshipTypes,
+        relationships,
+        expectedRelatedEntities);
  }

  private void doTestFindRelatedEntities(
-      final List<String> sourceType,
-      final List<String> destinationType,
-      final List<String> relationshipTypes,
-      final RelationshipFilter relationshipFilter,
-      List<RelatedEntity> expectedRelatedEntities
-  ) throws Exception {
+      final List<String> sourceType,
+      final List<String> destinationType,
+      final List<String> relationshipTypes,
+      final RelationshipFilter relationshipFilter,
+      List<RelatedEntity> expectedRelatedEntities)
+      throws Exception {
    GraphService service = getPopulatedGraphService();
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        sourceType, EMPTY_FILTER,
-        destinationType, EMPTY_FILTER,
-        relationshipTypes, relationshipFilter,
-        0, 10
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            sourceType,
+            EMPTY_FILTER,
+            destinationType,
+            EMPTY_FILTER,
+            relationshipTypes,
+            relationshipFilter,
+            0,
+            10);
    assertEqualsAnyOrder(relatedEntities, expectedRelatedEntities);
  }

-  private void doTestFindRelatedEntitiesEntityType(@Nullable List<String> sourceType,
-      @Nullable List<String> destinationType,
-      @Nonnull String relationshipType,
-      @Nonnull RelationshipFilter relationshipFilter,
-      @Nonnull GraphService service,
-      @Nonnull RelatedEntity... expectedEntities) {
-    RelatedEntitiesResult actualEntities = service.findRelatedEntities(
-        sourceType, EMPTY_FILTER,
-        destinationType, EMPTY_FILTER,
-        Arrays.asList(relationshipType), relationshipFilter,
-        0, 100
-    );
+  private void doTestFindRelatedEntitiesEntityType(
+      @Nullable List<String> sourceType,
+      @Nullable List<String> destinationType,
+      @Nonnull String relationshipType,
+      @Nonnull RelationshipFilter relationshipFilter,
+      @Nonnull GraphService service,
+      @Nonnull RelatedEntity... expectedEntities) {
+    RelatedEntitiesResult actualEntities =
+        service.findRelatedEntities(
+            sourceType,
+            EMPTY_FILTER,
+            destinationType,
+            EMPTY_FILTER,
+            Arrays.asList(relationshipType),
+            relationshipFilter,
+            0,
+            100);
    assertEqualsAnyOrder(actualEntities, Arrays.asList(expectedEntities));
  }
@@ -921,18 +916,41 @@ public void testFindRelatedEntitiesNullSourceType() throws Exception {
    assertNotNull(nullUrn);
    RelatedEntity nullRelatedEntity = new RelatedEntity(downstreamOf, nullUrn.toString());
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, null, downstreamOf, outgoingRelationships, service);
-    service.addEdge(new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null));
+    service.addEdge(
+        new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null));
    syncAfterWrite();
-    doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
-    doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, downstreamOfDatasetOneRelatedEntity);
+    doTestFindRelatedEntitiesEntityType(
+        anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service);
+    doTestFindRelatedEntitiesEntityType(
+        anyType,
+        null,
+        downstreamOf,
+        outgoingRelationships,
+        service,
+        downstreamOfDatasetOneRelatedEntity);
    service.addEdge(new Edge(datasetOneUrn, nullUrn, downstreamOf, null, null, null,
null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service, nullRelatedEntity); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, nullRelatedEntity, downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + ImmutableList.of("null"), + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity, + downstreamOfDatasetOneRelatedEntity); } @Test @@ -943,97 +961,143 @@ public void testFindRelatedEntitiesNullDestinationType() throws Exception { assertNotNull(nullUrn); RelatedEntity nullRelatedEntity = new RelatedEntity(downstreamOf, nullUrn.toString()); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, null, downstreamOf, outgoingRelationships, service); - service.addEdge(new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null)); + service.addEdge( + new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, null, null, null, null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + downstreamOfDatasetOneRelatedEntity); service.addEdge(new Edge(datasetOneUrn, nullUrn, downstreamOf, null, null, null, null, null)); syncAfterWrite(); - doTestFindRelatedEntitiesEntityType(anyType, ImmutableList.of("null"), downstreamOf, outgoingRelationships, service, nullRelatedEntity); - doTestFindRelatedEntitiesEntityType(anyType, null, downstreamOf, outgoingRelationships, service, nullRelatedEntity, downstreamOfDatasetOneRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + ImmutableList.of("null"), + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity); + doTestFindRelatedEntitiesEntityType( + anyType, + null, + downstreamOf, + outgoingRelationships, + service, + nullRelatedEntity, + downstreamOfDatasetOneRelatedEntity); } @Test public void testFindRelatedEntitiesRelationshipTypes() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult allOutgoingRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100 - ); - assertEqualsAnyOrder( - allOutgoingRelatedEntities, - Arrays.asList( - downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, - hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - - RelatedEntitiesResult allIncomingRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, 
EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships, - 0, 100 - ); + RelatedEntitiesResult allOutgoingRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder( - allIncomingRelatedEntities, - Arrays.asList( - downstreamOfDatasetTwoRelatedEntity, downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity, - hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, hasOwnerDatasetThreeRelatedEntity, hasOwnerDatasetFourRelatedEntity, - knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity - ) - ); - - RelatedEntitiesResult allUnknownRelationshipTypeRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList("unknownRelationshipType", "unseenRelationshipType"), outgoingRelationships, - 0, 100 - ); + allOutgoingRelatedEntities, + Arrays.asList( + downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity, + hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity, + knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity)); + + RelatedEntitiesResult allIncomingRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + incomingRelationships, + 0, + 100); assertEqualsAnyOrder( - allUnknownRelationshipTypeRelatedEntities, - Collections.emptyList() - ); - - RelatedEntitiesResult someUnknownRelationshipTypeRelatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList("unknownRelationshipType", downstreamOf), outgoingRelationships, - 0, 100 - ); + allIncomingRelatedEntities, + Arrays.asList( + downstreamOfDatasetTwoRelatedEntity, + downstreamOfDatasetThreeRelatedEntity, + downstreamOfDatasetFourRelatedEntity, + hasOwnerDatasetOneRelatedEntity, + hasOwnerDatasetTwoRelatedEntity, + hasOwnerDatasetThreeRelatedEntity, + hasOwnerDatasetFourRelatedEntity, + knowsUserOneRelatedEntity, + knowsUserTwoRelatedEntity)); + + RelatedEntitiesResult allUnknownRelationshipTypeRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList("unknownRelationshipType", "unseenRelationshipType"), + outgoingRelationships, + 0, + 100); + assertEqualsAnyOrder(allUnknownRelationshipTypeRelatedEntities, Collections.emptyList()); + + RelatedEntitiesResult someUnknownRelationshipTypeRelatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList("unknownRelationshipType", downstreamOf), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder( - someUnknownRelationshipTypeRelatedEntities, - Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity) - ); + someUnknownRelationshipTypeRelatedEntities, + Arrays.asList(downstreamOfDatasetOneRelatedEntity, downstreamOfDatasetTwoRelatedEntity)); } @Test public void testFindRelatedEntitiesNoRelationshipTypes() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Collections.emptyList(), outgoingRelationships, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Collections.emptyList(), + 
outgoingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Collections.emptyList()); - // does the test actually test something? is the Collections.emptyList() the only reason why we did not get any related urns? - RelatedEntitiesResult relatedEntitiesAll = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 10 - ); + // does the test actually test something? is the Collections.emptyList() the only reason why we + // did not get any related urns? + RelatedEntitiesResult relatedEntitiesAll = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 10); assertNotEquals(relatedEntitiesAll.entities, Collections.emptyList()); } @@ -1042,21 +1106,29 @@ public void testFindRelatedEntitiesNoRelationshipTypes() throws Exception { public void testFindRelatedEntitiesAllFilters() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(userType), newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), outgoingRelationships, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(userType), + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Arrays.asList(hasOwnerUserOneRelatedEntity)); - relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(userType), newFilter("urn", userTwoUrnString), - Arrays.asList(hasOwner), incomingRelationships, - 0, 10 - ); + relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(userType), + newFilter("urn", userTwoUrnString), + Arrays.asList(hasOwner), + incomingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Collections.emptyList()); } @@ -1065,21 +1137,29 @@ public void testFindRelatedEntitiesAllFilters() throws Exception { public void testFindRelatedEntitiesMultipleEntityTypes() throws Exception { GraphService service = getPopulatedGraphService(); - RelatedEntitiesResult relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType, userType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(datasetType, userType), newFilter("urn", userOneUrnString), - Arrays.asList(hasOwner), outgoingRelationships, - 0, 10 - ); + RelatedEntitiesResult relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType, userType), + newFilter("urn", datasetOneUrnString), + ImmutableList.of(datasetType, userType), + newFilter("urn", userOneUrnString), + Arrays.asList(hasOwner), + outgoingRelationships, + 0, + 10); assertEquals(relatedEntities.entities, Arrays.asList(hasOwnerUserOneRelatedEntity)); - relatedEntities = service.findRelatedEntities( - ImmutableList.of(datasetType, userType), newFilter("urn", datasetOneUrnString), - ImmutableList.of(datasetType, userType), newFilter("urn", userTwoUrnString), - Arrays.asList(hasOwner), incomingRelationships, - 0, 10 - ); + relatedEntities = + service.findRelatedEntities( + ImmutableList.of(datasetType, 
userType),
+            newFilter("urn", datasetOneUrnString),
+            ImmutableList.of(datasetType, userType),
+            newFilter("urn", userTwoUrnString),
+            Arrays.asList(hasOwner),
+            incomingRelationships,
+            0,
+            10);
    assertEquals(relatedEntities.entities, Collections.emptyList());
  }
@@ -1089,161 +1169,227 @@ public void testFindRelatedEntitiesOffsetAndCount() throws Exception {
    GraphService service = getPopulatedGraphService();
    // populated graph asserted in testPopulatedGraphService
-    RelatedEntitiesResult allRelatedEntities = service.findRelatedEntities(
-        ImmutableList.of(datasetType), EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100
-    );
+    RelatedEntitiesResult allRelatedEntities =
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType),
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
    List<RelatedEntity> individualRelatedEntities = new ArrayList<>();
    IntStream.range(0, allRelatedEntities.entities.size())
-        .forEach(idx -> individualRelatedEntities.addAll(
+        .forEach(
+            idx ->
+                individualRelatedEntities.addAll(
                    service.findRelatedEntities(
-                ImmutableList.of(datasetType), EMPTY_FILTER,
-                anyType, EMPTY_FILTER,
-                Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-                idx, 1
-            ).entities
-        ));
+                            ImmutableList.of(datasetType),
+                            EMPTY_FILTER,
+                            anyType,
+                            EMPTY_FILTER,
+                            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+                            outgoingRelationships,
+                            idx,
+                            1)
+                        .entities));
    Assert.assertEquals(individualRelatedEntities, allRelatedEntities.entities);
  }

  @DataProvider(name = "RemoveEdgesFromNodeTests")
  public Object[][] getRemoveEdgesFromNodeTests() {
    return new Object[][] {
-        new Object[] {
-            datasetTwoUrn,
-            Arrays.asList(downstreamOf),
-            outgoingRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
-        },
-        new Object[] {
-            datasetTwoUrn,
-            Arrays.asList(downstreamOf),
-            incomingRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(),
-        },
-        new Object[] {
-            datasetTwoUrn,
-            Arrays.asList(downstreamOf),
-            undirectedRelationships,
-            Arrays.asList(downstreamOfDatasetOneRelatedEntity),
-            Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList()
-        },
-
-        new Object[] {
-            userOneUrn,
-            Arrays.asList(hasOwner, knowsUser),
-            outgoingRelationships,
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity)
-        },
-        new Object[] {
-            userOneUrn,
-            Arrays.asList(hasOwner, knowsUser),
-            incomingRelationships,
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity),
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList()
-        },
-        new Object[] {
-            userOneUrn,
-            Arrays.asList(hasOwner, knowsUser),
-            undirectedRelationships,
-            Arrays.asList(knowsUserTwoRelatedEntity),
-            Arrays.asList(hasOwnerDatasetOneRelatedEntity, hasOwnerDatasetTwoRelatedEntity, knowsUserTwoRelatedEntity),
-            Arrays.asList(),
-            Arrays.asList()
-        }
+      new Object[] {
+        datasetTwoUrn,
+        Arrays.asList(downstreamOf),
+        outgoingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity)
+      },
+      new Object[] {
+        datasetTwoUrn,
+        Arrays.asList(downstreamOf),
+        incomingRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(),
+      },
+      new Object[] {
+        datasetTwoUrn,
+        Arrays.asList(downstreamOf),
+        undirectedRelationships,
+        Arrays.asList(downstreamOfDatasetOneRelatedEntity),
+        Arrays.asList(downstreamOfDatasetThreeRelatedEntity, downstreamOfDatasetFourRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList()
+      },
+      new Object[] {
+        userOneUrn,
+        Arrays.asList(hasOwner, knowsUser),
+        outgoingRelationships,
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity)
+      },
+      new Object[] {
+        userOneUrn,
+        Arrays.asList(hasOwner, knowsUser),
+        incomingRelationships,
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity),
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList()
+      },
+      new Object[] {
+        userOneUrn,
+        Arrays.asList(hasOwner, knowsUser),
+        undirectedRelationships,
+        Arrays.asList(knowsUserTwoRelatedEntity),
+        Arrays.asList(
+            hasOwnerDatasetOneRelatedEntity,
+            hasOwnerDatasetTwoRelatedEntity,
+            knowsUserTwoRelatedEntity),
+        Arrays.asList(),
+        Arrays.asList()
+      }
    };
  }

  @Test(dataProvider = "RemoveEdgesFromNodeTests")
-  public void testRemoveEdgesFromNode(@Nonnull Urn nodeToRemoveFrom,
-      @Nonnull List<String> relationTypes,
-      @Nonnull RelationshipFilter relationshipFilter,
-      List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove,
-      List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove,
-      List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove,
-      List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove) throws Exception {
+  public void testRemoveEdgesFromNode(
+      @Nonnull Urn nodeToRemoveFrom,
+      @Nonnull List<String> relationTypes,
+      @Nonnull RelationshipFilter relationshipFilter,
+      List<RelatedEntity> expectedOutgoingRelatedUrnsBeforeRemove,
+      List<RelatedEntity> expectedIncomingRelatedUrnsBeforeRemove,
+      List<RelatedEntity> expectedOutgoingRelatedUrnsAfterRemove,
+      List<RelatedEntity> expectedIncomingRelatedUrnsAfterRemove)
+      throws Exception {
    GraphService service = getPopulatedGraphService();
    List<String> allOtherRelationTypes =
-        allRelationshipTypes.stream()
-            .filter(relation -> !relationTypes.contains(relation))
-            .collect(Collectors.toList());
+        allRelationshipTypes.stream()
+            .filter(relation -> !relationTypes.contains(relation))
+            .collect(Collectors.toList());
    assertTrue(allOtherRelationTypes.size() > 0);
-    RelatedEntitiesResult actualOutgoingRelatedUrnsBeforeRemove = service.findRelatedEntities(
-        anyType, newFilter("urn", nodeToRemoveFrom.toString()),
-        anyType, EMPTY_FILTER,
-        relationTypes, outgoingRelationships,
-        0, 100);
-    RelatedEntitiesResult actualIncomingRelatedUrnsBeforeRemove =
service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, incomingRelationships, - 0, 100); - assertEqualsAnyOrder(actualOutgoingRelatedUrnsBeforeRemove, expectedOutgoingRelatedUrnsBeforeRemove); - assertEqualsAnyOrder(actualIncomingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove); + RelatedEntitiesResult actualOutgoingRelatedUrnsBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult actualIncomingRelatedUrnsBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + actualOutgoingRelatedUrnsBeforeRemove, expectedOutgoingRelatedUrnsBeforeRemove); + assertEqualsAnyOrder( + actualIncomingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove); // we expect these do not change - RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, outgoingRelationships, - 0, 100); - RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, incomingRelationships, - 0, 100); + RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + incomingRelationships, + 0, + 100); - service.removeEdgesFromNode( - nodeToRemoveFrom, - relationTypes, - relationshipFilter - ); + service.removeEdgesFromNode(nodeToRemoveFrom, relationTypes, relationshipFilter); syncAfterWrite(); - RelatedEntitiesResult actualOutgoingRelatedUrnsAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, outgoingRelationships, - 0, 100); - RelatedEntitiesResult actualIncomingRelatedUrnsAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - relationTypes, incomingRelationships, - 0, 100); - assertEqualsAnyOrder(actualOutgoingRelatedUrnsAfterRemove, expectedOutgoingRelatedUrnsAfterRemove); - assertEqualsAnyOrder(actualIncomingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); + RelatedEntitiesResult actualOutgoingRelatedUrnsAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult actualIncomingRelatedUrnsAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + relationTypes, + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + actualOutgoingRelatedUrnsAfterRemove, expectedOutgoingRelatedUrnsAfterRemove); + 
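    // Every mutation test in this suite follows the same skeleton: query a baseline,
    // mutate the graph, call syncAfterWrite() so asynchronous backends finish indexing,
    // then re-query and compare ignoring order. A minimal sketch of that skeleton, using
    // only helpers visible in this file (the names `urn`, `types`, and `filter` below are
    // illustrative placeholders, not fields of this class):
    //
    //   RelatedEntitiesResult before =
    //       service.findRelatedEntities(
    //           anyType, newFilter("urn", urn.toString()),
    //           anyType, EMPTY_FILTER,
    //           types, filter,
    //           0, 100);
    //   service.removeEdgesFromNode(urn, types, filter);
    //   syncAfterWrite(); // flush pending writes before asserting
    //   RelatedEntitiesResult after =
    //       service.findRelatedEntities(
    //           anyType, newFilter("urn", urn.toString()),
    //           anyType, EMPTY_FILTER,
    //           types, filter,
    //           0, 100);
    //   assertEqualsAnyOrder(after, expectedAfter); // result order is backend-dependent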
assertEqualsAnyOrder( + actualIncomingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); // assert these did not change - RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, outgoingRelationships, - 0, 100); - RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - allOtherRelationTypes, incomingRelationships, - 0, 100); - assertEqualsAnyOrder(relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove, relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove); - assertEqualsAnyOrder(relatedEntitiesOfOtherIncomingRelationTypesAfterRemove, relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove); + RelatedEntitiesResult relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + outgoingRelationships, + 0, + 100); + RelatedEntitiesResult relatedEntitiesOfOtherIncomingRelationTypesAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + allOtherRelationTypes, + incomingRelationships, + 0, + 100); + assertEqualsAnyOrder( + relatedEntitiesOfOtherOutgoingRelationTypesAfterRemove, + relatedEntitiesOfOtherOutgoingRelationTypesBeforeRemove); + assertEqualsAnyOrder( + relatedEntitiesOfOtherIncomingRelationTypesAfterRemove, + relatedEntitiesOfOtherIncomingRelationTypesBeforeRemove); } @Test @@ -1252,50 +1398,53 @@ public void testRemoveEdgesFromNodeNoRelationshipTypes() throws Exception { Urn nodeToRemoveFrom = datasetOneUrn; // populated graph asserted in testPopulatedGraphService - RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); - - // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported by all implementations - service.removeEdgesFromNode( - nodeToRemoveFrom, - Collections.emptyList(), - outgoingRelationships - ); - service.removeEdgesFromNode( - nodeToRemoveFrom, - Collections.emptyList(), - incomingRelationships - ); + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); + + // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported + // by all implementations + service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), outgoingRelationships); + service.removeEdgesFromNode(nodeToRemoveFrom, Collections.emptyList(), incomingRelationships); syncAfterWrite(); - RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + 
EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemove, relatedOutgoingEntitiesBeforeRemove); - // does the test actually test something? is the Collections.emptyList() the only reason why we did not see changes? + // does the test actually test something? is the Collections.emptyList() the only reason why we + // did not see changes? service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships); service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships); syncAfterWrite(); - RelatedEntitiesResult relatedOutgoingEntitiesAfterRemoveAll = service.findRelatedEntities( - anyType, newFilter("urn", nodeToRemoveFrom.toString()), - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesAfterRemoveAll = + service.findRelatedEntities( + anyType, + newFilter("urn", nodeToRemoveFrom.toString()), + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemoveAll, Collections.emptyList()); } @@ -1305,30 +1454,35 @@ public void testRemoveEdgesFromUnknownNode() throws Exception { Urn nodeToRemoveFrom = unknownUrn; // populated graph asserted in testPopulatedGraphService - RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesBeforeRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); - // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported by all implementations + // can be replaced with a single removeEdgesFromNode and undirectedRelationships once supported + // by all implementations service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - outgoingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships); service.removeEdgesFromNode( - nodeToRemoveFrom, - Arrays.asList(downstreamOf, hasOwner, knowsUser), - incomingRelationships - ); + nodeToRemoveFrom, Arrays.asList(downstreamOf, hasOwner, knowsUser), incomingRelationships); syncAfterWrite(); - RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = service.findRelatedEntities( - anyType, EMPTY_FILTER, - anyType, EMPTY_FILTER, - Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships, - 0, 100); + RelatedEntitiesResult relatedOutgoingEntitiesAfterRemove = + service.findRelatedEntities( + anyType, + EMPTY_FILTER, + anyType, + EMPTY_FILTER, + Arrays.asList(downstreamOf, hasOwner, knowsUser), + outgoingRelationships, + 0, + 100); assertEqualsAnyOrder(relatedOutgoingEntitiesAfterRemove, relatedOutgoingEntitiesBeforeRemove); } @@ -1341,17 +1495,18 @@ public void testRemoveNode() throws Exception { // assert the modified graph 
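    // The removeNode call itself sits in unchanged lines above this hunk. Judging from the
    // expectation below, the removed node is a dataset that every downstreamOf edge in this
    // fixture touches, so all downstreamOf relationships vanish while the hasOwner and
    // knowsUser relationships to users survive. A minimal sketch of the acting half,
    // assuming datasetTwoUrn is the removed node (an assumption; the call is not visible
    // in this hunk):
    //
    //   service.removeNode(datasetTwoUrn);
    //   syncAfterWrite();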
    assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            anyType, EMPTY_FILTER,
-            anyType, EMPTY_FILTER,
-            Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-            0, 100
-        ),
-        Arrays.asList(
-            hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity,
-            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity
-        )
-    );
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100),
+        Arrays.asList(
+            hasOwnerUserOneRelatedEntity, hasOwnerUserTwoRelatedEntity,
+            knowsUserOneRelatedEntity, knowsUserTwoRelatedEntity));
  }

  @Test
@@ -1359,20 +1514,30 @@ public void testRemoveUnknownNode() throws Exception {
    GraphService service = getPopulatedGraphService();
    // populated graph asserted in testPopulatedGraphService
-    RelatedEntitiesResult entitiesBeforeRemove = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult entitiesBeforeRemove =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
    service.removeNode(unknownUrn);
    syncAfterWrite();
-    RelatedEntitiesResult entitiesAfterRemove = service.findRelatedEntities(
-        anyType, EMPTY_FILTER,
-        anyType, EMPTY_FILTER,
-        Arrays.asList(downstreamOf, hasOwner, knowsUser), outgoingRelationships,
-        0, 100);
+    RelatedEntitiesResult entitiesAfterRemove =
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf, hasOwner, knowsUser),
+            outgoingRelationships,
+            0,
+            100);
    assertEqualsAnyOrder(entitiesBeforeRemove, entitiesAfterRemove);
  }
@@ -1385,87 +1550,113 @@ public void testClear() throws Exception {
    service.clear();
    syncAfterWrite();
-    // assert the modified graph: check all nodes related to upstreamOf and nextVersionOf edges again
+    // assert the modified graph: check all nodes related to upstreamOf and nextVersionOf edges
+    // again
    assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            ImmutableList.of(datasetType), EMPTY_FILTER,
-            anyType, EMPTY_FILTER,
-            Arrays.asList(downstreamOf), outgoingRelationships,
-            0, 100
-        ),
-        Collections.emptyList()
-    );
+        service.findRelatedEntities(
+            ImmutableList.of(datasetType),
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(downstreamOf),
+            outgoingRelationships,
+            0,
+            100),
+        Collections.emptyList());
    assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            ImmutableList.of(userType), EMPTY_FILTER,
-            anyType, EMPTY_FILTER,
-            Arrays.asList(hasOwner), outgoingRelationships,
-            0, 100
-        ),
-        Collections.emptyList()
-    );
+        service.findRelatedEntities(
+            ImmutableList.of(userType),
+            EMPTY_FILTER,
+            anyType,
+            EMPTY_FILTER,
+            Arrays.asList(hasOwner),
+            outgoingRelationships,
+            0,
+            100),
+        Collections.emptyList());
    assertEqualsAnyOrder(
-        service.findRelatedEntities(
-            anyType, EMPTY_FILTER,
-            ImmutableList.of(userType), EMPTY_FILTER,
-            Arrays.asList(knowsUser), outgoingRelationships,
-            0, 100
-        ),
-        Collections.emptyList()
-    );
+        service.findRelatedEntities(
+            anyType,
+            EMPTY_FILTER,
+            ImmutableList.of(userType),
+            EMPTY_FILTER,
+            Arrays.asList(knowsUser),
+            outgoingRelationships,
+            0,
+            100),
+        Collections.emptyList());
  }

  private List<Edge> getFullyConnectedGraph(int nodes, List<String> relationshipTypes) {
-    List<Edge> edges = new ArrayList<>();
-
-    for (int sourceNode = 1; sourceNode <= nodes; sourceNode++) {
-      for (int destinationNode = 1; destinationNode <= nodes; destinationNode++) {
-        for (String relationship : relationshipTypes) {
-          int sourceType = sourceNode % 3;
-          Urn source = createFromString("urn:li:type" + sourceType + ":(urn:li:node" + sourceNode + ")");
-          int destinationType = destinationNode % 3;
-          Urn destination = createFromString("urn:li:type" + destinationType + ":(urn:li:node" + destinationNode + ")");
-
-          edges.add(new Edge(source, destination, relationship, null, null, null, null, null));
-        }
-      }
+    List<Edge> edges = new ArrayList<>();
+
+    for (int sourceNode = 1; sourceNode <= nodes; sourceNode++) {
+      for (int destinationNode = 1; destinationNode <= nodes; destinationNode++) {
+        for (String relationship : relationshipTypes) {
+          int sourceType = sourceNode % 3;
+          Urn source =
+              createFromString("urn:li:type" + sourceType + ":(urn:li:node" + sourceNode + ")");
+          int destinationType = destinationNode % 3;
+          Urn destination =
+              createFromString(
+                  "urn:li:type" + destinationType + ":(urn:li:node" + destinationNode + ")");
+
+          edges.add(new Edge(source, destination, relationship, null, null, null, null, null));
+        }
      }
+    }
-    return edges;
+    return edges;
  }

  @Test
  public void testConcurrentAddEdge() throws Exception {
-    final GraphService service = getGraphService();
-
-    // too many edges may cause too many threads throwing
-    // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked worker
-    int nodes = 5;
-    int relationshipTypes = 3;
-    List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList());
-    List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships);
-
-    List<Runnable> operations = edges.stream().map(edge -> new Runnable() {
-      @Override
-      public void run() {
-        service.addEdge(edge);
-      }
-    }).collect(Collectors.toList());
-
-    doTestConcurrentOp(operations);
-    syncAfterWrite();
-
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        null, EMPTY_FILTER,
-        null, EMPTY_FILTER,
-        allRelationships, outgoingRelationships,
-        0, nodes * relationshipTypes * 2
-    );
-
-    Set<RelatedEntity> expectedRelatedEntities = edges.stream()
-        .map(edge -> new RelatedEntity(edge.getRelationshipType(), edge.getDestination().toString()))
-        .collect(Collectors.toSet());
-    assertEquals(new HashSet<>(relatedEntities.entities), expectedRelatedEntities);
+    final GraphService service = getGraphService();
+
+    // too many edges may cause too many threads throwing
+    // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked
+    // worker
+    int nodes = 5;
+    int relationshipTypes = 3;
+    List<String> allRelationships =
+        IntStream.range(1, relationshipTypes + 1)
+            .mapToObj(id -> "relationship" + id)
+            .collect(Collectors.toList());
+    List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships);
+
+    List<Runnable> operations =
+        edges.stream()
+            .map(
+                edge ->
+                    new Runnable() {
+                      @Override
+                      public void run() {
+                        service.addEdge(edge);
+                      }
+                    })
+            .collect(Collectors.toList());
+
+    doTestConcurrentOp(operations);
+    syncAfterWrite();
+
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            null,
+            EMPTY_FILTER,
+            null,
+            EMPTY_FILTER,
+            allRelationships,
+            outgoingRelationships,
+            0,
+            nodes * relationshipTypes * 2);
+
+    Set<RelatedEntity> expectedRelatedEntities =
+        edges.stream()
+            .map(
+                edge ->
+                    new RelatedEntity(edge.getRelationshipType(), edge.getDestination().toString()))
+            .collect(Collectors.toSet());
+    assertEquals(new HashSet<>(relatedEntities.entities), expectedRelatedEntities);
  }

  @Test
@@ -1474,7 +1665,10 @@ public void testConcurrentRemoveEdgesFromNode() throws Exception {
    int nodes = 5;
    int relationshipTypes = 3;
-    List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList());
+    List<String> allRelationships =
+        IntStream.range(1, relationshipTypes + 1)
+            .mapToObj(id -> "relationship" + id)
+            .collect(Collectors.toList());
    List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships);
    // add fully connected graph
@@ -1482,43 +1676,63 @@ public void testConcurrentRemoveEdgesFromNode() throws Exception {
    syncAfterWrite();
    // assert the graph is there
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        null, EMPTY_FILTER,
-        null, EMPTY_FILTER,
-        allRelationships, outgoingRelationships,
-        0, nodes * relationshipTypes * 2
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            null,
+            EMPTY_FILTER,
+            null,
+            EMPTY_FILTER,
+            allRelationships,
+            outgoingRelationships,
+            0,
+            nodes * relationshipTypes * 2);
    assertEquals(relatedEntities.entities.size(), nodes * relationshipTypes);
    // delete all edges concurrently
-    List<Runnable> operations = edges.stream().map(edge -> new Runnable() {
-      @Override
-      public void run() {
-        service.removeEdgesFromNode(edge.getSource(), Arrays.asList(edge.getRelationshipType()), outgoingRelationships);
-      }
-    }).collect(Collectors.toList());
+    List<Runnable> operations =
+        edges.stream()
+            .map(
+                edge ->
+                    new Runnable() {
+                      @Override
+                      public void run() {
+                        service.removeEdgesFromNode(
+                            edge.getSource(),
+                            Arrays.asList(edge.getRelationshipType()),
+                            outgoingRelationships);
+                      }
+                    })
            .collect(Collectors.toList());
    doTestConcurrentOp(operations);
    syncAfterWrite();
    // assert the graph is gone
-    RelatedEntitiesResult relatedEntitiesAfterDeletion = service.findRelatedEntities(
-        null, EMPTY_FILTER,
-        null, EMPTY_FILTER,
-        allRelationships, outgoingRelationships,
-        0, nodes * relationshipTypes * 2
-    );
+    RelatedEntitiesResult relatedEntitiesAfterDeletion =
+        service.findRelatedEntities(
+            null,
+            EMPTY_FILTER,
+            null,
+            EMPTY_FILTER,
+            allRelationships,
+            outgoingRelationships,
+            0,
+            nodes * relationshipTypes * 2);
    assertEquals(relatedEntitiesAfterDeletion.entities.size(), 0);
-  }
+  }

  @Test
  public void testConcurrentRemoveNodes() throws Exception {
    final GraphService service = getGraphService();
    // too many edges may cause too many threads throwing
-    // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked worker
+    // java.util.concurrent.RejectedExecutionException: Thread limit exceeded replacing blocked
+    // worker
    int nodes = 5;
    int relationshipTypes = 3;
-    List<String> allRelationships = IntStream.range(1, relationshipTypes + 1).mapToObj(id -> "relationship" + id).collect(Collectors.toList());
+    List<String> allRelationships =
+        IntStream.range(1, relationshipTypes + 1)
+            .mapToObj(id -> "relationship" + id)
+            .collect(Collectors.toList());
    List<Edge> edges = getFullyConnectedGraph(nodes, allRelationships);
    // add fully connected graph
@@ -1526,106 +1740,131 @@ public void testConcurrentRemoveNodes() throws Exception {
    syncAfterWrite();
    // assert the graph is there
-    RelatedEntitiesResult relatedEntities = service.findRelatedEntities(
-        null, EMPTY_FILTER,
-        null, EMPTY_FILTER,
-        allRelationships, outgoingRelationships,
-        0, nodes * relationshipTypes * 2
-    );
+    RelatedEntitiesResult relatedEntities =
+        service.findRelatedEntities(
+            null,
+            EMPTY_FILTER,
+            null,
+            EMPTY_FILTER,
+            allRelationships,
+            outgoingRelationships,
+            0,
+            nodes * relationshipTypes * 2);
    assertEquals(relatedEntities.entities.size(), nodes * relationshipTypes);
    // remove all nodes concurrently
    // nodes will be removed multiple times
-    List<Runnable> operations = edges.stream().map(edge -> new Runnable() {
-      @Override
-      public void run() {
-        service.removeNode(edge.getSource());
-      }
-    }).collect(Collectors.toList());
+    List<Runnable> operations =
+        edges.stream()
+            .map(
+                edge ->
+                    new Runnable() {
+                      @Override
+                      public void run() {
+                        service.removeNode(edge.getSource());
+                      }
+                    })
+            .collect(Collectors.toList());
    doTestConcurrentOp(operations);
    syncAfterWrite();
    // assert the graph is gone
-    RelatedEntitiesResult relatedEntitiesAfterDeletion = service.findRelatedEntities(
-        null, EMPTY_FILTER,
-        null, EMPTY_FILTER,
-        allRelationships, outgoingRelationships,
-        0, nodes * relationshipTypes * 2
-    );
+    RelatedEntitiesResult relatedEntitiesAfterDeletion =
+        service.findRelatedEntities(
+            null,
+            EMPTY_FILTER,
+            null,
+            EMPTY_FILTER,
+            allRelationships,
+            outgoingRelationships,
+            0,
+            nodes * relationshipTypes * 2);
    assertEquals(relatedEntitiesAfterDeletion.entities.size(), 0);
  }

  private void doTestConcurrentOp(List<Runnable> operations) throws Exception {
-    final Queue<Throwable> throwables = new ConcurrentLinkedQueue<>();
-    final CountDownLatch started = new CountDownLatch(operations.size());
-    final CountDownLatch finished = new CountDownLatch(operations.size());
-    operations.forEach(operation -> new Thread(new Runnable() {
-      @Override
-      public void run() {
-        try {
-          started.countDown();
-
-          try {
-            if (!started.await(10, TimeUnit.SECONDS)) {
-              fail("Timed out waiting for all threads to start");
+    final Queue<Throwable> throwables = new ConcurrentLinkedQueue<>();
+    final CountDownLatch started = new CountDownLatch(operations.size());
+    final CountDownLatch finished = new CountDownLatch(operations.size());
+    operations.forEach(
+        operation ->
+            new Thread(
+                    new Runnable() {
+                      @Override
+                      public void run() {
+                        try {
+                          started.countDown();
+
+                          try {
+                            if (!started.await(10, TimeUnit.SECONDS)) {
+                              fail("Timed out waiting for all threads to start");
+                            }
+                          } catch (InterruptedException e) {
+                            fail("Got interrupted waiting for all threads to start");
+                          }
+
+                          operation.run();
+                        } catch (Throwable t) {
+                          t.printStackTrace();
+                          throwables.add(t);
+                        }
+                        finished.countDown();
                      }
-          } catch (InterruptedException e) {
-            fail("Got interrupted waiting for all threads to start");
-          }
-
-          operation.run();
-        } catch (Throwable t) {
-          t.printStackTrace();
-          throwables.add(t);
-        }
-        finished.countDown();
-      }
-    }).start());
-
-    assertTrue(finished.await(getTestConcurrentOpTimeout().toMillis(), TimeUnit.MILLISECONDS));
-    throwables.forEach(throwable -> System.err.printf(System.currentTimeMillis() + ": exception occurred: %s%n", throwable));
-    assertEquals(throwables.size(), 0);
+                    })
+                .start());
+
+    assertTrue(finished.await(getTestConcurrentOpTimeout().toMillis(), TimeUnit.MILLISECONDS));
+    throwables.forEach(
+        throwable ->
+            System.err.printf(
+                System.currentTimeMillis() + ": exception occurred: %s%n", throwable));
+    assertEquals(throwables.size(), 0);
  }

  @Test
  public void testPopulatedGraphServiceGetLineageMultihop() throws Exception {
-    GraphService service = getLineagePopulatedGraphService();
-
-    EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2);
-    assertEquals(upstreamLineage.getTotal().intValue(), 0);
-    assertEquals(upstreamLineage.getRelationships().size(), 0);
-
-    EntityLineageResult downstreamLineage =
service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); - - assertEquals(downstreamLineage.getTotal().intValue(), 5); - assertEquals(downstreamLineage.getRelationships().size(), 5); - Map relationships = downstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); - assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(datasetThreeUrn)); - assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(datasetFourUrn)); - assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(dataJobTwoUrn)); - assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1); - - upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2); - assertEquals(upstreamLineage.getTotal().intValue(), 3); - assertEquals(upstreamLineage.getRelationships().size(), 3); - relationships = upstreamLineage.getRelationships().stream().collect(Collectors.toMap(LineageRelationship::getEntity, - Function.identity())); - assertTrue(relationships.containsKey(datasetOneUrn)); - assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2); - assertTrue(relationships.containsKey(datasetTwoUrn)); - assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); - assertTrue(relationships.containsKey(dataJobOneUrn)); - assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); - - downstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); - assertEquals(downstreamLineage.getTotal().intValue(), 0); - assertEquals(downstreamLineage.getRelationships().size(), 0); + GraphService service = getLineagePopulatedGraphService(); + + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineage.getTotal().intValue(), 0); + assertEquals(upstreamLineage.getRelationships().size(), 0); + + EntityLineageResult downstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + + assertEquals(downstreamLineage.getTotal().intValue(), 5); + assertEquals(downstreamLineage.getRelationships().size(), 5); + Map relationships = + downstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); + assertTrue(relationships.containsKey(datasetTwoUrn)); + assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(datasetThreeUrn)); + assertEquals(relationships.get(datasetThreeUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(datasetFourUrn)); + assertEquals(relationships.get(datasetFourUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(dataJobOneUrn)); + assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(dataJobTwoUrn)); + assertEquals(relationships.get(dataJobTwoUrn).getDegree().intValue(), 1); + + upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + assertEquals(upstreamLineage.getTotal().intValue(), 3); + 
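+    // Per the assertions below: datasetTwo and dataJobOne are one hop upstream of
+    // datasetThree (degree 1), while datasetOne is two hops away (degree 2).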
assertEquals(upstreamLineage.getRelationships().size(), 3); + relationships = + upstreamLineage.getRelationships().stream() + .collect(Collectors.toMap(LineageRelationship::getEntity, Function.identity())); + assertTrue(relationships.containsKey(datasetOneUrn)); + assertEquals(relationships.get(datasetOneUrn).getDegree().intValue(), 2); + assertTrue(relationships.containsKey(datasetTwoUrn)); + assertEquals(relationships.get(datasetTwoUrn).getDegree().intValue(), 1); + assertTrue(relationships.containsKey(dataJobOneUrn)); + assertEquals(relationships.get(dataJobOneUrn).getDegree().intValue(), 1); + + downstreamLineage = + service.getLineage(datasetThreeUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + assertEquals(downstreamLineage.getTotal().intValue(), 0); + assertEquals(downstreamLineage.getRelationships().size(), 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java index d8cd6ed05b2ec..481db53eafbbe 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphContainer.java @@ -1,14 +1,9 @@ package com.linkedin.metadata.graph.dgraph; -import com.github.dockerjava.api.command.InspectContainerResponse; -import lombok.NonNull; -import org.testcontainers.containers.GenericContainer; -import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; -import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; -import org.testcontainers.containers.wait.strategy.WaitAllStrategy; -import org.testcontainers.containers.wait.strategy.WaitStrategy; -import org.testcontainers.utility.DockerImageName; +import static java.net.HttpURLConnection.HTTP_OK; +import static java.util.stream.Collectors.toSet; +import com.github.dockerjava.api.command.InspectContainerResponse; import java.time.Duration; import java.util.Arrays; import java.util.HashMap; @@ -16,223 +11,235 @@ import java.util.Set; import java.util.StringJoiner; import java.util.stream.Stream; - -import static java.net.HttpURLConnection.HTTP_OK; -import static java.util.stream.Collectors.toSet; +import lombok.NonNull; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.wait.strategy.HttpWaitStrategy; +import org.testcontainers.containers.wait.strategy.LogMessageWaitStrategy; +import org.testcontainers.containers.wait.strategy.WaitAllStrategy; +import org.testcontainers.containers.wait.strategy.WaitStrategy; +import org.testcontainers.utility.DockerImageName; public class DgraphContainer extends GenericContainer { - /** - * The image defaults to the official Dgraph image: Dgraph. - */ - public static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("dgraph/dgraph"); + /** + * The image defaults to the official Dgraph image: Dgraph. 
+ */ + public static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("dgraph/dgraph"); - private static final int HTTP_PORT = 8080; + private static final int HTTP_PORT = 8080; - private static final int GRPC_PORT = 9080; + private static final int GRPC_PORT = 9080; - private boolean started = false; + private boolean started = false; - @Override - protected void containerIsStarted(InspectContainerResponse containerInfo) { - super.containerIsStarted(containerInfo); - started = true; - } + @Override + protected void containerIsStarted(InspectContainerResponse containerInfo) { + super.containerIsStarted(containerInfo); + started = true; + } - @Override - protected void containerIsStopped(InspectContainerResponse containerInfo) { - super.containerIsStopped(containerInfo); - started = false; - } + @Override + protected void containerIsStopped(InspectContainerResponse containerInfo) { + super.containerIsStopped(containerInfo); + started = false; + } - private final Map zeroArguments = new HashMap<>(); + private final Map zeroArguments = new HashMap<>(); - private final Map alphaArguments = new HashMap<>(); + private final Map alphaArguments = new HashMap<>(); - /** - * Creates a DgraphContainer using a specific docker image. Connect the container - * to another DgraphContainer to form a cluster via `peerAlias`. - * - * @param dockerImageName The docker image to use. - */ - public DgraphContainer(@NonNull final DockerImageName dockerImageName) { - super(dockerImageName); + /** + * Creates a DgraphContainer using a specific docker image. Connect the container to another + * DgraphContainer to form a cluster via `peerAlias`. + * + * @param dockerImageName The docker image to use. + */ + public DgraphContainer(@NonNull final DockerImageName dockerImageName) { + super(dockerImageName); - dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); + dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME); - WaitStrategy waitForLeader = new LogMessageWaitStrategy() - .withRegEx(".* Got Zero leader: .*\n"); - WaitStrategy waitForCluster = new LogMessageWaitStrategy() - .withRegEx(".* Server is ready\n"); - WaitStrategy waitForHttp = new HttpWaitStrategy() + WaitStrategy waitForLeader = new LogMessageWaitStrategy().withRegEx(".* Got Zero leader: .*\n"); + WaitStrategy waitForCluster = new LogMessageWaitStrategy().withRegEx(".* Server is ready\n"); + WaitStrategy waitForHttp = + new HttpWaitStrategy() .forPort(HTTP_PORT) .forStatusCodeMatching(response -> response == HTTP_OK); - this.waitStrategy = new WaitAllStrategy() + this.waitStrategy = + new WaitAllStrategy() .withStrategy(waitForLeader) .withStrategy(waitForCluster) .withStrategy(waitForHttp) .withStartupTimeout(Duration.ofMinutes(1)); - if (dockerImageName.getVersionPart().compareTo("v21.03.0") < 0) { - withAlphaArgument("whitelist", "0.0.0.0/0"); - } else { - withAlphaArgumentValues("security", "whitelist=0.0.0.0/0"); - } - - addExposedPorts(HTTP_PORT, GRPC_PORT); - } - - /** - * Adds an argument to the zero command. - * - * @param argument name of the argument - * @param value value, null if argument is a flag - * @return this - */ - public DgraphContainer withZeroArgument(@NonNull String argument, String value) { - addArgument(zeroArguments, argument, value); - return this; - } - - /** - * Adds a value to an argument list to the zero command. - * - * Some arguments of the zero command form a list of values, e.g. `audit` or `raft`. - * These values are separated by a ";". 
Setting multiple values for those arguments should - * be done via this method. - * - * @param argument name of the argument - * @param values values to add to the argument - * @return this - */ - public DgraphContainer withZeroArgumentValues(@NonNull String argument, @NonNull String... values) { - addArgumentValues(zeroArguments, argument, values); - return this; - } - - /** - * Adds an argument to the alpha command. - * - * @param argument name of the argument - * @param value value, null if argument is a flag - * @return this - */ - public DgraphContainer withAlphaArgument(@NonNull String argument, String value) { - addArgument(alphaArguments, argument, value); - return this; - } - - /** - * Adds a value to an argument list to the alpha command. - * - * Some arguments of the alpha command form a list of values, e.g. `audit` or `raft`. - * These values are separated by a ";". Setting multiple values for those arguments should - * be done via this method. - * - * @param argument name of the argument - * @param values values to add to the argument - * @return this - */ - public DgraphContainer withAlphaArgumentValues(@NonNull String argument, @NonNull String... values) { - addArgumentValues(alphaArguments, argument, values); - return this; - } - - private void addArgument(Map arguments, @NonNull String argument, String value) { - if (started) { - throw new IllegalStateException("The container started already, cannot amend command arguments"); - } - - arguments.put(argument, value); - } - - private void addArgumentValues(Map arguments, @NonNull String argument, @NonNull String... values) { - if (started) { - throw new IllegalStateException("The container started already, cannot amend command arguments"); - } - - StringJoiner joiner = new StringJoiner("; "); - Arrays.stream(values).forEach(joiner::add); - String value = joiner.toString(); - - if (arguments.containsKey(argument)) { - arguments.put(argument, arguments.get(argument) + "; " + value); - } else { - arguments.put(argument, value); - } - } - - /** - * Provides the command used to start the zero process. Command line arguments can be added - * by calling `withZeroArgument` and `withZeroArgumentValues` before calling this method. - * @return command string - */ - public @NonNull String getZeroCommand() { - return getCommand("dgraph zero", zeroArguments); - } - - /** - * Provides the command used to start the alpha process. Command line arguments can be added - * by calling `withAlphaArgument` and `withAlphaArgumentValues` before calling this method. 
- * @return command string - */ - public @NonNull String getAlphaCommand() { - return getCommand("dgraph alpha", alphaArguments); - } - - private @NonNull String getCommand(@NonNull String command, @NonNull Map arguments) { - StringJoiner joiner = new StringJoiner(" --"); - - arguments.entrySet().stream() - .sorted(Map.Entry.comparingByKey()) - .map(argument -> { - if (argument.getValue() == null) { - return argument.getKey(); - } else { - return argument.getKey() + " \"" + argument.getValue() + "\""; - } - }).forEach(joiner::add); - - if (joiner.length() == 0) { - return command; - } else { - return command + " --" + joiner; - } - } - - @Override - public void start() { - String zeroCommand = this.getZeroCommand(); - String alhpaCommand = this.getAlphaCommand(); - this.setCommand("/bin/bash", "-c", zeroCommand + " & " + alhpaCommand); - super.start(); - } - - @Override - public Set getLivenessCheckPortNumbers() { - return Stream.of(getHttpPort(), getGrpcPort()) - .map(this::getMappedPort) - .collect(toSet()); - } - - @Override - protected void configure() { } - - public int getHttpPort() { - return getMappedPort(HTTP_PORT); - } - - public int getGrpcPort() { - return getMappedPort(GRPC_PORT); - } - - public String getHttpUrl() { - return String.format("http://%s:%d", getHost(), getHttpPort()); - } - - public String getGrpcUrl() { - return String.format("%s:%d", getHost(), getGrpcPort()); - } - + if (dockerImageName.getVersionPart().compareTo("v21.03.0") < 0) { + withAlphaArgument("whitelist", "0.0.0.0/0"); + } else { + withAlphaArgumentValues("security", "whitelist=0.0.0.0/0"); + } + + addExposedPorts(HTTP_PORT, GRPC_PORT); + } + + /** + * Adds an argument to the zero command. + * + * @param argument name of the argument + * @param value value, null if argument is a flag + * @return this + */ + public DgraphContainer withZeroArgument(@NonNull String argument, String value) { + addArgument(zeroArguments, argument, value); + return this; + } + + /** + * Adds a value to an argument list to the zero command. + * + *
<p>
Some arguments of the zero command form a list of values, e.g. `audit` or `raft`. These + * values are separated by a ";". Setting multiple values for those arguments should be done via + * this method. + * + * @param argument name of the argument + * @param values values to add to the argument + * @return this + */ + public DgraphContainer withZeroArgumentValues( + @NonNull String argument, @NonNull String... values) { + addArgumentValues(zeroArguments, argument, values); + return this; + } + + /** + * Adds an argument to the alpha command. + * + * @param argument name of the argument + * @param value value, null if argument is a flag + * @return this + */ + public DgraphContainer withAlphaArgument(@NonNull String argument, String value) { + addArgument(alphaArguments, argument, value); + return this; + } + + /** + * Adds a value to an argument list to the alpha command. + * + *
<p>
Some arguments of the alpha command form a list of values, e.g. `audit` or `raft`. These + * values are separated by a ";". Setting multiple values for those arguments should be done via + * this method. + * + * @param argument name of the argument + * @param values values to add to the argument + * @return this + */ + public DgraphContainer withAlphaArgumentValues( + @NonNull String argument, @NonNull String... values) { + addArgumentValues(alphaArguments, argument, values); + return this; + } + + private void addArgument(Map arguments, @NonNull String argument, String value) { + if (started) { + throw new IllegalStateException( + "The container started already, cannot amend command arguments"); + } + + arguments.put(argument, value); + } + + private void addArgumentValues( + Map arguments, @NonNull String argument, @NonNull String... values) { + if (started) { + throw new IllegalStateException( + "The container started already, cannot amend command arguments"); + } + + StringJoiner joiner = new StringJoiner("; "); + Arrays.stream(values).forEach(joiner::add); + String value = joiner.toString(); + + if (arguments.containsKey(argument)) { + arguments.put(argument, arguments.get(argument) + "; " + value); + } else { + arguments.put(argument, value); + } + } + + /** + * Provides the command used to start the zero process. Command line arguments can be added by + * calling `withZeroArgument` and `withZeroArgumentValues` before calling this method. + * + * @return command string + */ + public @NonNull String getZeroCommand() { + return getCommand("dgraph zero", zeroArguments); + } + + /** + * Provides the command used to start the alpha process. Command line arguments can be added by + * calling `withAlphaArgument` and `withAlphaArgumentValues` before calling this method. 
+ * + * @return command string + */ + public @NonNull String getAlphaCommand() { + return getCommand("dgraph alpha", alphaArguments); + } + + private @NonNull String getCommand( + @NonNull String command, @NonNull Map arguments) { + StringJoiner joiner = new StringJoiner(" --"); + + arguments.entrySet().stream() + .sorted(Map.Entry.comparingByKey()) + .map( + argument -> { + if (argument.getValue() == null) { + return argument.getKey(); + } else { + return argument.getKey() + " \"" + argument.getValue() + "\""; + } + }) + .forEach(joiner::add); + + if (joiner.length() == 0) { + return command; + } else { + return command + " --" + joiner; + } + } + + @Override + public void start() { + String zeroCommand = this.getZeroCommand(); + String alhpaCommand = this.getAlphaCommand(); + this.setCommand("/bin/bash", "-c", zeroCommand + " & " + alhpaCommand); + super.start(); + } + + @Override + public Set getLivenessCheckPortNumbers() { + return Stream.of(getHttpPort(), getGrpcPort()).map(this::getMappedPort).collect(toSet()); + } + + @Override + protected void configure() {} + + public int getHttpPort() { + return getMappedPort(HTTP_PORT); + } + + public int getGrpcPort() { + return getMappedPort(GRPC_PORT); + } + + public String getHttpUrl() { + return String.format("http://%s:%d", getHost(), getHttpPort()); + } + + public String getGrpcUrl() { + return String.format("%s:%d", getHost(), getGrpcPort()); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java index abf9bf532ddd8..40b8e83b56d03 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/dgraph/DgraphGraphServiceTest.java @@ -1,5 +1,12 @@ package com.linkedin.metadata.graph.dgraph; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; +import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.GraphServiceTestBase; @@ -16,15 +23,6 @@ import io.grpc.ManagedChannel; import io.grpc.ManagedChannelBuilder; import io.grpc.MethodDescriptor; -import lombok.extern.slf4j.Slf4j; -import org.testcontainers.containers.output.Slf4jLogConsumer; -import org.testng.annotations.AfterClass; -import org.testng.annotations.AfterMethod; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.time.Duration; import java.util.Arrays; import java.util.Collections; @@ -32,89 +30,94 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; -import static com.linkedin.metadata.search.utils.QueryUtils.EMPTY_FILTER; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; +import javax.annotation.Nonnull; 
+import lombok.extern.slf4j.Slf4j; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testng.annotations.AfterClass; +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; @SuppressWarnings("ArraysAsListWithZeroOrOneArgument") @Slf4j public class DgraphGraphServiceTest extends GraphServiceTestBase { - private ManagedChannel _channel; - private DgraphGraphService _service; - private DgraphContainer _container; - - @Override - protected Duration getTestConcurrentOpTimeout() { - return Duration.ofMinutes(5); - } - - @BeforeClass - public void setup() { - _container = new DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0")) - .withTmpFs(Collections.singletonMap("/dgraph", "rw,noexec,nosuid,size=1g")) - .withStartupTimeout(Duration.ofMinutes(1)) - .withStartupAttempts(3); - checkContainerEngine(_container.getDockerClient()); - _container.start(); - Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(log); - _container.followOutput(logConsumer); - } - - @BeforeMethod - public void connect() { - LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); - _channel = ManagedChannelBuilder - .forAddress(_container.getHost(), _container.getGrpcPort()) - .usePlaintext() - .build(); - - // https://discuss.dgraph.io/t/dgraph-java-client-setting-deadlines-per-call/3056 - ClientInterceptor timeoutInterceptor = new ClientInterceptor() { - @Override - public ClientCall interceptCall( - MethodDescriptor method, CallOptions callOptions, Channel next) { - return next.newCall(method, callOptions.withDeadlineAfter(30, TimeUnit.SECONDS)); - } + private ManagedChannel _channel; + private DgraphGraphService _service; + private DgraphContainer _container; + + @Override + protected Duration getTestConcurrentOpTimeout() { + return Duration.ofMinutes(5); + } + + @BeforeClass + public void setup() { + _container = + new DgraphContainer(DgraphContainer.DEFAULT_IMAGE_NAME.withTag("v21.03.0")) + .withTmpFs(Collections.singletonMap("/dgraph", "rw,noexec,nosuid,size=1g")) + .withStartupTimeout(Duration.ofMinutes(1)) + .withStartupAttempts(3); + checkContainerEngine(_container.getDockerClient()); + _container.start(); + Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(log); + _container.followOutput(logConsumer); + } + + @BeforeMethod + public void connect() { + LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); + _channel = + ManagedChannelBuilder.forAddress(_container.getHost(), _container.getGrpcPort()) + .usePlaintext() + .build(); + + // https://discuss.dgraph.io/t/dgraph-java-client-setting-deadlines-per-call/3056 + ClientInterceptor timeoutInterceptor = + new ClientInterceptor() { + @Override + public ClientCall interceptCall( + MethodDescriptor method, CallOptions callOptions, Channel next) { + return next.newCall(method, callOptions.withDeadlineAfter(30, TimeUnit.SECONDS)); + } }; - DgraphGrpc.DgraphStub stub = DgraphGrpc.newStub(_channel).withInterceptors(timeoutInterceptor); - _service = new DgraphGraphService(lineageRegistry, new DgraphClient(stub)); - } - - @AfterMethod - public void disconnect() throws InterruptedException { - try { - _channel.shutdownNow(); - _channel.awaitTermination(10, TimeUnit.SECONDS); - } finally { - _channel = null; - _service = null; - } - } - - @AfterClass - public void tearDown() { - _container.stop(); - } - - @Nonnull - @Override - protected 
GraphService getGraphService() { - _service.clear(); - return _service; + DgraphGrpc.DgraphStub stub = DgraphGrpc.newStub(_channel).withInterceptors(timeoutInterceptor); + _service = new DgraphGraphService(lineageRegistry, new DgraphClient(stub)); + } + + @AfterMethod + public void disconnect() throws InterruptedException { + try { + _channel.shutdownNow(); + _channel.awaitTermination(10, TimeUnit.SECONDS); + } finally { + _channel = null; + _service = null; } - - @Override - protected void syncAfterWrite() { } - - @Test - public void testGetSchema() { - DgraphSchema schema = DgraphGraphService.getSchema("{\n" + } + + @AfterClass + public void tearDown() { + _container.stop(); + } + + @Nonnull + @Override + protected GraphService getGraphService() { + _service.clear(); + return _service; + } + + @Override + protected void syncAfterWrite() {} + + @Test + public void testGetSchema() { + DgraphSchema schema = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -156,45 +159,69 @@ public void testGetSchema() { + " }\n" + " ]\n" + " }"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); + assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); - assertEquals(schema.getTypes(), new HashMap>() {{ + assertEquals( + schema.getTypes(), + new HashMap>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); - }}); - - assertEquals(schema.getFields("ns:typeOne"), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); - assertEquals(schema.getFields("ns:typeTwo"), new HashSet<>(Arrays.asList("PredTwo"))); - assertEquals(schema.getFields("ns:unknown"), Collections.emptySet()); - - schema.ensureField("newType", "newField"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField"))); - assertEquals(schema.getTypes(), new HashMap>() {{ + } + }); + + assertEquals( + schema.getFields("ns:typeOne"), new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); + assertEquals(schema.getFields("ns:typeTwo"), new HashSet<>(Arrays.asList("PredTwo"))); + assertEquals(schema.getFields("ns:unknown"), Collections.emptySet()); + + schema.ensureField("newType", "newField"); + assertEquals( + schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField"))); + assertEquals( + schema.getTypes(), + new HashMap>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - - schema.ensureField("ns:typeOne", "otherField"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); - assertEquals(schema.getTypes(), new HashMap>() {{ + } + }); + + schema.ensureField("ns:typeOne", "otherField"); + assertEquals( + schema.getFields(), + new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); + assertEquals( + schema.getTypes(), + new HashMap>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo", "otherField"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - - schema.ensureField("ns:typeTwo", "PredTwo"); - assertEquals(schema.getFields(), new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); - assertEquals(schema.getTypes(), new HashMap>() 
{{ + } + }); + + schema.ensureField("ns:typeTwo", "PredTwo"); + assertEquals( + schema.getFields(), + new HashSet<>(Arrays.asList("PredOne", "PredTwo", "newField", "otherField"))); + assertEquals( + schema.getTypes(), + new HashMap>() { + { put("ns:typeOne", new HashSet<>(Arrays.asList("PredOne", "PredTwo", "otherField"))); put("ns:typeTwo", new HashSet<>(Arrays.asList("PredTwo"))); put("newType", new HashSet<>(Arrays.asList("newField"))); - }}); - } - - @Test - public void testGetSchemaIncomplete() { - DgraphSchema schemaWithNonListTypes = DgraphGraphService.getSchema("{\n" + } + }); + } + + @Test + public void testGetSchemaIncomplete() { + DgraphSchema schemaWithNonListTypes = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -208,9 +235,11 @@ public void testGetSchemaIncomplete() { + " ],\n" + " \"types\": \"not a list\"\n" + " }"); - assertTrue(schemaWithNonListTypes.isEmpty(), "Should be empty if type field is not a list"); + assertTrue(schemaWithNonListTypes.isEmpty(), "Should be empty if type field is not a list"); - DgraphSchema schemaWithoutTypes = DgraphGraphService.getSchema("{\n" + DgraphSchema schemaWithoutTypes = + DgraphGraphService.getSchema( + "{\n" + " \"schema\": [\n" + " {\n" + " \"predicate\": \"PredOne\"\n" @@ -223,570 +252,575 @@ public void testGetSchemaIncomplete() { + " }\n" + " ]" + " }"); - assertTrue(schemaWithoutTypes.isEmpty(), "Should be empty if no type field exists"); - - DgraphSchema schemaWithNonListSchema = DgraphGraphService.getSchema("{\n" - + " \"schema\": \"not a list\"" - + " }"); - assertTrue(schemaWithNonListSchema.isEmpty(), "Should be empty if schema field is not a list"); - - DgraphSchema schemaWithoutSchema = DgraphGraphService.getSchema("{ }"); - assertTrue(schemaWithoutSchema.isEmpty(), "Should be empty if no schema field exists"); - } - - @Test - public void testGetSchemaDgraph() { - // TODO: test that dgraph schema gets altered - } - - @Test - public void testGetFilterConditions() { - // no filters - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList()), - "" - ); - - // source type not supported without restricting relationship types - // there must be as many relation type filter names as there are relationships - assertEquals( - DgraphGraphService.getFilterConditions( - "sourceTypeFilter", - null, - Collections.emptyList(), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(, uid(sourceTypeFilter))\n" - + " )\n" - + " )" - ); - - // destination type - assertEquals( - DgraphGraphService.getFilterConditions( - null, - "destinationTypeFilter", - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationTypeFilter)\n" - + " )" - ); - - // source filter not supported without restricting relationship types - // there must be as many relation type filter names as there are relationships - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(, uid(sourceFilter))\n" - + " )\n" - + " )" - ); - assertEquals( - 
DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter"), - Arrays.asList("relationship")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter) AND uid_in(, uid(sourceFilter1)) AND " - + "uid_in(, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Collections.emptyList(), - Arrays.asList("RelationshipTypeFilter1", "RelationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " (\n" - + " uid(RelationshipTypeFilter1) AND uid_in(, uid(sourceFilter1)) AND " - + "uid_in(, uid(sourceFilter2)) OR\n" - + " uid(RelationshipTypeFilter2) AND uid_in(, uid(sourceFilter1)) AND " - + "uid_in(, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - - // destination filters - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Arrays.asList("destinationFilter"), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationFilter)\n" - + " )" - ); - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Arrays.asList("destinationFilter1", "destinationFilter2"), - Collections.emptyList(), - Collections.emptyList()), - "@filter(\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2)\n" - + " )" - ); - - // relationship type filters require relationship types - assertEquals( - DgraphGraphService.getFilterConditions( - null, - null, - Collections.emptyList(), - Collections.emptyList(), - Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " (\n" - + " uid(relationshipTypeFilter1) OR\n" - + " uid(relationshipTypeFilter2)\n" - + " )\n" - + " )" - ); - - // all filters at once - assertEquals( - DgraphGraphService.getFilterConditions( - "sourceTypeFilter", - "destinationTypeFilter", - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("destinationFilter1", "destinationFilter2"), - Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), - Arrays.asList("relationship1", "relationship2")), - "@filter(\n" - + " uid(destinationTypeFilter) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipTypeFilter1) AND uid_in(, uid(sourceTypeFilter)) AND " - + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2)) OR\n" - + " uid(relationshipTypeFilter2) AND uid_in(, uid(sourceTypeFilter)) AND " - + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2))\n" - + " )\n" - + " )" - ); - - // TODO: check getFilterConditions throws an exception when relationshipTypes and - // relationshipTypeFilterNames do not have the same size - } - - @Test - public void testGetRelationships() { - // no relationships - assertEquals( - DgraphGraphService.getRelationships( - null, - Collections.emptyList(), - Collections.emptyList()), - Collections.emptyList() - ); - - // one relationship but no filters - assertEquals( - DgraphGraphService.getRelationships( - null, - Collections.emptyList(), - Arrays.asList("relationship") - ), - Arrays.asList(" { }") - ); - - // more relationship and source type filter - assertEquals( - DgraphGraphService.getRelationships( - "sourceTypeFilter", - Collections.emptyList(), - Arrays.asList("relationship1", 
"~relationship2") - ), - Arrays.asList( - " @filter( uid(sourceTypeFilter) ) { }", - "<~relationship2> @filter( uid(sourceTypeFilter) ) { }" - ) - ); - - // more relationship, source type and source filters - assertEquals( - DgraphGraphService.getRelationships( - "sourceTypeFilter", - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("relationship1", "~relationship2") - ), - Arrays.asList( - " @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }", - "<~relationship2> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }" - ) - ); - - // more relationship and only source filters - assertEquals( - DgraphGraphService.getRelationships( - null, - Arrays.asList("sourceFilter1", "sourceFilter2"), - Arrays.asList("relationship1", "~relationship2", "relationship3") - ), - Arrays.asList( - " @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { }", - "<~relationship2> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { }", - " @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { }" - ) - ); - - // two relationship and only one source filter - assertEquals( - DgraphGraphService.getRelationships( - null, - Arrays.asList("sourceFilter"), - Arrays.asList("~relationship1", "~relationship2") - ), - Arrays.asList( - "<~relationship1> @filter( uid(sourceFilter) ) { }", - "<~relationship2> @filter( uid(sourceFilter) ) { }" - ) - ); - } - - @Test - public void testGetRelationshipCondition() { - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - null, - Collections.emptyList()), - "uid(relationshipFilter)" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Collections.emptyList()), - "uid(relationshipFilter) AND uid_in(, uid(destinationTypeFilter))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Arrays.asList("destinationFilter")), - "uid(relationshipFilter) AND uid_in(, uid(destinationTypeFilter)) AND " - + "uid_in(, uid(destinationFilter))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - "destinationTypeFilter", - Arrays.asList("destinationFilter1", "destinationFilter2")), - "uid(relationshipFilter) AND uid_in(, uid(destinationTypeFilter)) AND " - + "uid_in(, uid(destinationFilter1)) AND uid_in(, uid(destinationFilter2))" - ); - - assertEquals( - DgraphGraphService.getRelationshipCondition( - "relationship", - "relationshipFilter", - null, - Arrays.asList("destinationFilter1", "destinationFilter2")), - "uid(relationshipFilter) AND uid_in(, uid(destinationFilter1)) AND " - + "uid_in(, uid(destinationFilter2))" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesOutgoing() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.OUTGOING, - "query {\n" - + " sourceType as var(func: eq(, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(, \"dest-key\"))\n" - + " relationshipType1 as var(func: has(<~relationship1>))\n" - + " relationshipType2 as var(func: has(<~relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, 
destinationFilter2, destinationType, relationshipType1, relationshipType2), " - + "first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(<~relationship1>, uid(sourceType)) AND " - + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(<~relationship2>, uid(sourceType)) AND " - + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " \n" - + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " }\n" - + "}" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesIncoming() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.INCOMING, - "query {\n" - + " sourceType as var(func: eq(, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(, \"dest-key\"))\n" - + " relationshipType1 as var(func: has())\n" - + " relationshipType2 as var(func: has())\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " - + "first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(, uid(sourceType)) AND " - + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2)) OR\n" - + " uid(relationshipType2) AND uid_in(, uid(sourceType)) AND " - + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " \n" - + " @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " }\n" - + "}" - ); - } - - @Test - public void testGetQueryForRelatedEntitiesUndirected() { - doTestGetQueryForRelatedEntitiesDirection(RelationshipDirection.UNDIRECTED, - "query {\n" - + " sourceType as var(func: eq(, [\"sourceType\"]))\n" - + " destinationType as var(func: eq(, [\"destinationType\"]))\n" - + " sourceFilter1 as var(func: eq(, \"urn:ns:type:source-key\"))\n" - + " sourceFilter2 as var(func: eq(, \"source-key\"))\n" - + " destinationFilter1 as var(func: eq(, \"urn:ns:type:dest-key\"))\n" - + " destinationFilter2 as var(func: eq(, \"dest-key\"))\n" - + " relationshipType1 as var(func: has())\n" - + " relationshipType2 as var(func: has())\n" - + " relationshipType3 as var(func: has(<~relationship1>))\n" - + " relationshipType4 as var(func: has(<~relationship2>))\n" - + "\n" - + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, " - + "relationshipType1, relationshipType2, relationshipType3, relationshipType4), first: 100, offset: 0) @filter(\n" - + " uid(destinationType) AND\n" - + " uid(destinationFilter1) AND\n" - + " uid(destinationFilter2) AND\n" - + " (\n" - + " uid(relationshipType1) AND uid_in(, uid(sourceType)) AND " - + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2)) OR\n" - + " 
uid(relationshipType2) AND uid_in(, uid(sourceType)) AND " - + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2)) OR\n" - + " uid(relationshipType3) AND uid_in(<~relationship1>, uid(sourceType)) AND " - + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n" - + " uid(relationshipType4) AND uid_in(<~relationship2>, uid(sourceType)) AND " - + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n" - + " )\n" - + " ) {\n" - + " \n" - + " @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }\n" - + " }\n" - + "}" - ); - } - - private void doTestGetQueryForRelatedEntitiesDirection(@Nonnull RelationshipDirection direction, @Nonnull String expectedQuery) { - assertEquals( - DgraphGraphService.getQueryForRelatedEntities( - ImmutableList.of("sourceType"), - newFilter(new HashMap() {{ - put("urn", "urn:ns:type:source-key"); - put("key", "source-key"); - }}), - ImmutableList.of("destinationType"), - newFilter(new HashMap() {{ + assertTrue(schemaWithoutTypes.isEmpty(), "Should be empty if no type field exists"); + + DgraphSchema schemaWithNonListSchema = + DgraphGraphService.getSchema("{\n" + " \"schema\": \"not a list\"" + " }"); + assertTrue(schemaWithNonListSchema.isEmpty(), "Should be empty if schema field is not a list"); + + DgraphSchema schemaWithoutSchema = DgraphGraphService.getSchema("{ }"); + assertTrue(schemaWithoutSchema.isEmpty(), "Should be empty if no schema field exists"); + } + + @Test + public void testGetSchemaDgraph() { + // TODO: test that dgraph schema gets altered + } + + @Test + public void testGetFilterConditions() { + // no filters + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()), + ""); + + // source type not supported without restricting relationship types + // there must be as many relation type filter names as there are relationships + assertEquals( + DgraphGraphService.getFilterConditions( + "sourceTypeFilter", + null, + Collections.emptyList(), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(, uid(sourceTypeFilter))\n" + + " )\n" + + " )"); + + // destination type + assertEquals( + DgraphGraphService.getFilterConditions( + null, + "destinationTypeFilter", + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + " uid(destinationTypeFilter)\n" + " )"); + + // source filter not supported without restricting relationship types + // there must be as many relation type filter names as there are relationships + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(, uid(sourceFilter))\n" + + " )\n" + + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter1", 
"sourceFilter2"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter"), + Arrays.asList("relationship")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter) AND uid_in(, uid(sourceFilter1)) AND " + + "uid_in(, uid(sourceFilter2))\n" + + " )\n" + + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Arrays.asList("sourceFilter1", "sourceFilter2"), + Collections.emptyList(), + Arrays.asList("RelationshipTypeFilter1", "RelationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " (\n" + + " uid(RelationshipTypeFilter1) AND uid_in(, uid(sourceFilter1)) AND " + + "uid_in(, uid(sourceFilter2)) OR\n" + + " uid(RelationshipTypeFilter2) AND uid_in(, uid(sourceFilter1)) AND " + + "uid_in(, uid(sourceFilter2))\n" + + " )\n" + + " )"); + + // destination filters + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Arrays.asList("destinationFilter"), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + " uid(destinationFilter)\n" + " )"); + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Arrays.asList("destinationFilter1", "destinationFilter2"), + Collections.emptyList(), + Collections.emptyList()), + "@filter(\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2)\n" + + " )"); + + // relationship type filters require relationship types + assertEquals( + DgraphGraphService.getFilterConditions( + null, + null, + Collections.emptyList(), + Collections.emptyList(), + Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " (\n" + + " uid(relationshipTypeFilter1) OR\n" + + " uid(relationshipTypeFilter2)\n" + + " )\n" + + " )"); + + // all filters at once + assertEquals( + DgraphGraphService.getFilterConditions( + "sourceTypeFilter", + "destinationTypeFilter", + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("destinationFilter1", "destinationFilter2"), + Arrays.asList("relationshipTypeFilter1", "relationshipTypeFilter2"), + Arrays.asList("relationship1", "relationship2")), + "@filter(\n" + + " uid(destinationTypeFilter) AND\n" + + " uid(destinationFilter1) AND\n" + + " uid(destinationFilter2) AND\n" + + " (\n" + + " uid(relationshipTypeFilter1) AND uid_in(, uid(sourceTypeFilter)) AND " + + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2)) OR\n" + + " uid(relationshipTypeFilter2) AND uid_in(, uid(sourceTypeFilter)) AND " + + "uid_in(, uid(sourceFilter1)) AND uid_in(, uid(sourceFilter2))\n" + + " )\n" + + " )"); + + // TODO: check getFilterConditions throws an exception when relationshipTypes and + // relationshipTypeFilterNames do not have the same size + } + + @Test + public void testGetRelationships() { + // no relationships + assertEquals( + DgraphGraphService.getRelationships(null, Collections.emptyList(), Collections.emptyList()), + Collections.emptyList()); + + // one relationship but no filters + assertEquals( + DgraphGraphService.getRelationships( + null, Collections.emptyList(), Arrays.asList("relationship")), + Arrays.asList(" { }")); + + // more relationship and source type filter + assertEquals( + DgraphGraphService.getRelationships( + "sourceTypeFilter", + Collections.emptyList(), + Arrays.asList("relationship1", "~relationship2")), + Arrays.asList( + " @filter( uid(sourceTypeFilter) ) { }", + "<~relationship2> @filter( uid(sourceTypeFilter) ) { }")); + 
+ // more relationship, source type and source filters + assertEquals( + DgraphGraphService.getRelationships( + "sourceTypeFilter", + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("relationship1", "~relationship2")), + Arrays.asList( + " @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }", + "<~relationship2> @filter( uid(sourceTypeFilter) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { }")); + + // more relationship and only source filters + assertEquals( + DgraphGraphService.getRelationships( + null, + Arrays.asList("sourceFilter1", "sourceFilter2"), + Arrays.asList("relationship1", "~relationship2", "relationship3")), + Arrays.asList( + " @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { }", + "<~relationship2> @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { }", + " @filter( uid(sourceFilter1) AND uid(sourceFilter2) ) { }")); + + // two relationship and only one source filter + assertEquals( + DgraphGraphService.getRelationships( + null, Arrays.asList("sourceFilter"), Arrays.asList("~relationship1", "~relationship2")), + Arrays.asList( + "<~relationship1> @filter( uid(sourceFilter) ) { }", + "<~relationship2> @filter( uid(sourceFilter) ) { }")); + } + + @Test + public void testGetRelationshipCondition() { + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", "relationshipFilter", null, Collections.emptyList()), + "uid(relationshipFilter)"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", "relationshipFilter", "destinationTypeFilter", Collections.emptyList()), + "uid(relationshipFilter) AND uid_in(, uid(destinationTypeFilter))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + "destinationTypeFilter", + Arrays.asList("destinationFilter")), + "uid(relationshipFilter) AND uid_in(, uid(destinationTypeFilter)) AND " + + "uid_in(, uid(destinationFilter))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + "destinationTypeFilter", + Arrays.asList("destinationFilter1", "destinationFilter2")), + "uid(relationshipFilter) AND uid_in(, uid(destinationTypeFilter)) AND " + + "uid_in(, uid(destinationFilter1)) AND uid_in(, uid(destinationFilter2))"); + + assertEquals( + DgraphGraphService.getRelationshipCondition( + "relationship", + "relationshipFilter", + null, + Arrays.asList("destinationFilter1", "destinationFilter2")), + "uid(relationshipFilter) AND uid_in(, uid(destinationFilter1)) AND " + + "uid_in(, uid(destinationFilter2))"); + } + + @Test + public void testGetQueryForRelatedEntitiesOutgoing() { + doTestGetQueryForRelatedEntitiesDirection( + RelationshipDirection.OUTGOING, + "query {\n" + + " sourceType as var(func: eq(, [\"sourceType\"]))\n" + + " destinationType as var(func: eq(, [\"destinationType\"]))\n" + + " sourceFilter1 as var(func: eq(, \"urn:ns:type:source-key\"))\n" + + " sourceFilter2 as var(func: eq(, \"source-key\"))\n" + + " destinationFilter1 as var(func: eq(, \"urn:ns:type:dest-key\"))\n" + + " destinationFilter2 as var(func: eq(, \"dest-key\"))\n" + + " relationshipType1 as var(func: has(<~relationship1>))\n" + + " relationshipType2 as var(func: has(<~relationship2>))\n" + + "\n" + + " result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), " + + "first: 100, offset: 0) @filter(\n" + + " uid(destinationType) AND\n" + + " uid(destinationFilter1) AND\n" + + " 
+  @Test
+  public void testGetQueryForRelatedEntitiesOutgoing() {
+    doTestGetQueryForRelatedEntitiesDirection(
+        RelationshipDirection.OUTGOING,
+        "query {\n"
+            + "  sourceType as var(func: eq(<type>, [\"sourceType\"]))\n"
+            + "  destinationType as var(func: eq(<type>, [\"destinationType\"]))\n"
+            + "  sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n"
+            + "  sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n"
+            + "  destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n"
+            + "  destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n"
+            + "  relationshipType1 as var(func: has(<~relationship1>))\n"
+            + "  relationshipType2 as var(func: has(<~relationship2>))\n"
+            + "\n"
+            + "  result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), "
+            + "first: 100, offset: 0) @filter(\n"
+            + "    uid(destinationType) AND\n"
+            + "    uid(destinationFilter1) AND\n"
+            + "    uid(destinationFilter2) AND\n"
+            + "    (\n"
+            + "      uid(relationshipType1) AND uid_in(<~relationship1>, uid(sourceType)) AND "
+            + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n"
+            + "      uid(relationshipType2) AND uid_in(<~relationship2>, uid(sourceType)) AND "
+            + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n"
+            + "    )\n"
+            + "  ) {\n"
+            + "    <urn>\n"
+            + "    <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "    <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "  }\n"
+            + "}");
+  }
+
+  @Test
+  public void testGetQueryForRelatedEntitiesIncoming() {
+    doTestGetQueryForRelatedEntitiesDirection(
+        RelationshipDirection.INCOMING,
+        "query {\n"
+            + "  sourceType as var(func: eq(<type>, [\"sourceType\"]))\n"
+            + "  destinationType as var(func: eq(<type>, [\"destinationType\"]))\n"
+            + "  sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n"
+            + "  sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n"
+            + "  destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n"
+            + "  destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n"
+            + "  relationshipType1 as var(func: has(<relationship1>))\n"
+            + "  relationshipType2 as var(func: has(<relationship2>))\n"
+            + "\n"
+            + "  result (func: uid(destinationFilter1, destinationFilter2, destinationType, relationshipType1, relationshipType2), "
+            + "first: 100, offset: 0) @filter(\n"
+            + "    uid(destinationType) AND\n"
+            + "    uid(destinationFilter1) AND\n"
+            + "    uid(destinationFilter2) AND\n"
+            + "    (\n"
+            + "      uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND "
+            + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n"
+            + "      uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND "
+            + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2))\n"
+            + "    )\n"
+            + "  ) {\n"
+            + "    <urn>\n"
+            + "    <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "    <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "  }\n"
+            + "}");
+  }
+
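+  // Editorial note (illustrative sketch, not part of the original change): judging by the
+  // expected query below, UNDIRECTED is handled as the union of the OUTGOING and INCOMING
+  // cases, i.e. each relationship type contributes both its forward (<relationship>) and its
+  // reverse (<~relationship>) edge. Conceptually, the expansion is roughly (names assumed):
+  //
+  //   List<String> expanded =
+  //       relationshipTypes.stream()
+  //           .flatMap(t -> Stream.of("<" + t + ">", "<~" + t + ">"))
+  //           .collect(Collectors.toList());
+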
+  @Test
+  public void testGetQueryForRelatedEntitiesUndirected() {
+    doTestGetQueryForRelatedEntitiesDirection(
+        RelationshipDirection.UNDIRECTED,
+        "query {\n"
+            + "  sourceType as var(func: eq(<type>, [\"sourceType\"]))\n"
+            + "  destinationType as var(func: eq(<type>, [\"destinationType\"]))\n"
+            + "  sourceFilter1 as var(func: eq(<urn>, \"urn:ns:type:source-key\"))\n"
+            + "  sourceFilter2 as var(func: eq(<key>, \"source-key\"))\n"
+            + "  destinationFilter1 as var(func: eq(<urn>, \"urn:ns:type:dest-key\"))\n"
+            + "  destinationFilter2 as var(func: eq(<key>, \"dest-key\"))\n"
+            + "  relationshipType1 as var(func: has(<relationship1>))\n"
+            + "  relationshipType2 as var(func: has(<relationship2>))\n"
+            + "  relationshipType3 as var(func: has(<~relationship1>))\n"
+            + "  relationshipType4 as var(func: has(<~relationship2>))\n"
+            + "\n"
+            + "  result (func: uid(destinationFilter1, destinationFilter2, destinationType, "
+            + "relationshipType1, relationshipType2, relationshipType3, relationshipType4), first: 100, offset: 0) @filter(\n"
+            + "    uid(destinationType) AND\n"
+            + "    uid(destinationFilter1) AND\n"
+            + "    uid(destinationFilter2) AND\n"
+            + "    (\n"
+            + "      uid(relationshipType1) AND uid_in(<relationship1>, uid(sourceType)) AND "
+            + "uid_in(<relationship1>, uid(sourceFilter1)) AND uid_in(<relationship1>, uid(sourceFilter2)) OR\n"
+            + "      uid(relationshipType2) AND uid_in(<relationship2>, uid(sourceType)) AND "
+            + "uid_in(<relationship2>, uid(sourceFilter1)) AND uid_in(<relationship2>, uid(sourceFilter2)) OR\n"
+            + "      uid(relationshipType3) AND uid_in(<~relationship1>, uid(sourceType)) AND "
+            + "uid_in(<~relationship1>, uid(sourceFilter1)) AND uid_in(<~relationship1>, uid(sourceFilter2)) OR\n"
+            + "      uid(relationshipType4) AND uid_in(<~relationship2>, uid(sourceType)) AND "
+            + "uid_in(<~relationship2>, uid(sourceFilter1)) AND uid_in(<~relationship2>, uid(sourceFilter2))\n"
+            + "    )\n"
+            + "  ) {\n"
+            + "    <urn>\n"
+            + "    <relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "    <relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "    <~relationship1> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "    <~relationship2> @filter( uid(sourceType) AND uid(sourceFilter1) AND uid(sourceFilter2) ) { <uid> }\n"
+            + "  }\n"
+            + "}");
+  }
+
+  private void doTestGetQueryForRelatedEntitiesDirection(
+      @Nonnull RelationshipDirection direction, @Nonnull String expectedQuery) {
+    assertEquals(
+        DgraphGraphService.getQueryForRelatedEntities(
+            ImmutableList.of("sourceType"),
+            newFilter(
+                new HashMap<String, String>() {
+                  {
+                    put("urn", "urn:ns:type:source-key");
+                    put("key", "source-key");
+                  }
+                }),
+            ImmutableList.of("destinationType"),
+            newFilter(
+                new HashMap<String, String>() {
+                  {
+                    put("urn", "urn:ns:type:dest-key");
+                    put("key", "dest-key");
+                  }
+                }),
+            Arrays.asList("relationship1", "relationship2"),
+            newRelationshipFilter(EMPTY_FILTER, direction),
+            0,
+            100),
+        expectedQuery);
+  }
+
+  @Test
+  public void testGetDestinationUrnsFromResponseData() {
+    // no results
+    assertEquals(
+        DgraphGraphService.getRelatedEntitiesFromResponseData(
+            new HashMap<String, Object>() {
+              {
+                put("result", Collections.emptyList());
+              }
+            }),
+        Collections.emptyList());
+
+    // one result and one relationship with two sources
+    assertEquals(
+        DgraphGraphService.getRelatedEntitiesFromResponseData(
+            new HashMap<String, Object>() {
+              {
+                put(
+                    "result",
+                    Arrays.asList(
+                        new HashMap<String, Object>() {
+                          {
                             put("urn", "urn:ns:type:dest-key");
-                            put("key", "dest-key");
-                        }}),
-                Arrays.asList("relationship1", "relationship2"),
-                newRelationshipFilter(EMPTY_FILTER, direction),
-                0, 100
-        ),
-        expectedQuery
-    );
-  }
-
-  @Test
-  public void testGetDestinationUrnsFromResponseData() {
-    // no results
-    assertEquals(
-            DgraphGraphService.getRelatedEntitiesFromResponseData(
-                    new HashMap<String, Object>() {{
-                        put("result", Collections.emptyList());
-                    }}
-            ),
-            Collections.emptyList()
-    );
-
-    // one result and one relationship with two sources
-    assertEquals(
-            DgraphGraphService.getRelatedEntitiesFromResponseData(
-
new HashMap() {{ - put("result", Arrays.asList( - new HashMap() {{ - put("urn", "urn:ns:type:dest-key-1"); - put("~pred1", Arrays.asList( - new HashMap() {{ - put("uid", "0x1"); - }}, - new HashMap() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap() {{ - put("urn", "urn:ns:type:dest-key-2"); - put("~pred1", Arrays.asList( - new HashMap() {{ - put("uid", "0x2"); - }} - )); - }}, - new HashMap() {{ - put("urn", "urn:ns:type:dest-key-3"); - put("pred1", Arrays.asList( - new HashMap() {{ - put("uid", "0x3"); - }} - )); - put("~pred1", Arrays.asList( - new HashMap() {{ - put("uid", "0x1"); - }}, - new HashMap() {{ - put("uid", "0x4"); - }} - )); - }}, - new HashMap() {{ - put("urn", "urn:ns:type:dest-key-4"); - put("pred2", Arrays.asList( - new HashMap() {{ - put("uid", "0x5"); - }} - )); - }} - )); - }} - ), - Arrays.asList( - new RelatedEntity("pred1", "urn:ns:type:dest-key-1"), - new RelatedEntity("pred1", "urn:ns:type:dest-key-2"), - new RelatedEntity("pred1", "urn:ns:type:dest-key-3"), - new RelatedEntity("pred2", "urn:ns:type:dest-key-4") - ), - RELATED_ENTITY_COMPARATOR - ); - } - - @Override - public void testPopulatedGraphServiceGetLineageMultihop() { - // TODO: Remove this overridden method once the multihop for dGraph is implemented! - } + put( + "~pred", + Arrays.asList( + new HashMap() { + { + put("uid", "0x1"); + } + }, + new HashMap() { + { + put("uid", "0x2"); + } + })); + } + })); + } + }), + Arrays.asList(new RelatedEntity("pred", "urn:ns:type:dest-key"))); + + // multiple results and one relationship + assertEquals( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap() { + { + put( + "result", + Arrays.asList( + new HashMap() { + { + put("urn", "urn:ns:type:dest-key-1"); + put( + "~pred", + Arrays.asList( + new HashMap() { + { + put("uid", "0x1"); + } + }, + new HashMap() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap() { + { + put("urn", "urn:ns:type:dest-key-2"); + put( + "~pred", + Arrays.asList( + new HashMap() { + { + put("uid", "0x2"); + } + })); + } + })); + } + }), + Arrays.asList( + new RelatedEntity("pred", "urn:ns:type:dest-key-1"), + new RelatedEntity("pred", "urn:ns:type:dest-key-2"))); + + // multiple results and relationships + assertEqualsAnyOrder( + DgraphGraphService.getRelatedEntitiesFromResponseData( + new HashMap() { + { + put( + "result", + Arrays.asList( + new HashMap() { + { + put("urn", "urn:ns:type:dest-key-1"); + put( + "~pred1", + Arrays.asList( + new HashMap() { + { + put("uid", "0x1"); + } + }, + new HashMap() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap() { + { + put("urn", "urn:ns:type:dest-key-2"); + put( + "~pred1", + Arrays.asList( + new HashMap() { + { + put("uid", "0x2"); + } + })); + } + }, + new HashMap() { + { + put("urn", "urn:ns:type:dest-key-3"); + put( + "pred1", + Arrays.asList( + new HashMap() { + { + put("uid", "0x3"); + } + })); + put( + "~pred1", + Arrays.asList( + new HashMap() { + { + put("uid", "0x1"); + } + }, + new HashMap() { + { + put("uid", "0x4"); + } + })); + } + }, + new HashMap() { + { + put("urn", "urn:ns:type:dest-key-4"); + put( + "pred2", + Arrays.asList( + new HashMap() { + { + put("uid", "0x5"); + } + })); + } + })); + } + }), + Arrays.asList( + new RelatedEntity("pred1", "urn:ns:type:dest-key-1"), + new RelatedEntity("pred1", "urn:ns:type:dest-key-2"), + new RelatedEntity("pred1", "urn:ns:type:dest-key-3"), + new RelatedEntity("pred2", "urn:ns:type:dest-key-4")), + RELATED_ENTITY_COMPARATOR); + } + + @Override + public void 
testPopulatedGraphServiceGetLineageMultihop() { + // TODO: Remove this overridden method once the multihop for dGraph is implemented! + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java index 6f63209f9c380..f1113368601c6 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jGraphServiceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.graph.neo4j; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.FabricType; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.DataPlatformUrn; @@ -18,7 +21,12 @@ import com.linkedin.metadata.query.filter.RelationshipFilter; import java.util.Arrays; import java.util.Collections; - +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.neo4j.driver.Driver; import org.neo4j.driver.GraphDatabase; import org.testng.SkipException; @@ -27,17 +35,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.testng.Assert.assertEquals; - - public class Neo4jGraphServiceTest extends GraphServiceTestBase { private Neo4jTestServerBuilder _serverBuilder; @@ -51,7 +48,8 @@ public void init() { _serverBuilder = new Neo4jTestServerBuilder(); _serverBuilder.newServer(); _driver = GraphDatabase.driver(_serverBuilder.boltURI()); - _client = new Neo4jGraphService(new LineageRegistry(SnapshotEntityRegistry.getInstance()), _driver); + _client = + new Neo4jGraphService(new LineageRegistry(SnapshotEntityRegistry.getInstance()), _driver); _client.clear(); } @@ -66,17 +64,16 @@ public void tearDown() { } @Override - protected @Nonnull - GraphService getGraphService() { + protected @Nonnull GraphService getGraphService() { return _client; } @Override - protected void syncAfterWrite() { - } + protected void syncAfterWrite() {} @Override - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { // https://github.com/datahub-project/datahub/issues/3118 // Neo4jGraphService produces duplicates, which is here ignored until fixed // actual.count and actual.total not tested due to duplicates @@ -85,20 +82,20 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie } @Override - protected void assertEqualsAnyOrder(List actual, List expected, Comparator comparator) { + protected void assertEqualsAnyOrder( + List actual, List expected, Comparator comparator) { // https://github.com/datahub-project/datahub/issues/3118 // Neo4jGraphService produces duplicates, which is here ignored until fixed - assertEquals( - new HashSet<>(actual), - new HashSet<>(expected) - ); + assertEquals(new HashSet<>(actual), new HashSet<>(expected)); } @Override - public void testFindRelatedEntitiesSourceType(String datasetType, - List relationshipTypes, - RelationshipFilter 
relationships, - List expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String datasetType, + List relationshipTypes, + RelationshipFilter relationships, + List expectedRelatedEntities) + throws Exception { if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3119 throw new SkipException("Neo4jGraphService does not support empty source type"); @@ -108,14 +105,17 @@ public void testFindRelatedEntitiesSourceType(String datasetType, // only test cases with "user" type fail due to this bug throw new SkipException("Neo4jGraphService does not apply source / destination types"); } - super.testFindRelatedEntitiesSourceType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesSourceType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationType(String datasetType, - List relationshipTypes, - RelationshipFilter relationships, - List expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String datasetType, + List relationshipTypes, + RelationshipFilter relationships, + List expectedRelatedEntities) + throws Exception { if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3119 throw new SkipException("Neo4jGraphService does not support empty destination type"); @@ -125,7 +125,8 @@ public void testFindRelatedEntitiesDestinationType(String datasetType, // only test cases with "HasOwner" relatioship fail due to this bug throw new SkipException("Neo4jGraphService does not apply source / destination types"); } - super.testFindRelatedEntitiesDestinationType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Test @@ -160,7 +161,8 @@ public void testRemoveEdgesFromNodeNoRelationshipTypes() { @Override public void testConcurrentAddEdge() { // https://github.com/datahub-project/datahub/issues/3141 - throw new SkipException("Neo4jGraphService does not manage to add all edges added concurrently"); + throw new SkipException( + "Neo4jGraphService does not manage to add all edges added concurrently"); } @Test @@ -179,28 +181,42 @@ public void testConcurrentRemoveNodes() { @Test public void testRemoveEdge() throws Exception { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); TagUrn tagUrn = new TagUrn("newTag"); Edge edge = new Edge(datasetUrn, tagUrn, TAG_RELATIONSHIP, null, null, null, null, null); getGraphService().addEdge(edge); - RelatedEntitiesResult result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + RelatedEntitiesResult result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, 
RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 1); getGraphService().removeEdge(edge); - result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 0); } private Set getPathUrnArraysFromLineageResult(EntityLineageResult result) { - return result.getRelationships() - .stream() + return result.getRelationships().stream() .map(x -> x.getPaths().get(0)) .collect(Collectors.toSet()); } @@ -209,22 +225,23 @@ private Set getPathUrnArraysFromLineageResult(EntityLineageResult resu public void testGetLineage() { GraphService service = getGraphService(); - List edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), - - // another path between d2 and d5 which is shorter - // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 - new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, null, null), - new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null) - ); + List edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d5 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFiveUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null), + + // another path between d2 and d5 which is shorter + // d1 <-DownstreamOf- d4 <-DownstreamOf- d5 + new Edge(datasetFourUrn, datasetOneUrn, downstreamOf, 13L, null, 13L, null, null), + new Edge(datasetFiveUrn, datasetFourUrn, downstreamOf, 13L, null, 13L, null, null)); edges.forEach(service::addEdge); // simple path finding - final var upstreamLineageDataset3Hop3 = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + final var upstreamLineageDataset3Hop3 = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); assertEquals(upstreamLineageDataset3Hop3.getTotal().intValue(), 3); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDataset3Hop3), @@ -234,7 +251,8 @@ public void testGetLineage() { new UrnArray(datasetThreeUrn, datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); // simple path finding - final var upstreamLineageDatasetFiveHop2 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + final var upstreamLineageDatasetFiveHop2 = + service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 2); 
assertEquals(upstreamLineageDatasetFiveHop2.getTotal().intValue(), 4); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDatasetFiveHop2), @@ -244,8 +262,10 @@ public void testGetLineage() { new UrnArray(datasetFiveUrn, datasetFourUrn), new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); - // there are two paths from p5 to p1, one longer and one shorter, and the longer one is discarded from result - final var upstreamLineageDataset5Hop5 = service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); + // there are two paths from p5 to p1, one longer and one shorter, and the longer one is + // discarded from result + final var upstreamLineageDataset5Hop5 = + service.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 1000, 5); assertEquals(upstreamLineageDataset5Hop5.getTotal().intValue(), 5); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageDataset5Hop5), @@ -257,7 +277,8 @@ public void testGetLineage() { new UrnArray(datasetFiveUrn, datasetFourUrn, datasetOneUrn))); // downstream lookup - final var downstreamLineageDataset1Hop2 = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); + final var downstreamLineageDataset1Hop2 = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 2); assertEquals(downstreamLineageDataset1Hop2.getTotal().intValue(), 4); assertEquals( getPathUrnArraysFromLineageResult(downstreamLineageDataset1Hop2), @@ -272,17 +293,18 @@ public void testGetLineage() { public void testGetLineageTimeFilterQuery() throws Exception { GraphService service = getGraphService(); - List edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - new Edge(datasetFourUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null) - ); + List edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 <-DownstreamOf- d4 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 1L, null, 3L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 5L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + new Edge(datasetFourUrn, datasetThreeUrn, downstreamOf, 11L, null, null, null, null)); edges.forEach(service::addEdge); // no time filtering - EntityLineageResult upstreamLineageTwoHops = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); + EntityLineageResult upstreamLineageTwoHops = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2); assertEquals(upstreamLineageTwoHops.getTotal().intValue(), 2); assertEquals(upstreamLineageTwoHops.getRelationships().size(), 2); assertEquals( @@ -292,16 +314,17 @@ public void testGetLineageTimeFilterQuery() throws Exception { new UrnArray(datasetFourUrn, datasetThreeUrn, datasetTwoUrn))); // with time filtering - EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); + EntityLineageResult upstreamLineageTwoHopsWithTimeFilter = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 1000, 2, 10L, 12L); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getTotal().intValue(), 1); assertEquals(upstreamLineageTwoHopsWithTimeFilter.getRelationships().size(), 
1); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageTwoHopsWithTimeFilter), - Set.of( - new UrnArray(datasetFourUrn, datasetThreeUrn))); + Set.of(new UrnArray(datasetFourUrn, datasetThreeUrn))); // with time filtering - EntityLineageResult upstreamLineageTimeFilter = service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); + EntityLineageResult upstreamLineageTimeFilter = + service.getLineage(datasetTwoUrn, LineageDirection.UPSTREAM, 0, 1000, 4, 2L, 6L); assertEquals(upstreamLineageTimeFilter.getTotal().intValue(), 2); assertEquals(upstreamLineageTimeFilter.getRelationships().size(), 2); assertEquals( @@ -311,32 +334,33 @@ public void testGetLineageTimeFilterQuery() throws Exception { new UrnArray(datasetTwoUrn, dataJobOneUrn, datasetOneUrn))); // with time filtering - EntityLineageResult downstreamLineageTimeFilter = service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); + EntityLineageResult downstreamLineageTimeFilter = + service.getLineage(datasetOneUrn, LineageDirection.DOWNSTREAM, 0, 1000, 4, 0L, 4L); assertEquals(downstreamLineageTimeFilter.getTotal().intValue(), 1); assertEquals(downstreamLineageTimeFilter.getRelationships().size(), 1); assertEquals( getPathUrnArraysFromLineageResult(downstreamLineageTimeFilter), - Set.of( - new UrnArray(datasetOneUrn, dataJobOneUrn))); + Set.of(new UrnArray(datasetOneUrn, dataJobOneUrn))); } @Test public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { GraphService service = getGraphService(); - List edges = Arrays.asList( - // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 - new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), - new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), + List edges = + Arrays.asList( + // d1 <-Consumes- dj1 -Produces-> d2 <-DownstreamOf- d3 + new Edge(dataJobOneUrn, datasetOneUrn, consumes, 5L, null, 5L, null, null), + new Edge(dataJobOneUrn, datasetTwoUrn, produces, 7L, null, 7L, null, null), + new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, 9L, null, null, null, null), - // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) - new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null) - ); + // d1 <-DownstreamOf- d3 (shorter path from d3 to d1, but with very old time) + new Edge(datasetThreeUrn, datasetOneUrn, downstreamOf, 1L, null, 2L, null, null)); edges.forEach(service::addEdge); // no time filtering, shorter path from d3 to d1 is returned - EntityLineageResult upstreamLineageNoTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); + EntityLineageResult upstreamLineageNoTimeFiltering = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3); assertEquals( getPathUrnArraysFromLineageResult(upstreamLineageNoTimeFiltering), Set.of( @@ -345,7 +369,8 @@ public void testGetLineageTimeFilteringSkipsShorterButNonMatchingPaths() { new UrnArray(datasetThreeUrn, datasetOneUrn))); // with time filtering, shorter path from d3 to d1 is excluded so longer path is returned - EntityLineageResult upstreamLineageTimeFiltering = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); + EntityLineageResult upstreamLineageTimeFiltering = + service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 1000, 3, 3L, 17L); assertEquals( 
getPathUrnArraysFromLineageResult(upstreamLineageTimeFiltering), Set.of( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java index ba4e4cec37914..fa04de340e12f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/neo4j/Neo4jTestServerBuilder.java @@ -1,9 +1,8 @@ package com.linkedin.metadata.graph.neo4j; +import apoc.path.PathExplorer; import java.io.File; import java.net.URI; - -import apoc.path.PathExplorer; import org.neo4j.graphdb.GraphDatabaseService; import org.neo4j.harness.Neo4j; import org.neo4j.harness.Neo4jBuilder; @@ -19,9 +18,7 @@ private Neo4jTestServerBuilder(Neo4jBuilder builder) { } public Neo4jTestServerBuilder() { - this(new InProcessNeo4jBuilder() - .withProcedure(PathExplorer.class) - ); + this(new InProcessNeo4jBuilder().withProcedure(PathExplorer.class)); } public Neo4jTestServerBuilder(File workingDirectory) { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java index baed3ade0d207..9fc9490bfd7ef 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/ESGraphQueryDAOTest.java @@ -23,7 +23,8 @@ public class ESGraphQueryDAOTest { - private static final String TEST_QUERY_FILE = "elasticsearch/sample_filters/lineage_query_filters_1.json"; + private static final String TEST_QUERY_FILE = + "elasticsearch/sample_filters/lineage_query_filters_1.json"; @Test private static void testGetQueryForLineageFullArguments() throws Exception { @@ -32,20 +33,19 @@ private static void testGetQueryForLineageFullArguments() throws Exception { String expectedQuery = Resources.toString(url, StandardCharsets.UTF_8); List urns = new ArrayList<>(); - List edgeInfos = new ArrayList<>(ImmutableList.of( - new LineageRegistry.EdgeInfo("DownstreamOf", RelationshipDirection.INCOMING, Constants.DATASET_ENTITY_NAME) - )); + List edgeInfos = + new ArrayList<>( + ImmutableList.of( + new LineageRegistry.EdgeInfo( + "DownstreamOf", + RelationshipDirection.INCOMING, + Constants.DATASET_ENTITY_NAME))); GraphFilters graphFilters = new GraphFilters(ImmutableList.of(Constants.DATASET_ENTITY_NAME)); Long startTime = 0L; Long endTime = 1L; - QueryBuilder builder = ESGraphQueryDAO.getQueryForLineage( - urns, - edgeInfos, - graphFilters, - startTime, - endTime - ); + QueryBuilder builder = + ESGraphQueryDAO.getQueryForLineage(urns, edgeInfos, graphFilters, startTime, endTime); Assert.assertEquals(builder.toString(), expectedQuery); } @@ -59,73 +59,51 @@ private static void testAddEdgeToPaths() { // Case 0: Add with no existing paths. Map nodePaths = new HashMap<>(); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - UrnArrayArray expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + UrnArrayArray expectedPathsToChild = + new UrnArrayArray(ImmutableList.of(new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 1: No paths to parent. 
nodePaths = new HashMap<>(); - nodePaths.put(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,Other,PROD)"), new UrnArrayArray()); + nodePaths.put( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,Other,PROD)"), + new UrnArrayArray()); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray(ImmutableList.of(new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 2: 1 Existing Path to Parent Node nodePaths = new HashMap<>(); - Urn testParentParent = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent,PROD)"); - UrnArrayArray existingPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )) - )); + Urn testParentParent = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent,PROD)"); + UrnArrayArray existingPathsToParent = + new UrnArrayArray( + ImmutableList.of(new UrnArray(ImmutableList.of(testParentParent, testParent)))); nodePaths.put(testParent, existingPathsToParent); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 3: > 1 Existing Paths to Parent Node nodePaths = new HashMap<>(); - Urn testParentParent2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent2,PROD)"); - UrnArrayArray existingPathsToParent2 = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + Urn testParentParent2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,TestParent2,PROD)"); + UrnArrayArray existingPathsToParent2 = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); nodePaths.put(testParent, existingPathsToParent2); ESGraphQueryDAO.addEdgeToPaths(nodePaths, testParent, testChild); - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)), + new UrnArray(ImmutableList.of(testParentParent2, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); // Case 4: Build graph from empty by adding multiple edges @@ -139,34 +117,23 @@ private static void testAddEdgeToPaths() { Assert.assertNull(nodePaths.get(testParentParent2)); // Verify paths to testParent - UrnArrayArray expectedPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + UrnArrayArray expectedPathsToParent = + new 
UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); Assert.assertEquals(nodePaths.get(testParent), expectedPathsToParent); // Verify paths to testChild - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent, testChild)), + new UrnArray(ImmutableList.of(testParentParent2, testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); - // Case 5: Mainly documentation: Verify that if you build the graph out of order bad things happen. + // Case 5: Mainly documentation: Verify that if you build the graph out of order bad things + // happen. // Also test duplicate edge addition nodePaths = new HashMap<>(); // Add edge to testChild first! Before path to testParent has been constructed. @@ -182,29 +149,19 @@ private static void testAddEdgeToPaths() { Assert.assertNull(nodePaths.get(testParentParent2)); // Verify paths to testParent - expectedPathsToParent = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParentParent, - testParent - )), - new UrnArray(ImmutableList.of( - testParentParent2, - testParent - )) - )); + expectedPathsToParent = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParentParent, testParent)), + new UrnArray(ImmutableList.of(testParentParent2, testParent)))); Assert.assertEquals(nodePaths.get(testParent), expectedPathsToParent); // Verify paths to testChild are INCORRECT: partial & duplicated - expectedPathsToChild = new UrnArrayArray(ImmutableList.of( - new UrnArray(ImmutableList.of( - testParent, - testChild - )), - new UrnArray(ImmutableList.of( - testParent, - testChild - )) - )); + expectedPathsToChild = + new UrnArrayArray( + ImmutableList.of( + new UrnArray(ImmutableList.of(testParent, testChild)), + new UrnArray(ImmutableList.of(testParent, testChild)))); Assert.assertEquals(nodePaths.get(testChild), expectedPathsToChild); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java index 0ce43c9d31571..2f8fba0083aa7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/SearchGraphServiceTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.graph.search; +import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; +import static com.linkedin.metadata.search.utils.QueryUtils.*; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.FabricType; import com.linkedin.common.urn.DataPlatformUrn; import com.linkedin.common.urn.DatasetUrn; @@ -26,6 +30,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import io.datahubproject.test.search.SearchTestUtils; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import javax.annotation.Nonnull; import org.junit.Assert; import 
org.opensearch.client.RestHighLevelClient; import org.testng.SkipException; @@ -33,27 +43,16 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.List; - -import static com.linkedin.metadata.graph.elastic.ElasticSearchGraphService.INDEX_NAME; -import static com.linkedin.metadata.search.utils.QueryUtils.*; -import static org.testng.Assert.assertEquals; - -abstract public class SearchGraphServiceTestBase extends GraphServiceTestBase { +public abstract class SearchGraphServiceTestBase extends GraphServiceTestBase { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); private final IndexConvention _indexConvention = new IndexConventionImpl(null); private final String _indexName = _indexConvention.getIndexName(INDEX_NAME); @@ -76,9 +75,19 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchGraphService buildService() { LineageRegistry lineageRegistry = new LineageRegistry(SnapshotEntityRegistry.getInstance()); - ESGraphQueryDAO readDAO = new ESGraphQueryDAO(getSearchClient(), lineageRegistry, _indexConvention, GraphQueryConfiguration.testDefaults); + ESGraphQueryDAO readDAO = + new ESGraphQueryDAO( + getSearchClient(), + lineageRegistry, + _indexConvention, + GraphQueryConfiguration.testDefaults); ESGraphWriteDAO writeDAO = new ESGraphWriteDAO(_indexConvention, getBulkProcessor(), 1); - return new ElasticSearchGraphService(lineageRegistry, getBulkProcessor(), _indexConvention, writeDAO, readDAO, + return new ElasticSearchGraphService( + lineageRegistry, + getBulkProcessor(), + _indexConvention, + writeDAO, + readDAO, getIndexBuilder()); } @@ -94,7 +103,8 @@ protected void syncAfterWrite() throws Exception { } @Override - protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitiesResult expected) { + protected void assertEqualsAnyOrder( + RelatedEntitiesResult actual, RelatedEntitiesResult expected) { // https://github.com/datahub-project/datahub/issues/3115 // ElasticSearchGraphService produces duplicates, which is here ignored until fixed // actual.count and actual.total not tested due to duplicates @@ -103,112 +113,160 @@ protected void assertEqualsAnyOrder(RelatedEntitiesResult actual, RelatedEntitie } @Override - protected void assertEqualsAnyOrder(List actual, List expected, Comparator comparator) { + protected void assertEqualsAnyOrder( + List actual, List expected, Comparator comparator) { // https://github.com/datahub-project/datahub/issues/3115 // ElasticSearchGraphService produces duplicates, which is here ignored until fixed assertEquals(new HashSet<>(actual), new HashSet<>(expected)); } @Override - public void testFindRelatedEntitiesSourceEntityFilter(Filter sourceEntityFilter, List relationshipTypes, - RelationshipFilter relationships, List expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceEntityFilter( + Filter sourceEntityFilter, + List relationshipTypes, + RelationshipFilter relationships, + List expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == 
RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testFindRelatedEntitiesSourceEntityFilter(sourceEntityFilter, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesSourceEntityFilter( + sourceEntityFilter, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationEntityFilter(Filter destinationEntityFilter, - List relationshipTypes, RelationshipFilter relationships, List expectedRelatedEntities) + public void testFindRelatedEntitiesDestinationEntityFilter( + Filter destinationEntityFilter, + List relationshipTypes, + RelationshipFilter relationships, + List expectedRelatedEntities) throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testFindRelatedEntitiesDestinationEntityFilter(destinationEntityFilter, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationEntityFilter( + destinationEntityFilter, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesSourceType(String datasetType, List relationshipTypes, - RelationshipFilter relationships, List expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesSourceType( + String datasetType, + List relationshipTypes, + RelationshipFilter relationships, + List expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } if (datasetType != null && datasetType.isEmpty()) { // https://github.com/datahub-project/datahub/issues/3116 throw new SkipException("ElasticSearchGraphService does not support empty source type"); } - super.testFindRelatedEntitiesSourceType(datasetType, relationshipTypes, relationships, expectedRelatedEntities); + super.testFindRelatedEntitiesSourceType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Override - public void testFindRelatedEntitiesDestinationType(String datasetType, List relationshipTypes, - RelationshipFilter relationships, List expectedRelatedEntities) throws Exception { + public void testFindRelatedEntitiesDestinationType( + String datasetType, + List relationshipTypes, + RelationshipFilter relationships, + List expectedRelatedEntities) + throws Exception { if (relationships.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } if (datasetType != null && datasetType.isEmpty()) { // 
https://github.com/datahub-project/datahub/issues/3116 throw new SkipException("ElasticSearchGraphService does not support empty destination type"); } - super.testFindRelatedEntitiesDestinationType(datasetType, relationshipTypes, relationships, - expectedRelatedEntities); + super.testFindRelatedEntitiesDestinationType( + datasetType, relationshipTypes, relationships, expectedRelatedEntities); } @Test @Override public void testFindRelatedEntitiesNoRelationshipTypes() { // https://github.com/datahub-project/datahub/issues/3117 - throw new SkipException("ElasticSearchGraphService does not support empty list of relationship types"); + throw new SkipException( + "ElasticSearchGraphService does not support empty list of relationship types"); } @Override - public void testRemoveEdgesFromNode(@Nonnull Urn nodeToRemoveFrom, @Nonnull List relationTypes, - @Nonnull RelationshipFilter relationshipFilter, List expectedOutgoingRelatedUrnsBeforeRemove, + public void testRemoveEdgesFromNode( + @Nonnull Urn nodeToRemoveFrom, + @Nonnull List relationTypes, + @Nonnull RelationshipFilter relationshipFilter, + List expectedOutgoingRelatedUrnsBeforeRemove, List expectedIncomingRelatedUrnsBeforeRemove, List expectedOutgoingRelatedUrnsAfterRemove, - List expectedIncomingRelatedUrnsAfterRemove) throws Exception { + List expectedIncomingRelatedUrnsAfterRemove) + throws Exception { if (relationshipFilter.getDirection() == RelationshipDirection.UNDIRECTED) { // https://github.com/datahub-project/datahub/issues/3114 - throw new SkipException("ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); + throw new SkipException( + "ElasticSearchGraphService does not implement UNDIRECTED relationship filter"); } - super.testRemoveEdgesFromNode(nodeToRemoveFrom, relationTypes, relationshipFilter, - expectedOutgoingRelatedUrnsBeforeRemove, expectedIncomingRelatedUrnsBeforeRemove, - expectedOutgoingRelatedUrnsAfterRemove, expectedIncomingRelatedUrnsAfterRemove); + super.testRemoveEdgesFromNode( + nodeToRemoveFrom, + relationTypes, + relationshipFilter, + expectedOutgoingRelatedUrnsBeforeRemove, + expectedIncomingRelatedUrnsBeforeRemove, + expectedOutgoingRelatedUrnsAfterRemove, + expectedIncomingRelatedUrnsAfterRemove); } @Test @Override public void testRemoveEdgesFromNodeNoRelationshipTypes() { // https://github.com/datahub-project/datahub/issues/3117 - throw new SkipException("ElasticSearchGraphService does not support empty list of relationship types"); + throw new SkipException( + "ElasticSearchGraphService does not support empty list of relationship types"); } @Test // TODO: Only in ES for now since unimplemented in other services public void testRemoveEdge() throws Exception { - DatasetUrn datasetUrn = new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); + DatasetUrn datasetUrn = + new DatasetUrn(new DataPlatformUrn("snowflake"), "test", FabricType.TEST); TagUrn tagUrn = new TagUrn("newTag"); Edge edge = new Edge(datasetUrn, tagUrn, TAG_RELATIONSHIP, null, null, null, null, null); getGraphService().addEdge(edge); syncAfterWrite(); - RelatedEntitiesResult result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + RelatedEntitiesResult result = + getGraphService() + .findRelatedEntities( + 
Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 1); getGraphService().removeEdge(edge); syncAfterWrite(); - result = getGraphService().findRelatedEntities(Collections.singletonList(datasetType), - newFilter(Collections.singletonMap("urn", datasetUrn.toString())), Collections.singletonList("tag"), - EMPTY_FILTER, Collections.singletonList(TAG_RELATIONSHIP), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), 0, 100); + result = + getGraphService() + .findRelatedEntities( + Collections.singletonList(datasetType), + newFilter(Collections.singletonMap("urn", datasetUrn.toString())), + Collections.singletonList("tag"), + EMPTY_FILTER, + Collections.singletonList(TAG_RELATIONSHIP), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.OUTGOING), + 0, + 100); assertEquals(result.getTotal(), 0); } @@ -239,15 +297,39 @@ public void testTimestampLineage() throws Exception { // Populate one upstream and two downstream edges at initialTime Long initialTime = 1000L; - List edges = Arrays.asList( - // One upstream edge - new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, initialTime, null, initialTime, null, null), - // Two downstream - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, initialTime, null, initialTime, null, null), - new Edge(datasetFourUrn, datasetTwoUrn, downstreamOf, initialTime, null, initialTime, null, null), - // One with null values, should always be returned - new Edge(datasetFiveUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null) - ); + List edges = + Arrays.asList( + // One upstream edge + new Edge( + datasetTwoUrn, + datasetOneUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + // Two downstream + new Edge( + datasetThreeUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + new Edge( + datasetFourUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + initialTime, + null, + null), + // One with null values, should always be returned + new Edge(datasetFiveUrn, datasetTwoUrn, downstreamOf, null, null, null, null, null)); edges.forEach(getGraphService()::addEdge); syncAfterWrite(); @@ -259,120 +341,103 @@ public void testTimestampLineage() throws Exception { Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Timestamp before - upstreamResult = getUpstreamLineage(datasetTwoUrn, - 0L, - initialTime - 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - 0L, - initialTime - 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, 0L, initialTime - 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, 0L, initialTime - 10); Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); Assert.assertEquals(new Integer(1), downstreamResult.getTotal()); // Timestamp after - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime + 10, - initialTime + 100); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime + 10, - initialTime + 100); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, initialTime + 100); Assert.assertEquals(new Integer(0), upstreamResult.getTotal()); Assert.assertEquals(new Integer(1), 
downstreamResult.getTotal()); // Timestamp included - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime - 10, - initialTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime - 10, - initialTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, initialTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Update only one of the downstream edges Long updatedTime = 2000L; - edges = Arrays.asList( - new Edge(datasetTwoUrn, datasetOneUrn, downstreamOf, initialTime, null, updatedTime, null, null), - new Edge(datasetThreeUrn, datasetTwoUrn, downstreamOf, initialTime, null, updatedTime, null, null) - ); + edges = + Arrays.asList( + new Edge( + datasetTwoUrn, + datasetOneUrn, + downstreamOf, + initialTime, + null, + updatedTime, + null, + null), + new Edge( + datasetThreeUrn, + datasetTwoUrn, + downstreamOf, + initialTime, + null, + updatedTime, + null, + null)); edges.forEach(getGraphService()::addEdge); syncAfterWrite(); // Without timestamps - upstreamResult = getUpstreamLineage(datasetTwoUrn, - null, - null); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - null, - null); + upstreamResult = getUpstreamLineage(datasetTwoUrn, null, null); + downstreamResult = getDownstreamLineage(datasetTwoUrn, null, null); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Window includes initial time and updated time - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime - 10, - updatedTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime - 10, - updatedTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime - 10, updatedTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(3), downstreamResult.getTotal()); // Window includes updated time but not initial time - upstreamResult = getUpstreamLineage(datasetTwoUrn, - initialTime + 10, - updatedTime + 10); - downstreamResult = getDownstreamLineage(datasetTwoUrn, - initialTime + 10, - updatedTime + 10); + upstreamResult = getUpstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); + downstreamResult = getDownstreamLineage(datasetTwoUrn, initialTime + 10, updatedTime + 10); Assert.assertEquals(new Integer(1), upstreamResult.getTotal()); Assert.assertEquals(new Integer(2), downstreamResult.getTotal()); - } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The Upstream lineage for urn from the window from startTime to endTime */ private EntityLineageResult getUpstreamLineage(Urn urn, Long startTime, Long endTime) { - return getLineage(urn, - LineageDirection.UPSTREAM, - startTime, - endTime); + return getLineage(urn, LineageDirection.UPSTREAM, startTime, endTime); } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The Downstream lineage for urn from the window from startTime to endTime */ private 
EntityLineageResult getDownstreamLineage(Urn urn, Long startTime, Long endTime) { - return getLineage(urn, - LineageDirection.DOWNSTREAM, - startTime, - endTime); + return getLineage(urn, LineageDirection.DOWNSTREAM, startTime, endTime); } /** * Utility method to reduce repeated parameters for lineage tests + * * @param urn URN to query * @param direction Direction to query (upstream/downstream) * @param startTime Start of time-based lineage query * @param endTime End of time-based lineage query * @return The lineage for urn from the window from startTime to endTime in direction */ - private EntityLineageResult getLineage(Urn urn, LineageDirection direction, Long startTime, Long endTime) { - return getGraphService().getLineage(urn, - direction, - 0, - 0, - 3, - startTime, - endTime); + private EntityLineageResult getLineage( + Urn urn, LineageDirection direction, Long startTime, Long endTime) { + return getGraphService().getLineage(urn, direction, 0, 0, 3, startTime, endTime); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java index 989f9ae197239..3c892dddb70e1 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/TimeFilterUtilsTest.java @@ -1,17 +1,18 @@ package com.linkedin.metadata.graph.search; import com.google.common.io.Resources; +import com.linkedin.metadata.graph.elastic.TimeFilterUtils; import java.net.URL; import java.nio.charset.StandardCharsets; - -import com.linkedin.metadata.graph.elastic.TimeFilterUtils; import org.opensearch.index.query.QueryBuilder; import org.testng.Assert; import org.testng.annotations.Test; public class TimeFilterUtilsTest { - private static final String TEST_QUERY_FILE = "elasticsearch/sample_filters/lineage_time_query_filters_1.json"; + private static final String TEST_QUERY_FILE = + "elasticsearch/sample_filters/lineage_time_query_filters_1.json"; + @Test private static void testGetEdgeTimeFilterQuery() throws Exception { URL url = Resources.getResource(TEST_QUERY_FILE); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java index 7b550311bf823..b2c49857cb0b9 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/elasticsearch/SearchGraphServiceElasticSearchTest.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.search.elasticsearch.ElasticSearchSuite; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; - import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -16,12 +15,9 @@ @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class SearchGraphServiceElasticSearchTest extends SearchGraphServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + 
@Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override @@ -45,5 +41,4 @@ protected ESIndexBuilder getIndexBuilder() { public void initTest() { AssertJUnit.assertNotNull(_searchClient); } - } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java index eabfb523fb910..28b545f817539 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/search/opensearch/SearchGraphServiceOpenSearchTest.java @@ -15,12 +15,9 @@ @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class SearchGraphServiceOpenSearchTest extends SearchGraphServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override @@ -44,5 +41,4 @@ protected ESIndexBuilder getIndexBuilder() { public void initTest() { AssertJUnit.assertNotNull(_searchClient); } - } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java index c6677c171b30e..df332cacaa751 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.graph.sibling; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.Siblings; @@ -24,27 +29,23 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class SiblingGraphServiceTest { - /** - * Some test URN types. - */ + /** Some test URN types. */ protected static String datasetType = "dataset"; - /** - * Some test datasets. - */ - protected static String datasetOneUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; - protected static String datasetTwoUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; - protected static String datasetThreeUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; - protected static String datasetFourUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; - protected static String datasetFiveUrnString = "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; + /** Some test datasets. 
*/ + protected static String datasetOneUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetOne,PROD)"; + + protected static String datasetTwoUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetTwo,PROD)"; + protected static String datasetThreeUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetThree,PROD)"; + protected static String datasetFourUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFour,PROD)"; + protected static String datasetFiveUrnString = + "urn:li:" + datasetType + ":(urn:li:dataPlatform:type,SampleDatasetFive,PROD)"; protected static Urn datasetOneUrn = createFromString(datasetOneUrnString); protected static Urn datasetTwoUrn = createFromString(datasetTwoUrnString); @@ -52,11 +53,9 @@ public class SiblingGraphServiceTest { protected static Urn datasetFourUrn = createFromString(datasetFourUrnString); protected static Urn datasetFiveUrn = createFromString(datasetFiveUrnString); - - /** - * Some test relationships. - */ + /** Some test relationships. */ protected static String downstreamOf = "DownstreamOf"; + protected static String upstreamOf = "UpstreamOf"; private GraphService _graphService; @@ -100,15 +99,15 @@ public void testNoSiblingMetadata() { mockResult.setFiltered(0); mockResult.setRelationships(relationships); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(null); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert sibling graph service is a pass through in the case that there is no sibling metadata assertEquals(upstreamLineage, mockResult); @@ -145,24 +144,23 @@ public void testNoSiblingInResults() { mockResult.setFiltered(0); mockResult.setRelationships(relationships); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); siblingMockResult.setStart(0); siblingMockResult.setTotal(0); siblingMockResult.setCount(0); siblingMockResult.setRelationships(new LineageRelationshipArray()); - when(_graphService.getLineage( - datasetFiveUrn, LineageDirection.UPSTREAM, 0, 97, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetFiveUrn, LineageDirection.UPSTREAM, 0, 97, 1, null, null)) + .thenReturn(siblingMockResult); Siblings noRelevantSiblingsResponse = new Siblings(); noRelevantSiblingsResponse.setPrimary(true); noRelevantSiblingsResponse.setSiblings(new UrnArray(ImmutableList.of(datasetFiveUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(noRelevantSiblingsResponse); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(noRelevantSiblingsResponse); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -176,17 +174,18 @@ public void testNoSiblingInResults() {
dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert sibling graph service is a pass through in the case that your sibling has no lineage assertEquals(upstreamLineage, mockResult); @@ -227,20 +226,18 @@ public void testSiblingInResult() throws Exception { siblingMockResult.setCount(0); siblingMockResult.setRelationships(new LineageRelationshipArray()); - when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 98, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 98, 1, null, null)) + .thenReturn(siblingMockResult); - - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -254,11 +251,11 @@ public void testSiblingInResult() throws Exception { dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); @@ -270,7 +267,8 @@ public void testSiblingInResult() throws Exception { expectedResult.setFiltered(1); expectedResult.setRelationships(new LineageRelationshipArray(relationship1, relationship2)); - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your sibling will be filtered out of your lineage assertEquals(upstreamLineage, expectedResult); @@ -311,7 +309,8 @@ public void testCombineSiblingResult() { expectedRelationships.add(relationship2); expectedRelationships.add(relationship4); -
expectedRelationships.add(relationship1); // expect just one relationship1 despite duplicates in sibling lineage + expectedRelationships.add( + relationship1); // expect just one relationship1 despite duplicates in sibling lineage expectedResult.setCount(3); expectedResult.setStart(0); @@ -326,27 +325,39 @@ public void testCombineSiblingResult() { siblingRelationships.add(relationship2); siblingRelationships.add(relationship4); - siblingRelationships.add(relationship1); // duplicate from sibling's lineage, we should not see duplicates in result + siblingRelationships.add( + relationship1); // duplicate from sibling's lineage, we should not see duplicates in result siblingMockResult.setStart(0); siblingMockResult.setTotal(3); siblingMockResult.setCount(2); siblingMockResult.setRelationships(siblingRelationships); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> siblingMockResult.clone()); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> siblingMockResult.clone()); when(_graphService.getLineage( - Mockito.eq(datasetFourUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockResult.clone()); + Mockito.eq(datasetFourUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockResult.clone()); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -360,18 +371,19 @@ public void testCombineSiblingResult() { dataset3Siblings.setPrimary(false); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of())); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFiveUrn, ImmutableList.of(dataset3Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFiveUrn, ImmutableList.of(dataset3Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will be combined with your siblings lineage assertEquals(upstreamLineage, expectedResult); @@ -430,20 +442,18 @@ public void testUpstreamOfSiblings() { siblingMockResult.setCount(2); siblingMockResult.setRelationships(siblingRelationships); - when(_graphService.getLineage( -
datasetThreeUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null - )).thenReturn(siblingMockResult); + when(_graphService.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null)) + .thenReturn(siblingMockResult); - - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(mockResult); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(true); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -465,37 +475,37 @@ public void testUpstreamOfSiblings() { dataset5Siblings.setPrimary(true); dataset5Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings), - datasetFiveUrn, ImmutableList.of(dataset5Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings), + datasetFiveUrn, ImmutableList.of(dataset5Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will not contain two siblings assertEquals(upstreamLineage, expectedResult); when(_graphService.getLineage( - datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null - )).thenReturn(siblingMockResult); - + datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1, null, null)) + .thenReturn(siblingMockResult); - when(_graphService.getLineage( - datasetFourUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null - )).thenReturn(mockResult); + when(_graphService.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 99, 1, null, null)) + .thenReturn(mockResult); siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(false); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetFourUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); upstreamLineage = service.getLineage(datasetThreeUrn, LineageDirection.UPSTREAM, 0, 100, 1); @@ -510,7 +520,8 @@ public void testUpstreamOfSiblings() { } // we should be combining lineage of siblings of siblings - // ie. dataset1 has sibling dataset2. dataset 2 has siblings dataset1 and dataset3. dataset3 has sibling dataset2. dataset3 has upstream dataset4. + // ie. dataset1 has sibling dataset2.
dataset 2 has siblings dataset1 and dataset3. dataset3 has + // sibling dataset2. dataset3 has upstream dataset4. // requesting upstream for dataset1 should give us dataset4 @Test public void testUpstreamOfSiblingSiblings() { @@ -547,57 +558,77 @@ public void testUpstreamOfSiblingSiblings() { emptyLineageResult.setCount(0); when(_graphService.getLineage( - Mockito.eq(datasetOneUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(emptyLineageResult); + Mockito.eq(datasetOneUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(emptyLineageResult); when(_graphService.getLineage( - Mockito.eq(datasetTwoUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(emptyLineageResult); + Mockito.eq(datasetTwoUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(emptyLineageResult); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).thenReturn(mockResult); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .thenReturn(mockResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(true); dataset1Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - when(_mockEntityService.getLatestAspect(datasetOneUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset1Siblings); + when(_mockEntityService.getLatestAspect(datasetOneUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset1Siblings); Siblings dataset2Siblings = new Siblings(); dataset2Siblings.setPrimary(true); dataset2Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetOneUrn, datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetTwoUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset2Siblings); + when(_mockEntityService.getLatestAspect(datasetTwoUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset2Siblings); Siblings dataset3Siblings = new Siblings(); dataset3Siblings.setPrimary(true); dataset3Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset3Siblings); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset3Siblings); Siblings dataset4Siblings = new Siblings(); dataset4Siblings.setPrimary(true); dataset4Siblings.setSiblings(new UrnArray()); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(dataset4Siblings); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(dataset4Siblings); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn,
ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; for (Urn urn : List.of(datasetOneUrn, datasetTwoUrn, datasetThreeUrn)) { - EntityLineageResult upstreamLineage = service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 100, 1); + EntityLineageResult upstreamLineage = + service.getLineage(datasetOneUrn, LineageDirection.UPSTREAM, 0, 100, 1); assertEquals(upstreamLineage, expectedResult); } @@ -659,26 +690,38 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { siblingMockResult.setRelationships(siblingRelationships); when(_graphService.getLineage( - Mockito.eq(datasetThreeUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> siblingMockResult.clone()); + Mockito.eq(datasetThreeUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> siblingMockResult.clone()); when(_graphService.getLineage( - Mockito.eq(datasetFourUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockResult.clone()); + Mockito.eq(datasetFourUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockResult.clone()); Siblings primarySibling = new Siblings(); primarySibling.setPrimary(true); primarySibling.setSiblings(new UrnArray(ImmutableList.of(datasetThreeUrn))); - when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)).thenReturn(primarySibling); + when(_mockEntityService.getLatestAspect(datasetFourUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(primarySibling); Siblings siblingInSearchResult = new Siblings(); siblingInSearchResult.setPrimary(false); siblingInSearchResult.setSiblings(new UrnArray(ImmutableList.of(datasetFourUrn))); - when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)).thenReturn(siblingInSearchResult); + when(_mockEntityService.getLatestAspect(datasetThreeUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(siblingInSearchResult); Siblings dataset1Siblings = new Siblings(); dataset1Siblings.setPrimary(false); @@ -700,19 +743,20 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { dataset5Siblings.setPrimary(true); dataset5Siblings.setSiblings(new UrnArray(ImmutableList.of(datasetTwoUrn))); - Map<Urn, List<RecordTemplate>> siblingsMap = ImmutableMap.of( - datasetOneUrn, ImmutableList.of(dataset1Siblings), - datasetTwoUrn, ImmutableList.of(dataset2Siblings), - datasetThreeUrn, ImmutableList.of(dataset3Siblings), - datasetFourUrn, ImmutableList.of(dataset4Siblings), - datasetFiveUrn, ImmutableList.of(dataset5Siblings) - ); + Map<Urn, List<RecordTemplate>> siblingsMap = + ImmutableMap.of( + datasetOneUrn, ImmutableList.of(dataset1Siblings), + datasetTwoUrn, ImmutableList.of(dataset2Siblings), + datasetThreeUrn, ImmutableList.of(dataset3Siblings), + datasetFourUrn, ImmutableList.of(dataset4Siblings), + datasetFiveUrn, ImmutableList.of(dataset5Siblings)); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; - EntityLineageResult upstreamLineage = service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1);
+ EntityLineageResult upstreamLineage = + service.getLineage(datasetFourUrn, LineageDirection.UPSTREAM, 0, 100, 1); // assert your lineage will not contain two siblings assertEquals(upstreamLineage, expectedResult); @@ -733,11 +777,19 @@ public void testRelationshipWithSibling() throws CloneNotSupportedException { @Test public void testSiblingCombinations() throws URISyntaxException { - Urn primarySiblingUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:dbt,PrimarySibling,PROD)"); - Urn alternateSiblingUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,SecondarySibling,PROD)"); - - Urn upstreamUrn1 = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream1,PROD)"); - Urn upstreamUrn2 = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream2,PROD)"); + Urn primarySiblingUrn = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:dbt,PrimarySibling,PROD)"); + Urn alternateSiblingUrn = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,SecondarySibling,PROD)"); + + Urn upstreamUrn1 = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream1,PROD)"); + Urn upstreamUrn2 = + Urn.createFromString( + "urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Upstream2,PROD)"); LineageRelationshipArray alternateDownstreamRelationships = new LineageRelationshipArray(); // Populate sibling service @@ -745,13 +797,15 @@ public void testSiblingCombinations() throws URISyntaxException { primarySiblings.setPrimary(true); primarySiblings.setSiblings(new UrnArray(ImmutableList.of(alternateSiblingUrn))); - when(_mockEntityService.getLatestAspect(primarySiblingUrn, SIBLINGS_ASPECT_NAME)).thenReturn(primarySiblings); + when(_mockEntityService.getLatestAspect(primarySiblingUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(primarySiblings); Siblings secondarySiblings = new Siblings(); secondarySiblings.setPrimary(false); secondarySiblings.setSiblings(new UrnArray(ImmutableList.of(primarySiblingUrn))); - when(_mockEntityService.getLatestAspect(alternateSiblingUrn, SIBLINGS_ASPECT_NAME)).thenReturn(secondarySiblings); + when(_mockEntityService.getLatestAspect(alternateSiblingUrn, SIBLINGS_ASPECT_NAME)) + .thenReturn(secondarySiblings); Map<Urn, List<RecordTemplate>> siblingsMap = new HashMap<>(); siblingsMap.put(primarySiblingUrn, ImmutableList.of(primarySiblings)); @@ -760,7 +814,13 @@ public void testSiblingCombinations() throws URISyntaxException { // Create many downstreams of the alternate URN string final int numDownstreams = 42; for (int i = 0; i < numDownstreams; i++) { - Urn downstreamUrn = Urn.createFromString("urn:li:" + datasetType + ":(urn:li:dataPlatform:snowflake,Downstream" + i + ",PROD)"); + Urn downstreamUrn = + Urn.createFromString( + "urn:li:" + + datasetType + + ":(urn:li:dataPlatform:snowflake,Downstream" + + i + + ",PROD)"); LineageRelationship relationship = new LineageRelationship(); relationship.setDegree(0); relationship.setType(upstreamOf); @@ -785,9 +845,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockAlternateUpstreamResult.setCount(3); when(_graphService.getLineage( - Mockito.eq(alternateSiblingUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockAlternateUpstreamResult.clone()); + Mockito.eq(alternateSiblingUrn), + Mockito.eq(LineageDirection.UPSTREAM), +
Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockAlternateUpstreamResult.clone()); EntityLineageResult mockAlternateDownstreamResult = new EntityLineageResult(); mockAlternateDownstreamResult.setRelationships(alternateDownstreamRelationships); @@ -796,9 +861,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockAlternateDownstreamResult.setCount(numDownstreams); when(_graphService.getLineage( - Mockito.eq(alternateSiblingUrn), Mockito.eq(LineageDirection.DOWNSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockAlternateDownstreamResult.clone()); + Mockito.eq(alternateSiblingUrn), + Mockito.eq(LineageDirection.DOWNSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockAlternateDownstreamResult.clone()); // Set up mocks for primary sibling LineageRelationshipArray primaryUpstreamRelationships = new LineageRelationshipArray(); @@ -818,9 +888,14 @@ public void testSiblingCombinations() throws URISyntaxException { mockPrimaryUpstreamResult.setCount(2); when(_graphService.getLineage( - Mockito.eq(primarySiblingUrn), Mockito.eq(LineageDirection.UPSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockPrimaryUpstreamResult.clone()); + Mockito.eq(primarySiblingUrn), + Mockito.eq(LineageDirection.UPSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockPrimaryUpstreamResult.clone()); LineageRelationshipArray primaryDowntreamRelationships = new LineageRelationshipArray(); LineageRelationship relationship = new LineageRelationship(); @@ -836,26 +911,23 @@ public void testSiblingCombinations() throws URISyntaxException { mockPrimaryDownstreamResult.setCount(1); when(_graphService.getLineage( - Mockito.eq(primarySiblingUrn), Mockito.eq(LineageDirection.DOWNSTREAM), Mockito.anyInt(), Mockito.anyInt(), - Mockito.eq(1), Mockito.eq(null), Mockito.eq(null) - )).then(invocation -> mockPrimaryDownstreamResult.clone()); - + Mockito.eq(primarySiblingUrn), + Mockito.eq(LineageDirection.DOWNSTREAM), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(1), + Mockito.eq(null), + Mockito.eq(null))) + .then(invocation -> mockPrimaryDownstreamResult.clone()); when(_mockEntityService.getLatestAspects(any(), any())).thenReturn(siblingsMap); SiblingGraphService service = _client; // Tests for separateSiblings = true: primary sibling - EntityLineageResult primaryDownstreamSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult primaryDownstreamSeparated = + service.getLineage( + primarySiblingUrn, LineageDirection.DOWNSTREAM, 0, 100, 1, true, Set.of(), null, null); LineageRelationshipArray expectedRelationships = new LineageRelationshipArray(); expectedRelationships.add(relationship); @@ -869,16 +941,9 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primaryDownstreamSeparated, expectedResultPrimarySeparated); - EntityLineageResult primaryUpstreamSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult primaryUpstreamSeparated = + service.getLineage( + primarySiblingUrn, LineageDirection.UPSTREAM, 0, 
100, 1, true, Set.of(), null, null); EntityLineageResult expectedResultPrimaryUpstreamSeparated = new EntityLineageResult(); expectedResultPrimaryUpstreamSeparated.setCount(2); expectedResultPrimaryUpstreamSeparated.setStart(0); @@ -889,16 +954,17 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primaryUpstreamSeparated, expectedResultPrimaryUpstreamSeparated); // Test for separateSiblings = true, secondary sibling - EntityLineageResult secondarySiblingSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult secondarySiblingSeparated = + service.getLineage( + alternateSiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + true, + Set.of(), + null, + null); EntityLineageResult expectedResultSecondarySeparated = new EntityLineageResult(); expectedResultSecondarySeparated.setCount(numDownstreams); @@ -909,16 +975,9 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(secondarySiblingSeparated, expectedResultSecondarySeparated); - EntityLineageResult secondaryUpstreamSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - true, - Set.of(), - null, - null); + EntityLineageResult secondaryUpstreamSeparated = + service.getLineage( + alternateSiblingUrn, LineageDirection.UPSTREAM, 0, 100, 1, true, Set.of(), null, null); EntityLineageResult expectedResultSecondaryUpstreamSeparated = new EntityLineageResult(); expectedResultSecondaryUpstreamSeparated.setCount(3); expectedResultSecondaryUpstreamSeparated.setStart(0); @@ -929,16 +988,17 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(secondaryUpstreamSeparated, expectedResultSecondaryUpstreamSeparated); // Test for separateSiblings = false, primary sibling - EntityLineageResult primarySiblingNonSeparated = service.getLineage( - primarySiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null); + EntityLineageResult primarySiblingNonSeparated = + service.getLineage( + primarySiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); EntityLineageResult expectedResultPrimaryNonSeparated = new EntityLineageResult(); expectedResultPrimaryNonSeparated.setCount(numDownstreams); expectedResultPrimaryNonSeparated.setStart(0); @@ -947,17 +1007,17 @@ public void testSiblingCombinations() throws URISyntaxException { expectedResultPrimaryNonSeparated.setRelationships(alternateDownstreamRelationships); assertEquals(primarySiblingNonSeparated, expectedResultPrimaryNonSeparated); - EntityLineageResult primarySiblingNonSeparatedUpstream = service.getLineage( - primarySiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null - ); + EntityLineageResult primarySiblingNonSeparatedUpstream = + service.getLineage( + primarySiblingUrn, + LineageDirection.UPSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); EntityLineageResult expectedResultPrimaryUpstreamNonSeparated = new EntityLineageResult(); expectedResultPrimaryUpstreamNonSeparated.setCount(2); expectedResultPrimaryUpstreamNonSeparated.setStart(0); @@ -967,29 +1027,30 @@ public void testSiblingCombinations() throws URISyntaxException { assertEquals(primarySiblingNonSeparatedUpstream, expectedResultPrimaryUpstreamNonSeparated); // Test for separateSiblings = false, secondary sibling - EntityLineageResult 
secondarySiblingNonSeparated = service.getLineage( - alternateSiblingUrn, - LineageDirection.DOWNSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null); + EntityLineageResult secondarySiblingNonSeparated = + service.getLineage( + alternateSiblingUrn, + LineageDirection.DOWNSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); assertEquals(secondarySiblingNonSeparated, expectedResultPrimaryNonSeparated); - EntityLineageResult secondarySiblingNonSeparatedUpstream = service.getLineage( - alternateSiblingUrn, - LineageDirection.UPSTREAM, - 0, - 100, - 1, - false, - new HashSet<>(), - null, - null - ); + EntityLineageResult secondarySiblingNonSeparatedUpstream = + service.getLineage( + alternateSiblingUrn, + LineageDirection.UPSTREAM, + 0, + 100, + 1, + false, + new HashSet<>(), + null, + null); assertEquals(secondarySiblingNonSeparatedUpstream, expectedResultPrimaryUpstreamNonSeparated); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java index 60e63ed001768..c0faf6fdfee6c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.recommendation; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntityUtil; @@ -11,34 +14,56 @@ import java.util.stream.Collectors; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class RecommendationsServiceTest { private final TestSource nonEligibleSource = - new TestSource("not eligible", "nonEligible", RecommendationRenderType.ENTITY_NAME_LIST, false, + new TestSource( + "not eligible", + "nonEligible", + RecommendationRenderType.ENTITY_NAME_LIST, + false, getContentFromString(ImmutableList.of("test"))); private final TestSource emptySource = - new TestSource("empty", "empty", RecommendationRenderType.ENTITY_NAME_LIST, true, ImmutableList.of()); + new TestSource( + "empty", "empty", RecommendationRenderType.ENTITY_NAME_LIST, true, ImmutableList.of()); private final TestSource valuesSource = - new TestSource("values", "values", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "values", + "values", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromString(ImmutableList.of("test"))); private final TestSource multiValuesSource = - new TestSource("multiValues", "multiValues", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "multiValues", + "multiValues", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromString(ImmutableList.of("test1", "test2", "test3", "test4"))); private final TestSource urnsSource = - new TestSource("urns", "urns", RecommendationRenderType.ENTITY_NAME_LIST, true, + new TestSource( + "urns", + "urns", + RecommendationRenderType.ENTITY_NAME_LIST, + true, getContentFromUrns(ImmutableList.of(TestEntityUtil.getTestEntityUrn()))); private final TestSource multiUrnsSource = - new TestSource("multiUrns", "multiUrns", RecommendationRenderType.ENTITY_NAME_LIST, true, - getContentFromUrns(ImmutableList.of(TestEntityUtil.getTestEntityUrn(), 
TestEntityUtil.getTestEntityUrn(), - TestEntityUtil.getTestEntityUrn()))); + new TestSource( + "multiUrns", + "multiUrns", + RecommendationRenderType.ENTITY_NAME_LIST, + true, + getContentFromUrns( + ImmutableList.of( + TestEntityUtil.getTestEntityUrn(), + TestEntityUtil.getTestEntityUrn(), + TestEntityUtil.getTestEntityUrn()))); private final RecommendationModuleRanker ranker = new SimpleRecommendationRanker(); private List<RecommendationContent> getContentFromString(List<String> values) { - return values.stream().map(value -> new RecommendationContent().setValue(value)).collect(Collectors.toList()); + return values.stream() + .map(value -> new RecommendationContent().setValue(value)) + .collect(Collectors.toList()); } private List<RecommendationContent> getContentFromUrns(List<Urn> urns) { @@ -50,15 +75,24 @@ private List<RecommendationContent> getContentFromUrns(List<Urn> urns) { @Test public void testService() throws URISyntaxException { // Test non-eligible and empty - RecommendationsService service = new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); - List<RecommendationModule> result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + RecommendationsService service = + new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource), ranker); + List<RecommendationModule> result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertTrue(result.isEmpty()); // Test empty with one valid source - service = new RecommendationsService(ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + service = + new RecommendationsService( + ImmutableList.of(nonEligibleSource, emptySource, valuesSource), ranker); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertEquals(result.size(), 1); RecommendationModule module = result.get(0); assertEquals(module.getTitle(), "values"); @@ -67,10 +101,14 @@ public void testService() throws URISyntaxException { // Test multiple sources - service = new RecommendationsService(ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), - ranker); - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); + service = + new RecommendationsService( + ImmutableList.of(valuesSource, multiValuesSource, urnsSource, multiUrnsSource), ranker); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 10); assertEquals(result.size(), 4); module = result.get(0); assertEquals(module.getTitle(), "values"); @@ -94,8 +132,11 @@ public void testService() throws URISyntaxException { assertEquals(module.getContent(), multiUrnsSource.getContents()); // Test limit - result = service.listRecommendations(Urn.createFromString("urn:li:corpuser:me"), - new RecommendationRequestContext().setScenario(ScenarioType.HOME), 2); + result = + service.listRecommendations( + Urn.createFromString("urn:li:corpuser:me"), + new RecommendationRequestContext().setScenario(ScenarioType.HOME), + 2);
assertEquals(result.size(), 2); module = result.get(0); assertEquals(module.getTitle(), "values"); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java index 0dc517eaf0d1c..dcc59d0632954 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.recommendation.candidatesource; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; @@ -19,15 +27,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.eq; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertFalse; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - - public class EntitySearchAggregationCandidateSourceTest { private EntitySearchService _entitySearchService = Mockito.mock(EntitySearchService.class); private EntitySearchAggregationSource _valueBasedCandidateSource; @@ -44,7 +43,8 @@ public void setup() { _urnBasedCandidateSource = buildCandidateSource("testUrn", true); } - private EntitySearchAggregationSource buildCandidateSource(String identifier, boolean isValueUrn) { + private EntitySearchAggregationSource buildCandidateSource( + String identifier, boolean isValueUrn) { return new EntitySearchAggregationSource(_entitySearchService) { @Override protected String getSearchFieldName() { @@ -77,7 +77,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return true; } }; @@ -85,9 +86,11 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo @Test public void testWhenSearchServiceReturnsEmpty() { - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(Collections.emptyMap()); - List<RecommendationContent> candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertTrue(candidates.isEmpty()); assertFalse(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); } @@ -95,9 +98,11 @@ public void testWhenSearchServiceReturnsEmpty() { @Test public void testWhenSearchServiceReturnsValueResults() { // One result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) +
Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L)); - List<RecommendationContent> candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), "value1"); @@ -107,14 +112,16 @@ public void testWhenSearchServiceReturnsValueResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value1")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); assertTrue(_valueBasedCandidateSource.getRecommendationModule(USER, CONTEXT).isPresent()); // Multiple result - Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) + Mockito.when( + _entitySearchService.aggregateByValue(eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L)); candidates = _valueBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); @@ -126,7 +133,8 @@ assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value3")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 3L); @@ -138,7 +146,8 @@ assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testValue").setValue("value2")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); @@ -153,7 +162,8 @@ public void testWhenSearchServiceReturnsUrnResults() { Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3"); Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L)); - List<RecommendationContent> candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); + List<RecommendationContent> candidates = + _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), testUrn1.toString()); @@ -163,7 +173,8 @@ assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0),
new Criterion().setField("testUrn").setValue(testUrn1.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); @@ -171,7 +182,9 @@ public void testWhenSearchServiceReturnsUrnResults() { // Multiple result Mockito.when(_entitySearchService.aggregateByValue(eq(null), eq("testUrn"), eq(null), anyInt())) - .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); + .thenReturn( + ImmutableMap.of( + testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); candidates = _urnBasedCandidateSource.getRecommendations(USER, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); @@ -182,7 +195,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn3.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 3L); @@ -194,7 +208,8 @@ public void testWhenSearchServiceReturnsUrnResults() { assertNotNull(params.getSearchParams()); assertTrue(StringUtils.isEmpty(params.getSearchParams().getQuery())); assertEquals(params.getSearchParams().getFilters().size(), 1); - assertEquals(params.getSearchParams().getFilters().get(0), + assertEquals( + params.getSearchParams().getFilters().get(0), new Criterion().setField("testUrn").setValue(testUrn2.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java index f5c3569821e00..3998e45195b25 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java @@ -14,13 +14,12 @@ public class RecommendationUtilsTest { private void testIsSupportedEntityType() { Urn testUrn = UrnUtils.getUrn("urn:li:corpuser:john"); Assert.assertTrue( - RecommendationUtils.isSupportedEntityType(testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME)) - ); + RecommendationUtils.isSupportedEntityType( + testUrn, + ImmutableSet.of(Constants.DATASET_ENTITY_NAME, Constants.CORP_USER_ENTITY_NAME))); Assert.assertFalse( - RecommendationUtils.isSupportedEntityType(testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME)) - ); - Assert.assertFalse( - RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet()) - ); + RecommendationUtils.isSupportedEntityType( + testUrn, ImmutableSet.of(Constants.DATASET_ENTITY_NAME))); + Assert.assertFalse(RecommendationUtils.isSupportedEntityType(testUrn, Collections.emptySet())); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java index 31672b6aa885f..666deb2c419d7 100644 --- 
a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/TestSource.java @@ -9,7 +9,6 @@ import lombok.Getter; import lombok.RequiredArgsConstructor; - @Getter @RequiredArgsConstructor public class TestSource implements RecommendationSource { @@ -36,13 +35,14 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return eligible; } @Override - public List<RecommendationContent> getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List<RecommendationContent> getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return contents; } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java index 1757883f1a5a9..57fa51ffbdd90 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageSearchResultCacheKeyTest.java @@ -1,32 +1,34 @@ package com.linkedin.metadata.search; -import java.time.temporal.ChronoUnit; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertNotSame; +import java.time.temporal.ChronoUnit; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; public class LineageSearchResultCacheKeyTest extends AbstractTestNGSpringContextTests { @Test public void testNulls() { // ensure no NPE - assertEquals(new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); + assertEquals( + new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey(null, null, null, null, null, ChronoUnit.DAYS)); } @Test public void testDateTruncation() { // expect start of day milli - assertEquals(new EntityLineageResultCacheKey(null, null, 1679529600000L, - 1679615999999L, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, 1679530293000L, - 1679530293001L, null, ChronoUnit.DAYS)); - assertNotSame(new EntityLineageResultCacheKey(null, null, 1679529600000L, - 1679616000000L, null, ChronoUnit.DAYS), - new EntityLineageResultCacheKey(null, null, 1679530293000L, - 1679530293001L, null, ChronoUnit.DAYS)); + assertEquals( + new EntityLineageResultCacheKey( + null, null, 1679529600000L, 1679615999999L, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey( + null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); + assertNotSame( + new EntityLineageResultCacheKey( + null, null, 1679529600000L, 1679616000000L, null, ChronoUnit.DAYS), + new EntityLineageResultCacheKey( + null, null, 1679530293000L, 1679530293001L, null, ChronoUnit.DAYS)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java index 696e3b62834bd..079ec08462515 ---
a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java @@ -1,5 +1,22 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.reset; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNull; +import static org.testng.Assert.assertTrue; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -46,11 +63,21 @@ import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.junit.Assert; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; -import org.opensearch.client.RestHighLevelClient; import org.opensearch.action.search.SearchRequest; +import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; @@ -58,50 +85,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import java.net.URISyntaxException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anySet; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.reset; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNull; -import static org.testng.Assert.assertTrue; - -abstract public class LineageServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class LineageServiceTestBase extends 
AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -116,11 +115,13 @@ abstract public class LineageServiceTestBase extends AbstractTestNGSpringContext private static final Urn TEST_URN = TestEntityUtil.getTestEntityUrn(); private static final String TEST = "test"; private static final String TEST1 = "test1"; - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); @BeforeClass public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -137,20 +138,29 @@ public void setup() { } private void resetService(boolean withCache, boolean withLightingCache) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); entityDocCountCacheConfiguration.setTtlSeconds(600L); - SearchLineageCacheConfiguration searchLineageCacheConfiguration = new SearchLineageCacheConfiguration(); + SearchLineageCacheConfiguration searchLineageCacheConfiguration = + new SearchLineageCacheConfiguration(); searchLineageCacheConfiguration.setTtlSeconds(600L); searchLineageCacheConfiguration.setLightningThreshold(withLightingCache ? 
-1 : 300); - _lineageSearchService = spy(new LineageSearchService( - new SearchService( - new EntityDocCountCache(_entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), - cachingEntitySearchService, - new SimpleRanker()), - _graphService, _cacheManager.getCache("test"), withCache, searchLineageCacheConfiguration)); + _lineageSearchService = + spy( + new LineageSearchService( + new SearchService( + new EntityDocCountCache( + _entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), + cachingEntitySearchService, + new SimpleRanker()), + _graphService, + _cacheManager.getCache("test"), + withCache, + searchLineageCacheConfiguration)); } @BeforeMethod @@ -163,13 +173,27 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildEntitySearchService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention, _settingsBuilder); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); _searchClientSpy = spy(getSearchClient()); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, _searchClientSpy, _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, _searchClientSpy, _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); - ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, _searchClientSpy, _indexConvention, getBulkProcessor(), 1); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + _searchClientSpy, + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + _searchClientSpy, + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); + ESWriteDAO writeDAO = + new ESWriteDAO(_entityRegistry, _searchClientSpy, _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); } @@ -179,7 +203,8 @@ private void clearCache(boolean withLightingCache) { } private EntityLineageResult mockResult(List<LineageRelationship> lineageRelationships) { - return new EntityLineageResult().setRelationships(new LineageRelationshipArray(lineageRelationships)) + return new EntityLineageResult() + .setRelationships(new LineageRelationshipArray(lineageRelationships)) .setStart(0) .setCount(10) .setTotal(lineageRelationships.size()); @@ -187,18 +212,34 @@ private EntityLineageResult mockResult(List<LineageRelationship> lineageRelation @Test public void testSearchService() throws Exception { - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageSearchResult searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new
LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); - //just testing null input does not throw any exception + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + // just testing null input does not throw any exception searchAcrossLineage(null, null); searchResult = searchAcrossLineage(null, TEST); @@ -216,16 +257,32 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); @@ -255,15 +312,25 @@ public void testSearchService() throws Exception { assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); // Verify that highlighting was turned off in the query - ArgumentCaptor<SearchRequest> searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); + ArgumentCaptor<SearchRequest> searchRequestCaptor = + ArgumentCaptor.forClass(SearchRequest.class); Mockito.verify(_searchClientSpy, times(1)).search(searchRequestCaptor.capture(), any()); SearchRequest capturedRequest = searchRequestCaptor.getValue(); assertNull(capturedRequest.source().highlighter()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); @@ -273,55 +340,136 @@ public void testSearchService() throws Exception { Mockito.reset(_graphService); // Case 1: Use the maxHops in the cache.
- when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", 1000, null, null, 0, 10, null, null, - new SearchFlags().setSkipCache(false)); + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + 1000, + null, + null, + 0, + 10, + null, + null, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); // Case 2: Use the start and end time in the cache. 
- when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), "test1", - null, null, null, 0, 10, 0L, 1L, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + "test1", + null, + null, + null, + 0, + 10, + 0L, + 1L, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "test1", null, null, null, 0, 10, 0L, 1L, - new SearchFlags().setSkipCache(false)); + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "test1", + null, + null, + null, + 0, + 10, + 0L, + 1L, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); clearCache(false); @@ -330,19 +478,28 @@ public void testSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); searchResult = searchAcrossLineage(null, TEST1); assertEquals(searchResult.getNumEntities().intValue(), 0); - } @Test public void testScrollAcrossLineage() throws Exception { - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageScrollResult scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); assertNull(scrollResult.getScrollId()); @@ -351,9 +508,18 @@ public void 
testScrollAcrossLineage() throws Exception { assertNull(scrollResult.getScrollId()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); // just testing null input does not throw any exception scrollAcrossLineage(null, null); @@ -374,17 +540,33 @@ public void testScrollAcrossLineage() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); assertEquals(scrollResult.getEntities().size(), 0); assertNull(scrollResult.getScrollId()); clearCache(false); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 1); assertEquals(scrollResult.getEntities().get(0).getEntity(), urn); @@ -407,9 +589,12 @@ public void testScrollAcrossLineage() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); scrollResult = scrollAcrossLineage(null, TEST1); assertEquals(scrollResult.getNumEntities().intValue(), 0); @@ -426,15 +611,31 @@ public void testLightningSearchService() throws Exception { // Enable lightning resetService(true, true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); LineageSearchResult searchResult = searchAcrossLineage(null, testStar); 
assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(TEST_URN).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); clearCache(true); @@ -448,32 +649,51 @@ public void testLightningSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn(mockResult(Collections.emptyList())); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn(mockResult(Collections.emptyList())); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(0).getDegree().intValue(), 1); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); searchResult = searchAcrossLineage(QueryUtils.newFilter("degree.keyword", "1"), testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(0).getDegree().intValue(), 1); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); searchResult = searchAcrossLineage(QueryUtils.newFilter("degree.keyword", "2"), testStar); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); // resets spy Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -488,16 +708,27 @@ 
public void testLightningSearchService() throws Exception { searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt(), eq(null), eq(null))).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + anyInt(), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn2).setType("test").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().size(), 1); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); clearCache(true); // Test Cache Behavior @@ -505,59 +736,144 @@ public void testLightningSearchService() throws Exception { reset(_lineageSearchService); // Case 1: Use the maxHops in the cache. - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, - new SearchFlags().setSkipCache(false)); + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new 
SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(null), eq(null)); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); - + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(null), + eq(null)); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Case 2: Use the start and end time in the cache. - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L))).thenReturn(mockResult( - ImmutableList.of( - new LineageRelationship().setDegree(3).setType("type").setEntity(urn) - ) - )); + when(_graphService.getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L))) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setDegree(3).setType("type").setEntity(urn)))); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), "*", - null, null, null, 0, 10, 0L, 1L, - new SearchFlags().setSkipCache(false)); + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + "*", + null, + null, + null, + 0, + 10, + 0L, + 1L, + new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Hit the cache on second attempt - searchResult = _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", null, null, null, 0, 10, 0L, 1L, + searchResult = + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + null, + null, + null, + 0, + 10, + 0L, + 1L, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(4)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(4)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); /* * Test filtering @@ -566,70 +882,163 @@ public void testLightningSearchService() throws Exception { // Entity searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + 
TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(DATASET_ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(1)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(1)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(DATASET_ENTITY_NAME), - "*", 1000, null, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(DATASET_ENTITY_NAME), + "*", + 1000, + null, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - Mockito.verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(2)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + Mockito.verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(2)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); // Platform ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue("urn:li:dataPlatform:kafka").setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion() + .setField("platform") + .setValue("urn:li:dataPlatform:kafka") + .setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion degreeCrit = new Criterion().setField("degree.keyword").setValue("2").setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(degreeCrit)))); + Criterion degreeCrit = + new Criterion().setField("degree.keyword").setValue("2").setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(degreeCrit)))); Filter filter = new Filter().setOr(conCritArr); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, filter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(3)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(3)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, filter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + 
LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(4)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(4)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); // Environment Filter originFilter = QueryUtils.newFilter("origin", "PROD"); searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, originFilter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + originFilter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); - verify(_lineageSearchService, times(5)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_lineageSearchService, times(5)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); // Cached searchResult = - _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(ENTITY_NAME), - "*", 1000, originFilter, null, 0, 10, null, null, + _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(ENTITY_NAME), + "*", + 1000, + originFilter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(false)); - verify(_graphService, times(1)).getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - eq(1000), eq(0L), eq(1L)); - verify(_lineageSearchService, times(6)).getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); + verify(_graphService, times(1)) + .getLineage( + eq(TEST_URN), + eq(LineageDirection.DOWNSTREAM), + anyInt(), + anyInt(), + eq(1000), + eq(0L), + eq(1L)); + verify(_lineageSearchService, times(6)) + .getLightningSearchResult(any(), any(), anyInt(), anyInt(), anySet()); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(searchResult.getEntities().size(), 0); @@ -640,13 +1049,15 @@ public void testLightningSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - when(_graphService.getLineage(eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), - anyInt())).thenReturn( - mockResult(ImmutableList.of(new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); + when(_graphService.getLineage( + eq(TEST_URN), eq(LineageDirection.DOWNSTREAM), anyInt(), anyInt(), anyInt())) + .thenReturn( + mockResult( + ImmutableList.of( + new LineageRelationship().setEntity(urn).setType("test1").setDegree(1)))); searchResult = searchAcrossLineage(null, testStar); assertEquals(searchResult.getNumEntities().intValue(), 1); - } @Test @@ -660,11 +1071,13 @@ public void testLightningEnvFiltering() throws Exception { 
platformCounts.put(kafkaPlatform, 200); platformCounts.put(hivePlatform, 50); platformCounts.put(bigQueryPlatform, 100); - List<LineageRelationship> prodLineageRelationships = constructGraph(platformCounts, FabricType.PROD); + List<LineageRelationship> prodLineageRelationships = + constructGraph(platformCounts, FabricType.PROD); // DEV platformCounts.put(kafkaPlatform, 300); - List<LineageRelationship> devLineageRelationships = constructGraph(platformCounts, FabricType.DEV); + List<LineageRelationship> devLineageRelationships = + constructGraph(platformCounts, FabricType.DEV); List<LineageRelationship> lineageRelationships = new ArrayList<>(); lineageRelationships.addAll(prodLineageRelationships); @@ -675,49 +1088,67 @@ public void testLightningEnvFiltering() throws Exception { int size = 10; Set<String> entityNames = Collections.emptySet(); - LineageSearchResult lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + LineageSearchResult lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); // assert that we have the right aggs per env - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("DEV")).findFirst().get(), Long.valueOf(300)); - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("PROD")).findFirst().get(), Long.valueOf(200)); + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("DEV")) + .findFirst() + .get(), + Long.valueOf(300)); + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("PROD")) + .findFirst() + .get(), + Long.valueOf(200)); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion originCrit = new Criterion().setField("origin").setValue("DEV").setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); + Criterion originCrit = + new Criterion().setField("origin").setValue("DEV").setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); from = 500; size = 10; filter = new Filter().setOr(conCritArr); - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from,
size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); // assert that if the query has an env filter, it is applied correctly - assertEquals(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") - ).map(x -> x.getAggregations().get("DEV")).findFirst().get(), Long.valueOf(300)); - assertTrue(lineageSearchResult.getMetadata().getAggregations().stream().filter( - x -> x.getName().equals("origin") && x.getAggregations().containsKey("PROD")) - .collect(Collectors.toList()).isEmpty()); - - + assertEquals( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin")) + .map(x -> x.getAggregations().get("DEV")) + .findFirst() + .get(), + Long.valueOf(300)); + assertTrue( + lineageSearchResult.getMetadata().getAggregations().stream() + .filter(x -> x.getName().equals("origin") && x.getAggregations().containsKey("PROD")) + .collect(Collectors.toList()) + .isEmpty()); } - @Test public void testLightningPagination() throws Exception { Map<String, Integer> platformCounts = new HashMap<>(); @@ -731,35 +1162,41 @@ public void testLightningPagination() throws Exception { List<LineageRelationship> lineageRelationships = constructGraph(platformCounts); - Filter filter = QueryUtils.newFilter("platform", kafkaPlatform); int from = 0; int size = 10; Set<String> entityNames = Collections.emptySet(); - LineageSearchResult lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + LineageSearchResult lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); from = 50; size = 20; - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(500)); assertEquals(lineageSearchResult.getEntities().size(), 20); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name50"); - + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), kafkaPlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name50"); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); - Criterion platform2Crit = new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new
Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform2Crit = + new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); critArr = new CriterionArray(ImmutableList.of(platform2Crit)); @@ -768,25 +1205,31 @@ public void testLightningPagination() throws Exception { from = 500; size = 10; filter = new Filter().setOr(conCritArr); - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - filter, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, filter, from, size, entityNames); assertEquals(lineageSearchResult.getNumEntities(), Integer.valueOf(600)); assertEquals(lineageSearchResult.getEntities().size(), 10); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), hivePlatform); - assertEquals(lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); - + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(0), hivePlatform); + assertEquals( + lineageSearchResult.getEntities().get(0).getEntity().getEntityKey().get(1), "name0"); // Verify aggregations from = 0; size = 10; - lineageSearchResult = _lineageSearchService.getLightningSearchResult(lineageRelationships, - null, from, size, entityNames); + lineageSearchResult = + _lineageSearchService.getLightningSearchResult( + lineageRelationships, null, from, size, entityNames); // Static Degree agg is the first element - LongMap platformAggs = lineageSearchResult.getMetadata().getAggregations().get(1).getAggregations(); - LongMap entityTypeAggs = lineageSearchResult.getMetadata().getAggregations().get(2).getAggregations(); - LongMap environmentAggs = lineageSearchResult.getMetadata().getAggregations().get(3).getAggregations(); + LongMap platformAggs = + lineageSearchResult.getMetadata().getAggregations().get(1).getAggregations(); + LongMap entityTypeAggs = + lineageSearchResult.getMetadata().getAggregations().get(2).getAggregations(); + LongMap environmentAggs = + lineageSearchResult.getMetadata().getAggregations().get(3).getAggregations(); assertEquals(platformAggs.get(kafkaPlatform), Long.valueOf(500)); assertEquals(platformAggs.get(hivePlatform), Long.valueOf(100)); assertEquals(platformAggs.get(bigQueryPlatform), Long.valueOf(200)); @@ -798,18 +1241,21 @@ private List<LineageRelationship> constructGraph(Map<String, Integer> platformCo return constructGraph(platformCounts, FabricType.PROD); } - private List<LineageRelationship> constructGraph(Map<String, Integer> platformCounts, final FabricType env) { + private List<LineageRelationship> constructGraph( + Map<String, Integer> platformCounts, final FabricType env) { List<LineageRelationship> lineageRelationships = new ArrayList<>(); - platformCounts.forEach((key, value) -> { - for (int i = 0; i < value; i++) { - try { - lineageRelationships.add( - constructLineageRelationship(new DatasetUrn(DataPlatformUrn.createFromString(key), "name" + i, env))); - } catch (URISyntaxException e) { - throw new RuntimeException(e); + platformCounts.forEach( + (key, value) -> { + for (int i = 0; i < value; i++) { + try { + lineageRelationships.add( + constructLineageRelationship( + new DatasetUrn(DataPlatformUrn.createFromString(key), "name" + i, env))); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } } - } - }); + }); return lineageRelationships; } @@ -820,19 +1266,40 @@ private
LineageRelationship constructLineageRelationship(Urn urn) { .setType("DOWNSTREAM") .setDegree(1) .setPaths(new UrnArrayArray()); - } // Convenience method to reduce spots where we're sending the same params private LineageSearchResult searchAcrossLineage(@Nullable Filter filter, @Nullable String input) { - return _lineageSearchService.searchAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, - null, filter, null, 0, 10, null, null, + return _lineageSearchService.searchAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + input, + null, + filter, + null, + 0, + 10, + null, + null, new SearchFlags().setSkipCache(true)); } - private LineageScrollResult scrollAcrossLineage(@Nullable Filter filter, @Nullable String input, String scrollId, int size) { - return _lineageSearchService.scrollAcrossLineage(TEST_URN, LineageDirection.DOWNSTREAM, ImmutableList.of(), input, - null, filter, null, scrollId, "5m", size, null, null, + private LineageScrollResult scrollAcrossLineage( + @Nullable Filter filter, @Nullable String input, String scrollId, int size) { + return _lineageSearchService.scrollAcrossLineage( + TEST_URN, + LineageDirection.DOWNSTREAM, + ImmutableList.of(), + input, + null, + filter, + null, + scrollId, + "5m", + size, + null, + null, new SearchFlags().setSkipCache(true)); } @@ -851,29 +1318,39 @@ public void testCanDoLightning() throws Exception { platformCounts.put(hivePlatform, 100); platformCounts.put(bigQueryPlatform, 200); - List<LineageRelationship> lineageRelationships = constructGraph(platformCounts, FabricType.PROD); + List<LineageRelationship> lineageRelationships = + constructGraph(platformCounts, FabricType.PROD); Filter filter = QueryUtils.newFilter("platform", kafkaPlatform); int from = 0; int size = 10; Set<String> entityNames = Collections.emptySet(); - Assert.assertTrue(_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); + Assert.assertTrue( + _lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); // Set up filters ConjunctiveCriterionArray conCritArr = new ConjunctiveCriterionArray(); - Criterion platform1Crit = new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); - Criterion platform2Crit = new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); + Criterion platform1Crit = + new Criterion().setField("platform").setValue(kafkaPlatform).setCondition(Condition.EQUAL); + Criterion platform2Crit = + new Criterion().setField("platform").setValue(hivePlatform).setCondition(Condition.EQUAL); CriterionArray critArr = new CriterionArray(ImmutableList.of(platform1Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); critArr = new CriterionArray(ImmutableList.of(platform2Crit)); conCritArr.add(new ConjunctiveCriterion().setAnd(critArr)); - Criterion originCrit = new Criterion().setField("origin").setValue(FabricType.PROD.name()).setCondition(Condition.EQUAL); - conCritArr.add(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); + Criterion originCrit = + new Criterion() + .setField("origin") + .setValue(FabricType.PROD.name()) + .setCondition(Condition.EQUAL); + conCritArr.add( + new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(originCrit)))); from = 500; size = 10; filter = new Filter().setOr(conCritArr); - Assert.assertTrue(_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); + Assert.assertTrue( +
_lineageSearchService.canDoLightning(lineageRelationships, "*", filter, null)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java index c0144d36843f5..71f35adabce36 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -32,6 +36,7 @@ import com.linkedin.metadata.search.ranker.SimpleRanker; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.cache.CacheManager; import org.springframework.cache.concurrent.ConcurrentMapCacheManager; @@ -40,29 +45,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - - -abstract public class SearchServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class SearchServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -85,18 +83,18 @@ public void setup() { } private void resetSearchService() { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - _cacheManager, - _elasticSearchService, - 100, - true); + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService(_cacheManager, _elasticSearchService, 100, true); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); entityDocCountCacheConfiguration.setTtlSeconds(600L); - _searchService = new SearchService( - new EntityDocCountCache(_entityRegistry, _elasticSearchService, entityDocCountCacheConfiguration), - cachingEntitySearchService, - new SimpleRanker()); + _searchService = + new SearchService( + new EntityDocCountCache( + _entityRegistry, _elasticSearchService, 
entityDocCountCacheConfiguration), + cachingEntitySearchService, + new SimpleRanker()); } @BeforeMethod @@ -108,13 +106,26 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildEntitySearchService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention, _settingsBuilder); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), _indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, getSearchClient(), _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); - ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, - getBulkProcessor(), 1); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); + ESWriteDAO writeDAO = + new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); } @@ -126,11 +137,18 @@ private void clearCache() { @Test public void testSearchService() throws Exception { SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true).setSkipCache(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(true).setSkipCache(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -143,8 +161,9 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -158,8 +177,9 @@ public void testSearchService() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "'test2'", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); 
assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); clearCache(); @@ -170,37 +190,46 @@ public void testSearchService() throws Exception { _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "'test2'", null, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), "'test2'", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); } @Test public void testAdvancedSearchOr() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Criterion subtypeCriterion = new Criterion() - .setField("subtypes") - .setCondition(Condition.EQUAL) - .setValue("") - .setValues(new StringArray(ImmutableList.of("view"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(subtypeCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Criterion subtypeCriterion = + new Criterion() + .setField("subtypes") + .setCondition(Condition.EQUAL) + .setValue("") + .setValues(new StringArray(ImmutableList.of("view"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))), + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(subtypeCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -237,8 +266,15 @@ public void testAdvancedSearchOr() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 2); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(searchResult.getEntities().get(1).getEntity(), urn2); @@ -247,28 +283,38 @@ public void testAdvancedSearchOr() throws Exception { @Test public void testAdvancedSearchSoftDelete() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Criterion removedCriterion = new Criterion() - .setField("removed") - .setCondition(Condition.EQUAL) - 
.setValue("") - .setValues(new StringArray(ImmutableList.of("true"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion, removedCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Criterion removedCriterion = + new Criterion() + .setField("removed") + .setCondition(Condition.EQUAL) + .setValue("") + .setValues(new StringArray(ImmutableList.of("true"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(filterCriterion, removedCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -308,8 +354,15 @@ public void testAdvancedSearchSoftDelete() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); clearCache(); @@ -317,23 +370,30 @@ public void testAdvancedSearchSoftDelete() throws Exception { @Test public void testAdvancedSearchNegated() throws Exception { - final Criterion filterCriterion = new Criterion() - .setField("platform") - .setCondition(Condition.EQUAL) - .setValue("hive") - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("hive"))); - - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))) - )); - + final Criterion filterCriterion = + new Criterion() + .setField("platform") + .setCondition(Condition.EQUAL) + .setValue("hive") + .setNegated(true) + .setValues(new StringArray(ImmutableList.of("hive"))); + + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))))); SearchResult searchResult = - _searchService.searchAcrossEntities(ImmutableList.of(ENTITY_NAME), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + _searchService.searchAcrossEntities( + ImmutableList.of(ENTITY_NAME), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); clearCache(); @@ -373,8 +433,15 @@ public void testAdvancedSearchNegated() throws Exception { syncAfterWrite(getBulkProcessor()); - searchResult = _searchService.searchAcrossEntities(ImmutableList.of(), "test", filterWithCondition, - null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + 
_searchService.searchAcrossEntities( + ImmutableList.of(), + "test", + filterWithCondition, + null, + 0, + 10, + new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn3); clearCache(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java index a4c359b3595c2..b544faa061f0e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/TestEntityTestBase.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.search; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import com.datahub.test.Snapshot; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -23,6 +27,8 @@ import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import java.util.List; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; @@ -30,29 +36,22 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.List; - -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - -abstract public class TestEntityTestBase extends AbstractTestNGSpringContextTests { +public abstract class TestEntityTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); @Nonnull - abstract protected SearchConfiguration getSearchConfiguration(); + protected abstract SearchConfiguration getSearchConfiguration(); @Nonnull - abstract protected CustomSearchConfiguration getCustomSearchConfiguration(); + protected abstract CustomSearchConfiguration getCustomSearchConfiguration(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -72,7 +71,8 @@ public void setup() { @BeforeClass public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); } @@ -84,10 +84,24 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchService buildService() { EntityIndexBuilders indexBuilders = - new EntityIndexBuilders(getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), _indexConvention, false, - 
ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - ESBrowseDAO browseDAO = new ESBrowseDAO(_entityRegistry, getSearchClient(), _indexConvention, getSearchConfiguration(), getCustomSearchConfiguration()); + new EntityIndexBuilders( + getIndexBuilder(), _entityRegistry, _indexConvention, _settingsBuilder); + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + _entityRegistry, + getSearchClient(), + _indexConvention, + getSearchConfiguration(), + getCustomSearchConfiguration()); ESWriteDAO writeDAO = new ESWriteDAO(_entityRegistry, getSearchClient(), _indexConvention, getBulkProcessor(), 1); return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); @@ -95,12 +109,18 @@ private ElasticSearchService buildService() { @Test public void testElasticSearchServiceStructuredQuery() throws Exception { - SearchResult searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + SearchResult searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); BrowseResult browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); ObjectNode document = JsonNodeFactory.instance.objectNode(); @@ -112,10 +132,20 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "foreignKey:Node", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), + "foreignKey:Node", + null, + null, + 0, + 10, + new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); @@ -125,7 +155,9 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + assertEquals( + 
_elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); @@ -137,7 +169,9 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); @@ -148,23 +182,33 @@ public void testElasticSearchServiceStructuredQuery() throws Exception { assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 1); assertEquals(browseResult.getGroups().get(0).getName(), "b"); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(false)); assertEquals(searchResult.getNumEntities().intValue(), 0); browseResult = _elasticSearchService.browse(ENTITY_NAME, "", null, 0, 10); assertEquals(browseResult.getMetadata().getTotalNumEntities().longValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); } @Test public void testElasticSearchServiceFulltext() throws Exception { - SearchResult searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + SearchResult searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); Urn urn = new TestEntityUrn("test", "urn1", "VALUE_1"); @@ -177,13 +221,17 @@ public void testElasticSearchServiceFulltext() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document.toString(), urn.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); 
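+    // setFulltext(true) drives the free-text query path; the structured-query
+    // variant of this flow runs above in testElasticSearchServiceStructuredQuery
+    // with setFulltext(false) against the same documents.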
assertEquals(searchResult.getEntities().get(0).getEntity(), urn); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 1); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), - ImmutableMap.of("textFieldOverride", 1L)); + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + ImmutableMap.of("textFieldOverride", 1L)); Urn urn2 = new TestEntityUrn("test2", "urn2", "VALUE_2"); ObjectNode document2 = JsonNodeFactory.instance.objectNode(); @@ -194,21 +242,31 @@ public void testElasticSearchServiceFulltext() throws Exception { _elasticSearchService.upsertDocument(ENTITY_NAME, document2.toString(), urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 1); assertEquals(searchResult.getEntities().get(0).getEntity(), urn2); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 2); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), - ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); + assertEquals( + _elasticSearchService.aggregateByValue( + ImmutableList.of(ENTITY_NAME), "textFieldOverride", null, 10), + ImmutableMap.of("textFieldOverride", 1L, "textFieldOverride2", 1L)); _elasticSearchService.deleteDocument(ENTITY_NAME, urn.toString()); _elasticSearchService.deleteDocument(ENTITY_NAME, urn2.toString()); syncAfterWrite(getBulkProcessor()); - searchResult = _elasticSearchService.search(List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); + searchResult = + _elasticSearchService.search( + List.of(ENTITY_NAME), "test2", null, null, 0, 10, new SearchFlags().setFulltext(true)); assertEquals(searchResult.getNumEntities().intValue(), 0); assertEquals(_elasticSearchService.docCount(ENTITY_NAME), 0); - assertEquals(_elasticSearchService.aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10).size(), 0); + assertEquals( + _elasticSearchService + .aggregateByValue(ImmutableList.of(ENTITY_NAME), "textField", null, 10) + .size(), + 0); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java index 354b7dc5f609e..175c48e198185 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/cache/CacheableSearcherTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.search.cache; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.Streams; import com.linkedin.common.urn.TestEntityUrn; import com.linkedin.common.urn.Urn; @@ -18,18 +21,19 @@ import org.springframework.cache.concurrent.ConcurrentMapCacheManager; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class CacheableSearcherTest { private CacheManager cacheManager = new ConcurrentMapCacheManager(); @Test public void testCacheableSearcherWhenEmpty() { CacheableSearcher 
emptySearcher = - new CacheableSearcher<>(cacheManager.getCache("emptySearcher"), 10, this::getEmptySearchResult, - CacheableSearcher.QueryPagination::getFrom, null, true); + new CacheableSearcher<>( + cacheManager.getCache("emptySearcher"), + 10, + this::getEmptySearchResult, + CacheableSearcher.QueryPagination::getFrom, + null, + true); assertTrue(emptySearcher.getSearchResults(0, 0).getEntities().isEmpty()); assertTrue(emptySearcher.getSearchResults(0, 10).getEntities().isEmpty()); assertTrue(emptySearcher.getSearchResults(5, 10).getEntities().isEmpty()); @@ -38,8 +42,13 @@ public void testCacheableSearcherWhenEmpty() { @Test public void testCacheableSearcherWithFixedNumResults() { CacheableSearcher fixedBatchSearcher = - new CacheableSearcher<>(cacheManager.getCache("fixedBatchSearcher"), 10, qs -> getSearchResult(qs, 10), - CacheableSearcher.QueryPagination::getFrom, null, true); + new CacheableSearcher<>( + cacheManager.getCache("fixedBatchSearcher"), + 10, + qs -> getSearchResult(qs, 10), + CacheableSearcher.QueryPagination::getFrom, + null, + true); SearchResult result = fixedBatchSearcher.getSearchResults(0, 0); assertTrue(result.getEntities().isEmpty()); @@ -48,21 +57,28 @@ public void testCacheableSearcherWithFixedNumResults() { result = fixedBatchSearcher.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); result = fixedBatchSearcher.getSearchResults(5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) + .collect(Collectors.toList())); } @Test public void testCacheableSearcherWithVariableNumResults() { CacheableSearcher variableBatchSearcher = - new CacheableSearcher<>(cacheManager.getCache("variableBatchSearcher"), 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, null, + new CacheableSearcher<>( + cacheManager.getCache("variableBatchSearcher"), + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, true); SearchResult result = variableBatchSearcher.getSearchResults(0, 0); @@ -72,21 +88,30 @@ public void testCacheableSearcherWithVariableNumResults() { result = variableBatchSearcher.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); result = variableBatchSearcher.getSearchResults(5, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), 
getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat(getUrns(5, 10).stream(), getUrns(0, 5).stream()) + .collect(Collectors.toList())); result = variableBatchSearcher.getSearchResults(5, 100); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 100); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), - Streams.concat(getUrns(5, 10).stream(), getUrns(0, 20).stream(), getUrns(0, 30).stream(), - getUrns(0, 40).stream(), getUrns(0, 5).stream()).collect(Collectors.toList())); + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + Streams.concat( + getUrns(5, 10).stream(), + getUrns(0, 20).stream(), + getUrns(0, 30).stream(), + getUrns(0, 40).stream(), + getUrns(0, 5).stream()) + .collect(Collectors.toList())); } @Test @@ -94,26 +119,36 @@ public void testCacheableSearcherEnabled() { // Verify cache is not interacted with when cache disabled Cache mockCache = Mockito.mock(Cache.class); CacheableSearcher cacheDisabled = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, null, + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, false); SearchResult result = cacheDisabled.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verifyNoInteractions(mockCache); Mockito.reset(mockCache); // Verify cache is updated when cache enabled, but skip cache passed through CacheableSearcher skipCache = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(true), true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + new SearchFlags().setSkipCache(true), + true); result = skipCache.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(0)).get(Mockito.any(), Mockito.any(Class.class)); @@ -121,13 +156,18 @@ public void testCacheableSearcherEnabled() { // Test cache hit when searchFlags is null CacheableSearcher nullFlags = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - null, true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + null, + true); result = nullFlags.getSearchResults(0, 10); 
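+    // With null SearchFlags the searcher still consults the cache: the
+    // verifications below expect exactly one get and one put on the mock
+    // for this result page.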
assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(1)).get(Mockito.any(), Mockito.any(Class.class)); @@ -135,20 +175,26 @@ public void testCacheableSearcherEnabled() { // Test cache hit when skipCache is false CacheableSearcher useCache = - new CacheableSearcher<>(mockCache, 10, - qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), CacheableSearcher.QueryPagination::getFrom, - new SearchFlags().setSkipCache(false), true); + new CacheableSearcher<>( + mockCache, + 10, + qs -> getSearchResult(qs, qs.getFrom() + qs.getSize()), + CacheableSearcher.QueryPagination::getFrom, + new SearchFlags().setSkipCache(false), + true); result = useCache.getSearchResults(0, 10); assertEquals(result.getNumEntities().intValue(), 1000); assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), + assertEquals( + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()), getUrns(0, 10)); Mockito.verify(mockCache, Mockito.times(1)).put(Mockito.any(), Mockito.any()); Mockito.verify(mockCache, Mockito.times(1)).get(Mockito.any(), Mockito.any(Class.class)); } private SearchResult getEmptySearchResult(CacheableSearcher.QueryPagination queryPagination) { - return new SearchResult().setEntities(new SearchEntityArray()) + return new SearchResult() + .setEntities(new SearchEntityArray()) .setNumEntities(0) .setFrom(queryPagination.getFrom()) .setPageSize(queryPagination.getSize()) @@ -161,11 +207,15 @@ private List getUrns(int start, int end) { .collect(Collectors.toList()); } - private SearchResult getSearchResult(CacheableSearcher.QueryPagination queryPagination, int batchSize) { + private SearchResult getSearchResult( + CacheableSearcher.QueryPagination queryPagination, int batchSize) { assert (batchSize <= queryPagination.getSize()); List entities = - getUrns(0, batchSize).stream().map(urn -> new SearchEntity().setEntity(urn)).collect(Collectors.toList()); - return new SearchResult().setEntities(new SearchEntityArray(entities)) + getUrns(0, batchSize).stream() + .map(urn -> new SearchEntity().setEntity(urn)) + .collect(Collectors.toList()); + return new SearchResult() + .setEntities(new SearchEntityArray(entities)) .setNumEntities(1000) .setFrom(queryPagination.getFrom()) .setPageSize(queryPagination.getSize()) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java index 750423a024dcc..0810bbc9d19f8 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/ElasticSearchSuite.java @@ -7,26 +7,26 @@ import org.testcontainers.containers.GenericContainer; import org.testng.annotations.AfterSuite; - @TestConfiguration public class ElasticSearchSuite extends AbstractTestNGSpringContextTests { - private static final ElasticsearchTestContainer ELASTICSEARCH_TEST_CONTAINER; - private static GenericContainer container; - static { - 
ELASTICSEARCH_TEST_CONTAINER = new ElasticsearchTestContainer(); - } + private static final ElasticsearchTestContainer ELASTICSEARCH_TEST_CONTAINER; + private static GenericContainer container; - @AfterSuite - public void after() { - ELASTICSEARCH_TEST_CONTAINER.stopContainer(); - } + static { + ELASTICSEARCH_TEST_CONTAINER = new ElasticsearchTestContainer(); + } + + @AfterSuite + public void after() { + ELASTICSEARCH_TEST_CONTAINER.stopContainer(); + } - @Bean(name = "testSearchContainer") - public GenericContainer testSearchContainer() { - if (container == null) { - container = ELASTICSEARCH_TEST_CONTAINER.startContainer(); - } - return container; + @Bean(name = "testSearchContainer") + public GenericContainer testSearchContainer() { + if (container == null) { + container = ELASTICSEARCH_TEST_CONTAINER.startContainer(); } + return container; + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java index cfacd4c15409a..ea5b9a74b476e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/GoldenElasticSearchTest.java @@ -1,9 +1,11 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.search.fixtures.GoldenTestBase; -import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.fixtures.GoldenTestBase; +import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; @@ -11,34 +13,35 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class GoldenElasticSearchTest extends GoldenTestBase { - @Autowired - @Qualifier("longTailSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; - - - @NotNull - @Override - protected EntityRegistry getEntityRegistry() { - return entityRegistry; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - assertNotNull(searchService); - } + @Autowired + @Qualifier("longTailSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; + + @NotNull + @Override + protected EntityRegistry getEntityRegistry() { + return entityRegistry; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + assertNotNull(searchService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java 
b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java index 20f4ee52f0e62..911a21767bdea 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/IndexBuilderElasticSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.search.indexbuilder.IndexBuilderTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; @@ -8,23 +10,19 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class IndexBuilderElasticSearchTest extends IndexBuilderTestBase { - @Autowired - private RestHighLevelClient _searchClient; + @Autowired private RestHighLevelClient _searchClient; - @NotNull - @Override - protected RestHighLevelClient getSearchClient() { - return _searchClient; - } + @NotNull + @Override + protected RestHighLevelClient getSearchClient() { + return _searchClient; + } - @Test - public void initTest() { - assertNotNull(_searchClient); - } + @Test + public void initTest() { + assertNotNull(_searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java index 0cb49bc555421..1fed3380a342d 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageDataFixtureElasticSearchTest.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; -import io.datahubproject.test.fixtures.search.SearchLineageFixtureConfiguration; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.search.fixtures.LineageDataFixtureTestBase; +import io.datahubproject.test.fixtures.search.SearchLineageFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; @@ -12,32 +12,35 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({ElasticSearchSuite.class, SearchLineageFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchLineageFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageDataFixtureElasticSearchTest extends LineageDataFixtureTestBase { - @Autowired - @Qualifier("searchLineageSearchService") - protected SearchService searchService; - - @Autowired - @Qualifier("searchLineageLineageSearchService") - protected LineageSearchService lineageService; - - @NotNull - @Override - protected LineageSearchService getLineageService() { - return lineageService; - } - - @NotNull - @Override - protected SearchService getSearchService() { - return searchService; - } - - @Test - public void initTest() { - 
AssertJUnit.assertNotNull(lineageService); - } + @Autowired + @Qualifier("searchLineageSearchService") + protected SearchService searchService; + + @Autowired + @Qualifier("searchLineageLineageSearchService") + protected LineageSearchService lineageService; + + @NotNull + @Override + protected LineageSearchService getLineageService() { + return lineageService; + } + + @NotNull + @Override + protected SearchService getSearchService() { + return searchService; + } + + @Test + public void initTest() { + AssertJUnit.assertNotNull(lineageService); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java index 613ec5a26ff66..8c4195f9ff534 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/LineageServiceElasticSearchTest.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.search.elasticsearch; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.search.LineageServiceTestBase; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import com.linkedin.metadata.search.LineageServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import io.datahubproject.test.search.config.SearchCommonTestConfiguration; @@ -14,20 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class LineageServiceElasticSearchTest extends LineageServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java index 855f46d239118..eea352a866042 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SampleDataFixtureElasticSearchTest.java @@ -1,11 +1,12 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.SampleDataFixtureTestBase; import 
io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; - import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import lombok.Getter; import org.opensearch.client.RestHighLevelClient; @@ -14,32 +15,30 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - -/** - * Runs sample data fixture tests for Elasticsearch test container - */ +/** Runs sample data fixture tests for Elasticsearch test container */ @Getter -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SampleDataFixtureElasticSearchTest extends SampleDataFixtureTestBase { - @Autowired - private RestHighLevelClient searchClient; + @Autowired private RestHighLevelClient searchClient; - @Autowired - @Qualifier("sampleDataSearchService") - protected SearchService searchService; + @Autowired + @Qualifier("sampleDataSearchService") + protected SearchService searchService; - @Autowired - @Qualifier("sampleDataEntityClient") - protected EntityClient entityClient; + @Autowired + @Qualifier("sampleDataEntityClient") + protected EntityClient entityClient; - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; - @Test - public void initTest() { - assertNotNull(searchClient); - } + @Test + public void initTest() { + assertNotNull(searchClient); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java index 1a6a20cd9df9d..e5af1978be5d2 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchDAOElasticSearchTest.java @@ -1,29 +1,29 @@ package com.linkedin.metadata.search.elasticsearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.search.query.SearchDAOTestBase; -import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; - +import io.datahubproject.test.fixtures.search.SampleDataFixtureConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import lombok.Getter; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Import; - import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Getter -@Import({ElasticSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchDAOElasticSearchTest extends SearchDAOTestBase { - @Autowired - private RestHighLevelClient searchClient; - @Autowired - private SearchConfiguration searchConfiguration; + @Autowired private 
RestHighLevelClient searchClient; + @Autowired private SearchConfiguration searchConfiguration; + @Autowired @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java index a9e9feac28007..7133971847f98 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SearchServiceElasticSearchTest.java @@ -1,11 +1,11 @@ package com.linkedin.metadata.search.elasticsearch; import com.linkedin.metadata.config.search.SearchConfiguration; -import com.linkedin.metadata.search.SearchServiceTestBase; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import com.linkedin.metadata.search.SearchServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchServiceElasticSearchTest extends SearchServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java index 7365887fb9b2e..a23cd5b051ecb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/SystemMetadataServiceElasticSearchTest.java @@ -1,8 +1,8 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.systemmetadata.SystemMetadataServiceTestBase; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.systemmetadata.SystemMetadataServiceTestBase; import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import 
org.opensearch.client.RestHighLevelClient; @@ -11,16 +11,12 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class SystemMetadataServiceElasticSearchTest extends SystemMetadataServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java index bec610b20dca1..843da17fbd132 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TestEntityElasticSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({ElasticSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + ElasticSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class TestEntityElasticSearchTest extends TestEntityTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java index 5b85904edc923..6ebe42d0181e4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/elasticsearch/TimeseriesAspectServiceElasticSearchTest.java @@ -1,9 +1,9 @@ package com.linkedin.metadata.search.elasticsearch; -import com.linkedin.metadata.timeseries.search.TimeseriesAspectServiceTestBase; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.timeseries.search.TimeseriesAspectServiceTestBase; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import org.jetbrains.annotations.NotNull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; @@ -14,12 +14,9 @@ @Import({ElasticSearchSuite.class, SearchTestContainerConfiguration.class}) public class TimeseriesAspectServiceElasticSearchTest extends 
TimeseriesAspectServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index ed81f3cebd027..fba9d5359d29f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.fixtures; +import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities; +import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; +import static org.testng.Assert.assertTrue; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; @@ -8,151 +13,165 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossCustomEntities; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; -import static org.testng.Assert.assertTrue; -import static org.testng.AssertJUnit.assertNotNull; +public abstract class GoldenTestBase extends AbstractTestNGSpringContextTests { + + private static final List SEARCHABLE_LONGTAIL_ENTITIES = + Stream.of( + EntityType.CHART, + EntityType.CONTAINER, + EntityType.DASHBOARD, + EntityType.DATASET, + EntityType.DOMAIN, + EntityType.TAG) + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + @Nonnull + protected abstract EntityRegistry getEntityRegistry(); -abstract public class GoldenTestBase extends AbstractTestNGSpringContextTests { - - private static final List SEARCHABLE_LONGTAIL_ENTITIES = Stream.of(EntityType.CHART, EntityType.CONTAINER, - EntityType.DASHBOARD, EntityType.DATASET, EntityType.DOMAIN, EntityType.TAG - ).map(EntityTypeMapper::getName) - .collect(Collectors.toList()); - - @Nonnull - abstract protected EntityRegistry getEntityRegistry(); - - @Nonnull - abstract protected SearchService getSearchService(); - - @Test - public void testNameMatchPetProfiles() { - /* - Searching for "pet profiles" should return "pet_profiles" as the first 2 search results - */ - assertNotNull(getSearchService()); - assertNotNull(getEntityRegistry()); - SearchResult searchResult = searchAcrossCustomEntities(getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = 
searchResult.getEntities().get(1).getEntity(); - - assertTrue(firstResultUrn.toString().contains("pet_profiles")); - assertTrue(secondResultUrn.toString().contains("pet_profiles")); - } - - @Test - public void testNameMatchPetProfile() { - /* - Searching for "pet profile" should return "pet_profiles" as the first 2 search results - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - assertTrue(firstResultUrn.toString().contains("pet_profiles")); - assertTrue(secondResultUrn.toString().contains("pet_profiles")); - } - - @Test - public void testGlossaryTerms() { - /* - Searching for "ReturnRate" should return all tables that have the glossary term applied before - anything else - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES); - SearchEntityArray entities = searchResult.getEntities(); - assertTrue(searchResult.getEntities().size() >= 4); - MatchedFieldArray firstResultMatchedFields = entities.get(0).getMatchedFields(); - MatchedFieldArray secondResultMatchedFields = entities.get(1).getMatchedFields(); - MatchedFieldArray thirdResultMatchedFields = entities.get(2).getMatchedFields(); - MatchedFieldArray fourthResultMatchedFields = entities.get(3).getMatchedFields(); - - assertTrue(firstResultMatchedFields.toString().contains("ReturnRate")); - assertTrue(secondResultMatchedFields.toString().contains("ReturnRate")); - assertTrue(thirdResultMatchedFields.toString().contains("ReturnRate")); - assertTrue(fourthResultMatchedFields.toString().contains("ReturnRate")); - } - - @Test - public void testNameMatchPartiallyQualified() { - /* - Searching for "analytics.pet_details" (partially qualified) should return the fully qualified table - name as the first search results before any others - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - assertTrue(firstResultUrn.toString().contains("snowflake,long_tail_companions.analytics.pet_details")); - assertTrue(secondResultUrn.toString().contains("dbt,long_tail_companions.analytics.pet_details")); - } - - @Test - public void testNameMatchCollaborativeActionitems() { - /* - Searching for "collaborative actionitems" should return "collaborative_actionitems" as the first search - result, followed by "collaborative_actionitems_old" - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); - - // Checks that the table name is not suffixed with anything - assertTrue(firstResultUrn.toString().contains("collaborative_actionitems,")); - assertTrue(secondResultUrn.toString().contains("collaborative_actionitems_old")); - - Double firstResultScore = 
searchResult.getEntities().get(0).getScore(); - Double secondResultScore = searchResult.getEntities().get(1).getScore(); - - // Checks that the scores aren't tied so that we are matching on table name more than column name - assertTrue(firstResultScore > secondResultScore); - } - - @Test - public void testNameMatchCustomerOrders() { - /* - Searching for "customer orders" should return "customer_orders" as the first search - result, not suffixed by anything - */ - assertNotNull(getSearchService()); - SearchResult searchResult = searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES); - assertTrue(searchResult.getEntities().size() >= 2); - Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); - - // Checks that the table name is not suffixed with anything - assertTrue(firstResultUrn.toString().contains("customer_orders,"), - "Expected firstResultUrn to contain `customer_orders,` but results are " - + searchResult.getEntities().stream() - .map(e -> String.format("(Score: %s Urn: %s)", e.getScore(), e.getEntity().getId())) - .collect(Collectors.joining(", "))); - - Double firstResultScore = searchResult.getEntities().get(0).getScore(); - Double secondResultScore = searchResult.getEntities().get(1).getScore(); - - // Checks that the scores aren't tied so that we are matching on table name more than column name - assertTrue(firstResultScore > secondResultScore); - } + @Nonnull + protected abstract SearchService getSearchService(); + @Test + public void testNameMatchPetProfiles() { + /* + Searching for "pet profiles" should return "pet_profiles" as the first 2 search results + */ + assertNotNull(getSearchService()); + assertNotNull(getEntityRegistry()); + SearchResult searchResult = + searchAcrossCustomEntities( + getSearchService(), "pet profiles", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + assertTrue(firstResultUrn.toString().contains("pet_profiles")); + assertTrue(secondResultUrn.toString().contains("pet_profiles")); + } + + @Test + public void testNameMatchPetProfile() { + /* + Searching for "pet profile" should return "pet_profiles" as the first 2 search results + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities(getSearchService(), "pet profile", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + assertTrue(firstResultUrn.toString().contains("pet_profiles")); + assertTrue(secondResultUrn.toString().contains("pet_profiles")); + } + + @Test + public void testGlossaryTerms() { + /* + Searching for "ReturnRate" should return all tables that have the glossary term applied before + anything else + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities(getSearchService(), "ReturnRate", SEARCHABLE_LONGTAIL_ENTITIES); + SearchEntityArray entities = searchResult.getEntities(); + assertTrue(searchResult.getEntities().size() >= 4); + MatchedFieldArray firstResultMatchedFields = entities.get(0).getMatchedFields(); + MatchedFieldArray secondResultMatchedFields = entities.get(1).getMatchedFields(); + MatchedFieldArray thirdResultMatchedFields = entities.get(2).getMatchedFields(); + MatchedFieldArray 
fourthResultMatchedFields = entities.get(3).getMatchedFields(); + + assertTrue(firstResultMatchedFields.toString().contains("ReturnRate")); + assertTrue(secondResultMatchedFields.toString().contains("ReturnRate")); + assertTrue(thirdResultMatchedFields.toString().contains("ReturnRate")); + assertTrue(fourthResultMatchedFields.toString().contains("ReturnRate")); + } + + @Test + public void testNameMatchPartiallyQualified() { + /* + Searching for "analytics.pet_details" (partially qualified) should return the fully qualified table + name as the first search results before any others + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), "analytics.pet_details", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + assertTrue( + firstResultUrn.toString().contains("snowflake,long_tail_companions.analytics.pet_details")); + assertTrue( + secondResultUrn.toString().contains("dbt,long_tail_companions.analytics.pet_details")); + } + + @Test + public void testNameMatchCollaborativeActionitems() { + /* + Searching for "collaborative actionitems" should return "collaborative_actionitems" as the first search + result, followed by "collaborative_actionitems_old" + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities( + getSearchService(), "collaborative actionitems", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + Urn secondResultUrn = searchResult.getEntities().get(1).getEntity(); + + // Checks that the table name is not suffixed with anything + assertTrue(firstResultUrn.toString().contains("collaborative_actionitems,")); + assertTrue(secondResultUrn.toString().contains("collaborative_actionitems_old")); + + Double firstResultScore = searchResult.getEntities().get(0).getScore(); + Double secondResultScore = searchResult.getEntities().get(1).getScore(); + + // Checks that the scores aren't tied so that we are matching on table name more than column + // name + assertTrue(firstResultScore > secondResultScore); + } + + @Test + public void testNameMatchCustomerOrders() { /* - Tests that should pass but do not yet can be added below here, with the following annotation: - @Test(enabled = false) - */ + Searching for "customer orders" should return "customer_orders" as the first search + result, not suffixed by anything + */ + assertNotNull(getSearchService()); + SearchResult searchResult = + searchAcrossEntities(getSearchService(), "customer orders", SEARCHABLE_LONGTAIL_ENTITIES); + assertTrue(searchResult.getEntities().size() >= 2); + Urn firstResultUrn = searchResult.getEntities().get(0).getEntity(); + + // Checks that the table name is not suffixed with anything + assertTrue( + firstResultUrn.toString().contains("customer_orders,"), + "Expected firstResultUrn to contain `customer_orders,` but results are " + + searchResult.getEntities().stream() + .map(e -> String.format("(Score: %s Urn: %s)", e.getScore(), e.getEntity().getId())) + .collect(Collectors.joining(", "))); + + Double firstResultScore = searchResult.getEntities().get(0).getScore(); + Double secondResultScore = searchResult.getEntities().get(1).getScore(); + + // Checks that the scores aren't tied so that we are matching on table name more than column + // name + 
assertTrue(firstResultScore > secondResultScore); + } + + /* + Tests that should pass but do not yet can be added below here, with the following annotation: + @Test(enabled = false) + */ } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java index eaf8feedeb6ed..44fe5ea8ac9ae 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java @@ -1,61 +1,64 @@ package com.linkedin.metadata.search.fixtures; +import static io.datahubproject.test.search.SearchTestUtils.lineage; +import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; +import java.net.URISyntaxException; +import javax.annotation.Nonnull; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.net.URISyntaxException; - -import static io.datahubproject.test.search.SearchTestUtils.lineage; -import static io.datahubproject.test.search.SearchTestUtils.searchAcrossEntities; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - -abstract public class LineageDataFixtureTestBase extends AbstractTestNGSpringContextTests { - - @Nonnull - abstract protected LineageSearchService getLineageService(); +public abstract class LineageDataFixtureTestBase extends AbstractTestNGSpringContextTests { - @Nonnull - abstract protected SearchService getSearchService(); + @Nonnull + protected abstract LineageSearchService getLineageService(); + @Nonnull + protected abstract SearchService getSearchService(); - @Test - public void testFixtureInitialization() { - assertNotNull(getSearchService()); - SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); - assertEquals(noResult.getEntities().size(), 0); + @Test + public void testFixtureInitialization() { + assertNotNull(getSearchService()); + SearchResult noResult = searchAcrossEntities(getSearchService(), "no results"); + assertEquals(noResult.getEntities().size(), 0); - SearchResult result = searchAcrossEntities(getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8"); - assertEquals(result.getEntities().size(), 1); + SearchResult result = + searchAcrossEntities( + getSearchService(), "e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8"); + assertEquals(result.getEntities().size(), 1); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); - LineageSearchResult lineageResult = lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1); - assertEquals(lineageResult.getEntities().size(), 10); - } + LineageSearchResult lineageResult = + 
lineage(getLineageService(), result.getEntities().get(0).getEntity(), 1); + assertEquals(lineageResult.getEntities().size(), 10); + } - @Test - public void testDatasetLineage() throws URISyntaxException { - Urn testUrn = Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); + @Test + public void testDatasetLineage() throws URISyntaxException { + Urn testUrn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:9cf8c96,e3859789eed1cef55288b44f016ee08290d9fd08973e565c112d8,PROD)"); - // 1 hops - LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1); - assertEquals(lineageResult.getEntities().size(), 10); + // 1 hops + LineageSearchResult lineageResult = lineage(getLineageService(), testUrn, 1); + assertEquals(lineageResult.getEntities().size(), 10); - // 2 hops - lineageResult = lineage(getLineageService(), testUrn, 2); - assertEquals(lineageResult.getEntities().size(), 5); + // 2 hops + lineageResult = lineage(getLineageService(), testUrn, 2); + assertEquals(lineageResult.getEntities().size(), 5); - // 3 hops - lineageResult = lineage(getLineageService(), testUrn, 3); - assertEquals(lineageResult.getEntities().size(), 12); - } + // 3 hops + lineageResult = lineage(getLineageService(), testUrn, 3); + assertEquals(lineageResult.getEntities().size(), 12); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java index 69dd5c80bef1d..a1af2325ee0ed 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java @@ -1,5 +1,16 @@ package com.linkedin.metadata.search.fixtures; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder.STRUCTURED_QUERY_PREFIX; +import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; +import static io.datahubproject.test.search.SearchTestUtils.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertSame; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -32,6 +43,16 @@ import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.r2.RemoteInvocationException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; +import javax.annotation.Nonnull; import org.junit.Assert; import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; @@ -45,1470 +66,1882 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; 
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import java.util.stream.Stream;
-
-import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME;
-import static com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder.STRUCTURED_QUERY_PREFIX;
-import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR;
-import static io.datahubproject.test.search.SearchTestUtils.*;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertSame;
-import static org.testng.Assert.assertTrue;
-
-abstract public class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests {
- protected static final Authentication AUTHENTICATION =
- new Authentication(new Actor(ActorType.USER, "test"), "");
-
- @Nonnull
- abstract protected EntityRegistry getEntityRegistry();
-
- @Nonnull
- abstract protected SearchService getSearchService();
-
- @Nonnull
- abstract protected EntityClient getEntityClient();
-
- @Nonnull
- abstract protected RestHighLevelClient getSearchClient();
-
- @Test
- public void testSearchFieldConfig() throws IOException {
- /*
- For every field in every entity fixture, ensure proper detection of field types and analyzers
- */
- Map<EntitySpec, String> fixtureEntities = new HashMap<>();
- fixtureEntities.put(getEntityRegistry().getEntitySpec("dataset"), "smpldat_datasetindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("chart"), "smpldat_chartindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("container"), "smpldat_containerindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("corpgroup"), "smpldat_corpgroupindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("corpuser"), "smpldat_corpuserindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("dashboard"), "smpldat_dashboardindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("dataflow"), "smpldat_dataflowindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("datajob"), "smpldat_datajobindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("domain"), "smpldat_domainindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("glossarynode"), "smpldat_glossarynodeindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("glossaryterm"), "smpldat_glossarytermindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("mlfeature"), "smpldat_mlfeatureindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("mlfeaturetable"), "smpldat_mlfeaturetableindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodelgroup"), "smpldat_mlmodelgroupindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodel"), "smpldat_mlmodelindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("mlprimarykey"), "smpldat_mlprimarykeyindex_v2");
- fixtureEntities.put(getEntityRegistry().getEntitySpec("tag"), "smpldat_tagindex_v2");
-
- for (Map.Entry<EntitySpec, String> entry : fixtureEntities.entrySet()) {
- EntitySpec entitySpec = entry.getKey();
- GetMappingsRequest req = new GetMappingsRequest().indices(entry.getValue());
-
- GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT);
- Map<String, Map<String, Object>> mappings = (Map<String, Map<String, Object>>) resp.mappings()
- .get(entry.getValue()).sourceAsMap().get("properties");
-
- // For every fieldSpec determine whether the SearchFieldConfig is accurate
- for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) {
- SearchFieldConfig test = SearchFieldConfig.detectSubFieldType(fieldSpec);
-
- if (!test.fieldName().contains(".")) {
- Map<String, Object> actual = mappings.get(test.fieldName());
-
- final String expectedAnalyzer;
- if (actual.get("search_analyzer") != null) {
- expectedAnalyzer = (String) actual.get("search_analyzer");
- } else if (actual.get("analyzer") != null) {
- expectedAnalyzer = (String) actual.get("analyzer");
- } else {
- expectedAnalyzer = "keyword";
- }
-
- assertEquals(test.analyzer(), expectedAnalyzer,
- String.format("Expected search analyzer to match for entity: `%s`field: `%s`",
- entitySpec.getName(), test.fieldName()));
-
- if (test.hasDelimitedSubfield()) {
- assertTrue(((Map<String, Map<String, Object>>) actual.get("fields")).containsKey("delimited"),
- String.format("Expected entity: `%s` field to have .delimited subfield: `%s`",
- entitySpec.getName(), test.fieldName()));
- } else {
- boolean nosubfield = !actual.containsKey("fields")
- || !((Map<String, Map<String, Object>>) actual.get("fields")).containsKey("delimited");
- assertTrue(nosubfield, String.format("Expected entity: `%s` field to NOT have .delimited subfield: `%s`",
- entitySpec.getName(), test.fieldName()));
- }
- if (test.hasKeywordSubfield()) {
- assertTrue(((Map<String, Map<String, Object>>) actual.get("fields")).containsKey("keyword"),
- String.format("Expected entity: `%s` field to have .keyword subfield: `%s`",
- entitySpec.getName(), test.fieldName()));
- } else {
- boolean nosubfield = !actual.containsKey("fields")
- || !((Map<String, Map<String, Object>>) actual.get("fields")).containsKey("keyword");
- assertTrue(nosubfield, String.format("Expected entity: `%s` field to NOT have .keyword subfield: `%s`",
- entitySpec.getName(), test.fieldName()));
- }
- } else {
- // this is a subfield therefore cannot have a subfield
- assertFalse(test.hasKeywordSubfield());
- assertFalse(test.hasDelimitedSubfield());
- assertFalse(test.hasWordGramSubfields());
-
- String[] fieldAndSubfield = test.fieldName().split("[.]", 2);
-
- Map<String, Object> actualParent = mappings.get(fieldAndSubfield[0]);
- Map<String, Object> actualSubfield = ((Map<String, Map<String, Object>>) actualParent.get("fields")).get(fieldAndSubfield[0]);
-
- String expectedAnalyzer = actualSubfield.get("search_analyzer") != null ? 
(String) actualSubfield.get("search_analyzer")
- : "keyword";
-
- assertEquals(test.analyzer(), expectedAnalyzer,
- String.format("Expected search analyzer to match for field `%s`", test.fieldName()));
- }
- }
}
+public abstract class SampleDataFixtureTestBase extends AbstractTestNGSpringContextTests {
+ protected static final Authentication AUTHENTICATION =
+ new Authentication(new Actor(ActorType.USER, "test"), "");
+
+ @Nonnull
+ protected abstract EntityRegistry getEntityRegistry();
+
+ @Nonnull
+ protected abstract SearchService getSearchService();
+
+ @Nonnull
+ protected abstract EntityClient getEntityClient();
+
+ @Nonnull
+ protected abstract RestHighLevelClient getSearchClient();
+
+ @Test
+ public void testSearchFieldConfig() throws IOException {
+ /*
+ For every field in every entity fixture, ensure proper detection of field types and analyzers
+ */
+ Map<EntitySpec, String> fixtureEntities = new HashMap<>();
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("dataset"), "smpldat_datasetindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("chart"), "smpldat_chartindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("container"), "smpldat_containerindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("corpgroup"), "smpldat_corpgroupindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("corpuser"), "smpldat_corpuserindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("dashboard"), "smpldat_dashboardindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("dataflow"), "smpldat_dataflowindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("datajob"), "smpldat_datajobindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("domain"), "smpldat_domainindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("glossarynode"), "smpldat_glossarynodeindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("glossaryterm"), "smpldat_glossarytermindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("mlfeature"), "smpldat_mlfeatureindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("mlfeaturetable"), "smpldat_mlfeaturetableindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("mlmodelgroup"), "smpldat_mlmodelgroupindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("mlmodel"), "smpldat_mlmodelindex_v2");
+ fixtureEntities.put(
+ getEntityRegistry().getEntitySpec("mlprimarykey"), "smpldat_mlprimarykeyindex_v2");
+ fixtureEntities.put(getEntityRegistry().getEntitySpec("tag"), "smpldat_tagindex_v2");
+
+ for (Map.Entry<EntitySpec, String> entry : fixtureEntities.entrySet()) {
+ EntitySpec entitySpec = entry.getKey();
+ GetMappingsRequest req = new GetMappingsRequest().indices(entry.getValue());
+
+ GetMappingsResponse resp =
+ getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT);
+ Map<String, Map<String, Object>> mappings =
+ (Map<String, Map<String, Object>>)
+ resp.mappings().get(entry.getValue()).sourceAsMap().get("properties");
+
+ // For every fieldSpec determine whether the SearchFieldConfig is accurate
+ for (SearchableFieldSpec fieldSpec : entitySpec.getSearchableFieldSpecs()) {
+ SearchFieldConfig test = SearchFieldConfig.detectSubFieldType(fieldSpec);
+
+ if (!test.fieldName().contains(".")) {
+ Map<String, Object> actual = mappings.get(test.fieldName());
+
+ final String expectedAnalyzer;
+ if (actual.get("search_analyzer") != null) {
+ expectedAnalyzer = (String) actual.get("search_analyzer");
+ } else if (actual.get("analyzer") != null) {
+ expectedAnalyzer = (String) actual.get("analyzer");
+ } else {
+ expectedAnalyzer = "keyword";
+ }
+
+ assertEquals(
+ test.analyzer(),
+ expectedAnalyzer,
+ String.format(
+ "Expected search analyzer to match for entity: `%s`field: `%s`",
+ entitySpec.getName(), test.fieldName()));
+
+ if (test.hasDelimitedSubfield()) {
+ assertTrue(
+ ((Map<String, Map<String, Object>>) actual.get("fields")).containsKey("delimited"),
+ String.format(
+ "Expected entity: `%s` field to have .delimited subfield: `%s`",
+ entitySpec.getName(), test.fieldName()));
+ } else {
+ boolean nosubfield =
+ !actual.containsKey("fields")
+ || !((Map<String, Map<String, Object>>) actual.get("fields"))
+ .containsKey("delimited");
+ assertTrue(
+ nosubfield,
+ String.format(
+ "Expected entity: `%s` field to NOT have .delimited subfield: `%s`",
+ entitySpec.getName(), test.fieldName()));
+ }
+ if (test.hasKeywordSubfield()) {
+ assertTrue(
+ ((Map<String, Map<String, Object>>) actual.get("fields")).containsKey("keyword"),
+ String.format(
+ "Expected entity: `%s` field to have .keyword subfield: `%s`",
+ entitySpec.getName(), test.fieldName()));
+ } else {
+ boolean nosubfield =
+ !actual.containsKey("fields")
+ || !((Map<String, Map<String, Object>>) actual.get("fields"))
+ .containsKey("keyword");
+ assertTrue(
+ nosubfield,
+ String.format(
+ "Expected entity: `%s` field to NOT have .keyword subfield: `%s`",
+ entitySpec.getName(), test.fieldName()));
+ }
+ } else {
+ // this is a subfield therefore cannot have a subfield
+ assertFalse(test.hasKeywordSubfield());
+ assertFalse(test.hasDelimitedSubfield());
+ assertFalse(test.hasWordGramSubfields());
+
+ String[] fieldAndSubfield = test.fieldName().split("[.]", 2);
+
+ Map<String, Object> actualParent = mappings.get(fieldAndSubfield[0]);
+ Map<String, Object> actualSubfield =
+ ((Map<String, Map<String, Object>>) actualParent.get("fields"))
+ .get(fieldAndSubfield[0]);
+
+ String expectedAnalyzer =
+ actualSubfield.get("search_analyzer") != null
+ ? 
(String) actualSubfield.get("search_analyzer")
+ : "keyword";
+
+ assertEquals(
+ test.analyzer(),
+ expectedAnalyzer,
+ String.format("Expected search analyzer to match for field `%s`", test.fieldName()));
}
}
}
-
- @Test
- public void testGetSortOrder() {
- String dateFieldName = "lastOperationTime";
- List<String> entityNamesToTestSearch = List.of("dataset", "chart", "corpgroup");
- List<EntitySpec> entitySpecs = entityNamesToTestSearch.stream().map(
- name -> getEntityRegistry().getEntitySpec(name))
+ }
+ }
+
+ @Test
+ public void testGetSortOrder() {
+ String dateFieldName = "lastOperationTime";
+ List<String> entityNamesToTestSearch = List.of("dataset", "chart", "corpgroup");
+ List<EntitySpec> entitySpecs =
+ entityNamesToTestSearch.stream()
+ .map(name -> getEntityRegistry().getEntitySpec(name))
.collect(Collectors.toList());
- SearchSourceBuilder builder = new SearchSourceBuilder();
- SortCriterion sortCriterion = new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName);
- ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs);
- List<SortBuilder<?>> sorts = builder.sorts();
- assertEquals(sorts.size(), 2); // sort by last modified and then by urn
- for (SortBuilder<?> sort : sorts) {
- assertTrue(sort instanceof FieldSortBuilder);
- FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort;
- if (fieldSortBuilder.getFieldName().equals(dateFieldName)) {
- assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.DESC);
- assertEquals(fieldSortBuilder.unmappedType(), "date");
- } else {
- assertEquals(fieldSortBuilder.getFieldName(), "urn");
- }
- }
-
- // Test alias field
- String entityNameField = "_entityName";
- SearchSourceBuilder nameBuilder = new SearchSourceBuilder();
- SortCriterion nameCriterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField);
- ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs);
- sorts = nameBuilder.sorts();
- assertEquals(sorts.size(), 2);
- for (SortBuilder<?> sort : sorts) {
- assertTrue(sort instanceof FieldSortBuilder);
- FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort;
- if (fieldSortBuilder.getFieldName().equals(entityNameField)) {
- assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.ASC);
- assertEquals(fieldSortBuilder.unmappedType(), "keyword");
- } else {
- assertEquals(fieldSortBuilder.getFieldName(), "urn");
- }
- }
+ SearchSourceBuilder builder = new SearchSourceBuilder();
+ SortCriterion sortCriterion =
+ new SortCriterion().setOrder(SortOrder.DESCENDING).setField(dateFieldName);
+ ESUtils.buildSortOrder(builder, sortCriterion, entitySpecs);
+ List<SortBuilder<?>> sorts = builder.sorts();
+ assertEquals(sorts.size(), 2); // sort by last modified and then by urn
+ for (SortBuilder<?> sort : sorts) {
+ assertTrue(sort instanceof FieldSortBuilder);
+ FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort;
+ if (fieldSortBuilder.getFieldName().equals(dateFieldName)) {
+ assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.DESC);
+ assertEquals(fieldSortBuilder.unmappedType(), "date");
+ } else {
+ assertEquals(fieldSortBuilder.getFieldName(), "urn");
+ }
}
}
- @Test
- public void testDatasetHasTags() throws IOException {
- GetMappingsRequest req = new GetMappingsRequest()
- .indices("smpldat_datasetindex_v2");
- GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT);
- Map<String, Map<String, Object>> mappings = (Map<String, Map<String, Object>>) resp.mappings()
- .get("smpldat_datasetindex_v2").sourceAsMap().get("properties");
- assertTrue(mappings.containsKey("hasTags"));
- assertEquals(mappings.get("hasTags"), Map.of("type", "boolean"));
+ // Test alias field
+ String entityNameField = "_entityName";
+ SearchSourceBuilder nameBuilder = new SearchSourceBuilder();
+ SortCriterion nameCriterion =
+ new SortCriterion().setOrder(SortOrder.ASCENDING).setField(entityNameField);
+ ESUtils.buildSortOrder(nameBuilder, nameCriterion, entitySpecs);
+ sorts = nameBuilder.sorts();
+ assertEquals(sorts.size(), 2);
+ for (SortBuilder<?> sort : sorts) {
+ assertTrue(sort instanceof FieldSortBuilder);
+ FieldSortBuilder fieldSortBuilder = (FieldSortBuilder) sort;
+ if (fieldSortBuilder.getFieldName().equals(entityNameField)) {
+ assertEquals(fieldSortBuilder.order(), org.opensearch.search.sort.SortOrder.ASC);
+ assertEquals(fieldSortBuilder.unmappedType(), "keyword");
+ } else {
+ assertEquals(fieldSortBuilder.getFieldName(), "urn");
+ }
}
-
- @Test
- public void testFixtureInitialization() {
- assertNotNull(getSearchService());
- SearchResult noResult = searchAcrossEntities(getSearchService(), "no results");
- assertEquals(0, noResult.getEntities().size());
-
- final SearchResult result = searchAcrossEntities(getSearchService(), "test");
-
- Map<String, Integer> expectedTypes = Map.of(
- "dataset", 13,
- "chart", 0,
- "container", 1,
- "dashboard", 0,
- "tag", 0,
- "mlmodel", 0
- );
-
- Map<String, List<Urn>> actualTypes = new HashMap<>();
- for (String key : expectedTypes.keySet()) {
- actualTypes.put(key, result.getEntities().stream()
- .map(SearchEntity::getEntity).filter(entity -> key.equals(entity.getEntityType())).collect(Collectors.toList()));
- }
-
- expectedTypes.forEach((key, value) ->
- assertEquals(actualTypes.get(key).size(), value.intValue(),
- String.format("Expected entity `%s` matches for %s. Found %s", value, key,
- result.getEntities().stream()
- .filter(e -> e.getEntity().getEntityType().equals(key))
- .map(e -> e.getEntity().getEntityKey())
- .collect(Collectors.toList())))));
+ }
+
+ @Test
+ public void testDatasetHasTags() throws IOException {
+ GetMappingsRequest req = new GetMappingsRequest().indices("smpldat_datasetindex_v2");
+ GetMappingsResponse resp = getSearchClient().indices().getMapping(req, RequestOptions.DEFAULT);
+ Map<String, Map<String, Object>> mappings =
+ (Map<String, Map<String, Object>>)
+ resp.mappings().get("smpldat_datasetindex_v2").sourceAsMap().get("properties");
+ assertTrue(mappings.containsKey("hasTags"));
+ assertEquals(mappings.get("hasTags"), Map.of("type", "boolean"));
+ }
+
+ @Test
+ public void testFixtureInitialization() {
+ assertNotNull(getSearchService());
+ SearchResult noResult = searchAcrossEntities(getSearchService(), "no results");
+ assertEquals(0, noResult.getEntities().size());
+
+ final SearchResult result = searchAcrossEntities(getSearchService(), "test");
+
+ Map<String, Integer> expectedTypes =
+ Map.of(
+ "dataset", 13,
+ "chart", 0,
+ "container", 1,
+ "dashboard", 0,
+ "tag", 0,
+ "mlmodel", 0);
+
+ Map<String, List<Urn>> actualTypes = new HashMap<>();
+ for (String key : expectedTypes.keySet()) {
+ actualTypes.put(
+ key,
+ result.getEntities().stream()
+ .map(SearchEntity::getEntity)
+ .filter(entity -> key.equals(entity.getEntityType()))
+ .collect(Collectors.toList()));
}
- @Test
- public void testDataPlatform() {
- Map<String, Integer> expected = ImmutableMap.<String, Integer>builder()
- .put("urn:li:dataPlatform:BigQuery", 8)
- .put("urn:li:dataPlatform:hive", 3)
- .put("urn:li:dataPlatform:mysql", 5)
- .put("urn:li:dataPlatform:s3", 1)
- .put("urn:li:dataPlatform:hdfs", 1)
- .put("urn:li:dataPlatform:graph", 1)
- .put("urn:li:dataPlatform:dbt", 9)
- .put("urn:li:dataplatform:BigQuery", 8)
- .put("urn:li:dataplatform:hive", 3)
- 
.put("urn:li:dataplatform:mysql", 5) - .put("urn:li:dataplatform:s3", 1) - .put("urn:li:dataplatform:hdfs", 1) - .put("urn:li:dataplatform:graph", 1) - .put("urn:li:dataplatform:dbt", 9) - .build(); - - expected.forEach((key, value) -> { - SearchResult result = searchAcrossEntities(getSearchService(), key); - assertEquals(result.getEntities().size(), value.intValue(), - String.format("Unexpected data platform `%s` hits.", key)); // max is 100 without pagination + expectedTypes.forEach( + (key, value) -> + assertEquals( + actualTypes.get(key).size(), + value.intValue(), + String.format( + "Expected entity `%s` matches for %s. Found %s", + value, + key, + result.getEntities().stream() + .filter(e -> e.getEntity().getEntityType().equals(key)) + .map(e -> e.getEntity().getEntityKey()) + .collect(Collectors.toList())))); + } + + @Test + public void testDataPlatform() { + Map expected = + ImmutableMap.builder() + .put("urn:li:dataPlatform:BigQuery", 8) + .put("urn:li:dataPlatform:hive", 3) + .put("urn:li:dataPlatform:mysql", 5) + .put("urn:li:dataPlatform:s3", 1) + .put("urn:li:dataPlatform:hdfs", 1) + .put("urn:li:dataPlatform:graph", 1) + .put("urn:li:dataPlatform:dbt", 9) + .put("urn:li:dataplatform:BigQuery", 8) + .put("urn:li:dataplatform:hive", 3) + .put("urn:li:dataplatform:mysql", 5) + .put("urn:li:dataplatform:s3", 1) + .put("urn:li:dataplatform:hdfs", 1) + .put("urn:li:dataplatform:graph", 1) + .put("urn:li:dataplatform:dbt", 9) + .build(); + + expected.forEach( + (key, value) -> { + SearchResult result = searchAcrossEntities(getSearchService(), key); + assertEquals( + result.getEntities().size(), + value.intValue(), + String.format( + "Unexpected data platform `%s` hits.", key)); // max is 100 without pagination }); - } - - @Test - public void testUrn() { + } + + @Test + public void testUrn() { + List.of( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.austin311_derived,PROD)", + "urn:li:dataset:(urn:li:dataPlatform:graph,graph-test,PROD)", + "urn:li:chart:(looker,baz1)", + "urn:li:dashboard:(looker,baz)", + "urn:li:mlFeature:(test_feature_table_all_feature_dtypes,test_BOOL_LIST_feature)", + "urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)") + .forEach( + query -> + assertTrue( + searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, + String.format("Unexpected >1 urn result for `%s`", query))); + } + + @Test + public void testExactTable() { + SearchResult results = searchAcrossEntities(getSearchService(), "stg_customers"); + assertEquals( + results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); + assertEquals( + results.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)"); + } + + @Test + public void testStemming() { + List> testSets = List.of( - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.austin311_derived,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:graph,graph-test,PROD)", - "urn:li:chart:(looker,baz1)", - "urn:li:dashboard:(looker,baz)", - "urn:li:mlFeature:(test_feature_table_all_feature_dtypes,test_BOOL_LIST_feature)", - "urn:li:mlModel:(urn:li:dataPlatform:science,scienceModel,PROD)" - ).forEach(query -> - assertTrue(searchAcrossEntities(getSearchService(), query).getEntities().size() >= 1, - String.format("Unexpected >1 urn result for `%s`", query)) - ); - } - - @Test - public void testExactTable() { - SearchResult results = 
searchAcrossEntities(getSearchService(), "stg_customers"); - assertEquals(results.getEntities().size(), 1, "Unexpected single urn result for `stg_customers`"); - assertEquals(results.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)"); - } - - @Test - public void testStemming() { - List> testSets = List.of( - Set.of("log", "logs", "logging"), - Set.of("border", "borders", "bordered", "bordering"), - Set.of("indicates", "indicate", "indicated") - ); - - testSets.forEach(testSet -> { - Integer expectedResults = null; - for (String testQuery : testSet) { - SearchResult results = searchAcrossEntities(getSearchService(), testQuery); - - assertTrue(results.hasEntities() && !results.getEntities().isEmpty(), - String.format("Expected search results for `%s`", testQuery)); - if (expectedResults == null) { - expectedResults = results.getNumEntities(); - } - assertEquals(expectedResults, results.getNumEntities(), - String.format("Expected all result counts to match after stemming. %s", testSet)); + Set.of("log", "logs", "logging"), + Set.of("border", "borders", "bordered", "bordering"), + Set.of("indicates", "indicate", "indicated")); + + testSets.forEach( + testSet -> { + Integer expectedResults = null; + for (String testQuery : testSet) { + SearchResult results = searchAcrossEntities(getSearchService(), testQuery); + + assertTrue( + results.hasEntities() && !results.getEntities().isEmpty(), + String.format("Expected search results for `%s`", testQuery)); + if (expectedResults == null) { + expectedResults = results.getNumEntities(); } - }); - } - - @Test - public void testStemmingOverride() throws IOException { - Set testSet = Set.of("customer", "customers"); - - Set results = testSet.stream() - .map(test -> searchAcrossEntities(getSearchService(), test)) - .collect(Collectors.toSet()); - - results.forEach(r -> assertTrue(r.hasEntities() && !r.getEntities().isEmpty(), "Expected search results")); - assertEquals(results.stream().map(r -> r.getEntities().size()).distinct().count(), 1, + assertEquals( + expectedResults, + results.getNumEntities(), String.format("Expected all result counts to match after stemming. 
%s", testSet)); - - // Additional inspect token - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "customers" - ); - - List tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("customer"), "Expected `customer` and not `custom`"); - } - - @Test - public void testDelimitedSynonym() throws IOException { - List expectedTokens = List.of("cac"); - List analyzers = List.of( - "urn_component", - "word_delimited", - "query_urn_component", - "query_word_delimited" - ); - List testTexts = List.of( - "customer acquisition cost", - "cac", - "urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)" - ); - - for (String analyzer : analyzers) { - for (String text : testTexts) { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - analyzer, text - ); - List tokens = getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedTokens.forEach(expected -> assertTrue(tokens.contains(expected), - String.format("Analyzer: `%s` Text: `%s` - Expected token `%s` in tokens: %s", - analyzer, text, expected, tokens))); - } - } - - // {"urn":"urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)","id":"cac_table",... - List testSet = List.of( - "cac", - "customer acquisition cost" - ); - List resultCounts = testSet.stream().map(q -> { - SearchResult result = searchAcrossEntities(getSearchService(), q); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - "Expected search results for: " + q); - return result.getEntities().size(); - }).collect(Collectors.toList()); - } - - @Test - public void testNegateAnalysis() throws IOException { - String queryWithMinus = "logging_events -bckp"; - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("logging_events -bckp", "logging_ev", "-bckp", "log", "event", "bckp")); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_gram_3", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("logging events -bckp")); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_gram_4", queryWithMinus - ); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of()); - + } + }); + } + + @Test + public void testStemmingOverride() throws IOException { + Set testSet = Set.of("customer", "customers"); + + Set results = + testSet.stream() + .map(test -> searchAcrossEntities(getSearchService(), test)) + .collect(Collectors.toSet()); + + results.forEach( + r -> assertTrue(r.hasEntities() && !r.getEntities().isEmpty(), "Expected search results")); + assertEquals( + results.stream().map(r -> r.getEntities().size()).distinct().count(), + 1, + String.format("Expected all result counts to match after stemming. 
%s", testSet)); + + // Additional inspect token + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", "customers"); + + List tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("customer"), "Expected `customer` and not `custom`"); + } + + @Test + public void testDelimitedSynonym() throws IOException { + List expectedTokens = List.of("cac"); + List analyzers = + List.of("urn_component", "word_delimited", "query_urn_component", "query_word_delimited"); + List testTexts = + List.of( + "customer acquisition cost", + "cac", + "urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)"); + + for (String analyzer : analyzers) { + for (String text : testTexts) { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, text); + List tokens = + getTokens(request) + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()); + expectedTokens.forEach( + expected -> + assertTrue( + tokens.contains(expected), + String.format( + "Analyzer: `%s` Text: `%s` - Expected token `%s` in tokens: %s", + analyzer, text, expected, tokens))); + } } - @Test - public void testWordGram() throws IOException { - String text = "hello.cat_cool_customer"; - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat", "cat cool", "cool customer")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat cool", "cat cool customer")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", text); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hello cat cool customer")); - - String testMoreSeparators = "quick.brown:fox jumped-LAZY_Dog"; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown", "brown fox", "fox jumped", "jumped lazy", "lazy dog")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown fox", "brown fox jumped", "fox jumped lazy", "jumped lazy dog")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", testMoreSeparators); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), - List.of("quick brown fox jumped", "brown fox jumped lazy", "fox jumped lazy dog")); - - String textWithQuotesAndDuplicateWord = "\"my_db.my_exact_table\""; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db", "db my", "my exact", "exact table")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", 
textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db my", "db my exact", "my exact table")); - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", textWithQuotesAndDuplicateWord); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("my db my exact", "db my exact table")); - - String textWithParens = "(hi) there"; - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithParens); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of("hi there")); - - String oneWordText = "hello"; - for (String analyzer : List.of("word_gram_2", "word_gram_3", "word_gram_4")) { - request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, oneWordText); - assertEquals(getTokens(request) - .map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), List.of()); - } + // {"urn":"urn:li:dataset:(urn:li:dataPlatform:testsynonym,cac_table,TEST)","id":"cac_table",... + List testSet = List.of("cac", "customer acquisition cost"); + List resultCounts = + testSet.stream() + .map( + q -> { + SearchResult result = searchAcrossEntities(getSearchService(), q); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + "Expected search results for: " + q); + return result.getEntities().size(); + }) + .collect(Collectors.toList()); + } + + @Test + public void testNegateAnalysis() throws IOException { + String queryWithMinus = "logging_events -bckp"; + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("logging_events -bckp", "logging_ev", "-bckp", "log", "event", "bckp")); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("logging events -bckp")); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", queryWithMinus); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of()); + } + + @Test + public void testWordGram() throws IOException { + String text = "hello.cat_cool_customer"; + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat", "cat cool", "cool customer")); + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_3", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat cool", "cat cool customer")); + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_4", text); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hello cat cool customer")); + + String testMoreSeparators = "quick.brown:fox jumped-LAZY_Dog"; + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_2", 
testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown", "brown fox", "fox jumped", "jumped lazy", "lazy dog")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_3", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown fox", "brown fox jumped", "fox jumped lazy", "jumped lazy dog")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_4", testMoreSeparators); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("quick brown fox jumped", "brown fox jumped lazy", "fox jumped lazy dog")); + + String textWithQuotesAndDuplicateWord = "\"my_db.my_exact_table\""; + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_2", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db", "db my", "my exact", "exact table")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_3", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db my", "db my exact", "my exact table")); + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "word_gram_4", textWithQuotesAndDuplicateWord); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("my db my exact", "db my exact table")); + + String textWithParens = "(hi) there"; + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_gram_2", textWithParens); + assertEquals( + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()), + List.of("hi there")); + + String oneWordText = "hello"; + for (String analyzer : List.of("word_gram_2", "word_gram_3", "word_gram_4")) { + request = AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", analyzer, oneWordText); + assertEquals( + getTokens(request) + .map(AnalyzeResponse.AnalyzeToken::getTerm) + .collect(Collectors.toList()), + List.of()); } - - @Test - public void testUrnSynonym() throws IOException { - List expectedTokens = List.of("bigquery"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.bq_audit.cloudaudit_googleapis_com_activity,PROD)" - ); - List indexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedTokens.forEach(expected -> assertTrue(indexTokens.contains(expected), + } + + @Test + public void testUrnSynonym() throws IOException { + List expectedTokens = List.of("bigquery"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.bq_audit.cloudaudit_googleapis_com_activity,PROD)"); + List indexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedTokens.forEach( + expected -> + assertTrue( + indexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, 
indexTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - "big query" - ); - List queryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(queryTokens, List.of("big query", "big", "query", "bigquery")); - - List testSet = List.of( - "bigquery", - "big query" - ); - List results = testSet.stream().map(query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), "Expected search results for: " + query); - return result; - }).collect(Collectors.toList()); - - assertEquals(results.stream().map(r -> r.getEntities().size()).distinct().count(), 1, - String.format("Expected all result counts (%s) to match after synonyms. %s", results, testSet)); - Assert.assertArrayEquals(results.get(0).getEntities().stream().map(e -> e.getEntity().toString()).sorted().toArray(String[]::new), - results.get(1).getEntities().stream().map(e -> e.getEntity().toString()).sorted().toArray(String[]::new)); - } - - @Test - public void testTokenization() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "my_table" - ); - List tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("my_tabl", "tabl"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "my_table" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("my_tabl", "tabl"), - String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationWithNumber() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - "harshal-playground-306419.test_schema.austin311_derived" - ); - List tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "austin311_deriv", "austin311", "deriv"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "harshal-playground-306419.test_schema.austin311_derived" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "austin311_deriv", "austin311", "deriv"), - String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationQuote() throws IOException { - String testQuery = "\"test2\""; - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - testQuery - ); - List tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. 
Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationQuoteUnderscore() throws IOException { - String testQuery = "\"raw_orders\""; - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "word_delimited", - testQuery - ); - List tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders", "raw_ord", "raw", "order"), String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders", "raw_ord", "raw", "order"), String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - testQuery - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of("raw_orders"), String.format("Unexpected tokens. Found %s", tokens)); - } - - @Test - public void testTokenizationDataPlatform() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.excess_deaths_derived,PROD)" - ); - List tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", "bigquery", "big", "query", - "harshal-playground-306419", "harshal", "playground", "306419", - "test_schema", "test", "schema", - "excess_deaths_deriv", "excess", "death", "deriv", - "prod", "production"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset-ac611929-c3ac-4b92-aafb-f4603ddb408a,PROD)" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", "hive", - "samplehivedataset-ac611929-c3ac-4b92-aafb-f4603ddb408a", - "samplehivedataset", "ac611929", "c3ac", "4b92", "aafb", "f4603ddb408a", - "prod", "production"), - String.format("Unexpected tokens. Found %s", tokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - "urn:li:dataset:(urn:li:dataPlatform:test_rollback,rollback_test_dataset,TEST)" - ); - tokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(tokens, List.of( - "dataset", - "dataplatform", "data platform", - "test_rollback", "test", "rollback", "rollback_test_dataset"), - String.format("Unexpected tokens. 
Found %s", tokens)); - } - - @Test - public void testChartAutoComplete() throws InterruptedException, IOException { - // Two charts exist Baz Chart 1 & Baz Chart 2 - List.of("B", "Ba", "Baz", "Baz ", "Baz C", "Baz Ch", "Baz Cha", "Baz Char", "Baz Chart", "Baz Chart ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query); - assertTrue(result.getEntities().size() == 2, - String.format("Expected 2 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testDatasetAutoComplete() { - List.of("excess", "excess_", "excess_d", "excess_de", "excess_death", "excess_deaths", "excess_deaths_d", - "excess_deaths_de", "excess_deaths_der", "excess_deaths_derived") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new DatasetType(getEntityClient()), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected >= 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testContainerAutoComplete() { - List.of("cont", "container", "container-a", "container-auto", "container-autocomp", "container-autocomp-te", - "container-autocomp-test") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new ContainerType(getEntityClient()), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected >= 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testGroupAutoComplete() { - List.of("T", "Te", "Tes", "Test ", "Test G", "Test Gro", "Test Group ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new CorpGroupType(getEntityClient()), query); - assertTrue(result.getEntities().size() == 1, - String.format("Expected 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } - - @Test - public void testUserAutoComplete() { - List.of("D", "Da", "Dat", "Data ", "Data H", "Data Hu", "Data Hub", "Data Hub ") - .forEach(query -> { - try { - AutoCompleteResults result = autocomplete(new CorpUserType(getEntityClient(), null), query); - assertTrue(result.getEntities().size() >= 1, - String.format("Expected at least 1 results for `%s` found %s", query, result.getEntities().size())); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); - } + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", "big query"); + List queryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(queryTokens, List.of("big query", "big", "query", "bigquery")); + + List testSet = List.of("bigquery", "big query"); + List results = + testSet.stream() + .map( + query -> { + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + "Expected search results for: " + query); + return result; + }) + .collect(Collectors.toList()); - @Test - public void testSmokeTestQueries() { - Map expectedFulltextMinimums = Map.of( - "sample", 3, - "covid", 2, - "\"raw_orders\"", 6, - STRUCTURED_QUERY_PREFIX + "sample", 3, - STRUCTURED_QUERY_PREFIX + "\"sample\"", 2, - STRUCTURED_QUERY_PREFIX + 
"covid", 2, - STRUCTURED_QUERY_PREFIX + "\"raw_orders\"", 1 - ); - - Map results = expectedFulltextMinimums.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedFulltextMinimums.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s fulltext results, expected %s results.", key, actualCount, - expectedCount)); + assertEquals( + results.stream().map(r -> r.getEntities().size()).distinct().count(), + 1, + String.format( + "Expected all result counts (%s) to match after synonyms. %s", results, testSet)); + Assert.assertArrayEquals( + results.get(0).getEntities().stream() + .map(e -> e.getEntity().toString()) + .sorted() + .toArray(String[]::new), + results.get(1).getEntities().stream() + .map(e -> e.getEntity().toString()) + .sorted() + .toArray(String[]::new)); + } + + @Test + public void testTokenization() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", "my_table"); + List tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("my_tabl", "tabl"), String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", "my_table"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, List.of("my_tabl", "tabl"), String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationWithNumber() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "word_delimited", + "harshal-playground-306419.test_schema.austin311_derived"); + List tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "austin311_deriv", + "austin311", + "deriv"), + String.format("Unexpected tokens. Found %s", tokens)); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + "harshal-playground-306419.test_schema.austin311_derived"); + tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + tokens, + List.of( + "harshal-playground-306419", + "harshal", + "playground", + "306419", + "test_schema", + "test", + "schema", + "austin311_deriv", + "austin311", + "deriv"), + String.format("Unexpected tokens. Found %s", tokens)); + } + + @Test + public void testTokenizationQuote() throws IOException { + String testQuery = "\"test2\""; + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", testQuery); + List tokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. 
Found %s", tokens));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2", "query_urn_component", testQuery);
+    tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(tokens, List.of("test2"), String.format("Unexpected tokens. Found %s", tokens));
+  }
+
+  @Test
+  public void testTokenizationQuoteUnderscore() throws IOException {
+    String testQuery = "\"raw_orders\"";
+
+    AnalyzeRequest request =
+        AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", testQuery);
+    List<String> tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        tokens,
+        List.of("raw_orders", "raw_ord", "raw", "order"),
+        String.format("Unexpected tokens. Found %s", tokens));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2", "query_word_delimited", testQuery);
+    tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        tokens,
+        List.of("raw_orders", "raw_ord", "raw", "order"),
+        String.format("Unexpected tokens. Found %s", tokens));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", testQuery);
+    tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        tokens, List.of("raw_orders"), String.format("Unexpected tokens. Found %s", tokens));
+  }
+
+  @Test
+  public void testTokenizationDataPlatform() throws IOException {
+    AnalyzeRequest request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2",
+            "urn_component",
+            "urn:li:dataset:(urn:li:dataPlatform:bigquery,harshal-playground-306419.test_schema.excess_deaths_derived,PROD)");
+    List<String> tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        tokens,
+        List.of(
+            "dataset",
+            "dataplatform",
+            "data platform",
+            "bigquery",
+            "big",
+            "query",
+            "harshal-playground-306419",
+            "harshal",
+            "playground",
+            "306419",
+            "test_schema",
+            "test",
+            "schema",
+            "excess_deaths_deriv",
+            "excess",
+            "death",
+            "deriv",
+            "prod",
+            "production"),
+        String.format("Unexpected tokens. Found %s", tokens));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2",
+            "urn_component",
+            "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset-ac611929-c3ac-4b92-aafb-f4603ddb408a,PROD)");
+    tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        tokens,
+        List.of(
+            "dataset",
+            "dataplatform",
+            "data platform",
+            "hive",
+            "samplehivedataset-ac611929-c3ac-4b92-aafb-f4603ddb408a",
+            "samplehivedataset",
+            "ac611929",
+            "c3ac",
+            "4b92",
+            "aafb",
+            "f4603ddb408a",
+            "prod",
+            "production"),
+        String.format("Unexpected tokens. Found %s", tokens));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2",
+            "urn_component",
+            "urn:li:dataset:(urn:li:dataPlatform:test_rollback,rollback_test_dataset,TEST)");
+    tokens =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        tokens,
+        List.of(
+            "dataset",
+            "dataplatform",
+            "data platform",
+            "test_rollback",
+            "test",
+            "rollback",
+            "rollback_test_dataset"),
+        String.format("Unexpected tokens. 
Found %s", tokens));
+  }
+
+  @Test
+  public void testChartAutoComplete() throws InterruptedException, IOException {
+    // Two charts exist: Baz Chart 1 & Baz Chart 2
+    List.of(
+            "B",
+            "Ba",
+            "Baz",
+            "Baz ",
+            "Baz C",
+            "Baz Ch",
+            "Baz Cha",
+            "Baz Char",
+            "Baz Chart",
+            "Baz Chart ")
+        .forEach(
+            query -> {
+              try {
+                AutoCompleteResults result = autocomplete(new ChartType(getEntityClient()), query);
+                assertTrue(
+                    result.getEntities().size() == 2,
+                    String.format(
+                        "Expected 2 results for `%s` found %s",
+                        query, result.getEntities().size()));
+              } catch (Exception e) {
+                throw new RuntimeException(e);
+              }
+            });
+  }
+
+  @Test
+  public void testDatasetAutoComplete() {
+    List.of(
+            "excess",
+            "excess_",
+            "excess_d",
+            "excess_de",
+            "excess_death",
+            "excess_deaths",
+            "excess_deaths_d",
+            "excess_deaths_de",
+            "excess_deaths_der",
+            "excess_deaths_derived")
+        .forEach(
+            query -> {
+              try {
+                AutoCompleteResults result =
+                    autocomplete(new DatasetType(getEntityClient()), query);
+                assertTrue(
+                    result.getEntities().size() >= 1,
+                    String.format(
+                        "Expected >= 1 results for `%s` found %s",
+                        query, result.getEntities().size()));
+              } catch (Exception e) {
+                throw new RuntimeException(e);
+              }
+            });
+  }
+
+  @Test
+  public void testContainerAutoComplete() {
+    List.of(
+            "cont",
+            "container",
+            "container-a",
+            "container-auto",
+            "container-autocomp",
+            "container-autocomp-te",
+            "container-autocomp-test")
+        .forEach(
+            query -> {
+              try {
+                AutoCompleteResults result =
+                    autocomplete(new ContainerType(getEntityClient()), query);
+                assertTrue(
+                    result.getEntities().size() >= 1,
+                    String.format(
+                        "Expected >= 1 results for `%s` found %s",
+                        query, result.getEntities().size()));
+              } catch (Exception e) {
+                throw new RuntimeException(e);
+              }
+            });
+  }
+
+  @Test
+  public void testGroupAutoComplete() {
+    List.of("T", "Te", "Tes", "Test ", "Test G", "Test Gro", "Test Group ")
+        .forEach(
+            query -> {
+              try {
+                AutoCompleteResults result =
+                    autocomplete(new CorpGroupType(getEntityClient()), query);
+                assertTrue(
+                    result.getEntities().size() == 1,
+                    String.format(
+                        "Expected 1 result for `%s` found %s",
+                        query, result.getEntities().size()));
+              } catch (Exception e) {
+                throw new RuntimeException(e);
+              }
+            });
+  }
+
+  @Test
+  public void testUserAutoComplete() {
+    List.of("D", "Da", "Dat", "Data ", "Data H", "Data Hu", "Data Hub", "Data Hub ")
+        .forEach(
+            query -> {
+              try {
+                AutoCompleteResults result =
+                    autocomplete(new CorpUserType(getEntityClient(), null), query);
+                assertTrue(
+                    result.getEntities().size() >= 1,
+                    String.format(
+                        "Expected at least 1 result for `%s` found %s",
+                        query, result.getEntities().size()));
+              } catch (Exception e) {
+                throw new RuntimeException(e);
+              }
+            });
+  }
+
+  @Test
+  public void testSmokeTestQueries() {
+    Map<String, Integer> expectedFulltextMinimums =
+        Map.of(
+            "sample",
+            3,
+            "covid",
+            2,
+            "\"raw_orders\"",
+            6,
+            STRUCTURED_QUERY_PREFIX + "sample",
+            3,
+            STRUCTURED_QUERY_PREFIX + "\"sample\"",
+            2,
+            STRUCTURED_QUERY_PREFIX + "covid",
+            2,
+            STRUCTURED_QUERY_PREFIX + "\"raw_orders\"",
+            1);
+
+    Map<String, SearchResult> results =
+        expectedFulltextMinimums.entrySet().stream()
+            .collect(
+                Collectors.toMap(
+                    Map.Entry::getKey,
+                    entry -> searchAcrossEntities(getSearchService(), entry.getKey())));
+
+    results.forEach(
+        (key, value) -> {
+          Integer actualCount = value.getEntities().size();
+          Integer expectedCount = expectedFulltextMinimums.get(key);
+          assertSame(
+              actualCount,
+              expectedCount,
+              String.format(
+                  "Search term `%s` has %s fulltext results, expected %s results.",
+                  key, actualCount, expectedCount));
        });
-    Map<String, Integer> expectedStructuredMinimums = Map.of(
-        "sample", 3,
-        "covid", 2,
-        "\"raw_orders\"", 1
-    );
-
-    results = expectedStructuredMinimums.entrySet().stream()
-        .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchStructured(getSearchService(), entry.getKey())));
-
-    results.forEach((key, value) -> {
-      Integer actualCount = value.getEntities().size();
-      Integer expectedCount = expectedStructuredMinimums.get(key);
-      assertSame(actualCount, expectedCount,
-          String.format("Search term `%s` has %s structured results, expected %s results.", key, actualCount,
-              expectedCount));
+    Map<String, Integer> expectedStructuredMinimums =
+        Map.of(
+            "sample", 3,
+            "covid", 2,
+            "\"raw_orders\"", 1);
+
+    results =
+        expectedStructuredMinimums.entrySet().stream()
+            .collect(
+                Collectors.toMap(
+                    Map.Entry::getKey,
+                    entry -> searchStructured(getSearchService(), entry.getKey())));
+
+    results.forEach(
+        (key, value) -> {
+          Integer actualCount = value.getEntities().size();
+          Integer expectedCount = expectedStructuredMinimums.get(key);
+          assertSame(
+              actualCount,
+              expectedCount,
+              String.format(
+                  "Search term `%s` has %s structured results, expected %s results.",
+                  key, actualCount, expectedCount));
        });
-  }
-
-  @Test
-  public void testMinNumberLengthLimit() throws IOException {
-    AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer(
-        "smpldat_datasetindex_v2",
-        "word_delimited",
-        "data2022.data22"
-    );
-    List<String> expected = List.of("data2022", "data22");
-    List<String> actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
-    assertEquals(actual, expected,
-        String.format("Expected: %s Actual: %s", expected, actual));
-  }
-
-  @Test
-  public void testUnderscore() throws IOException {
-    String testQuery = "bad_fraud_id";
-    List<String> expected = List.of("bad_fraud_id", "bad", "fraud");
-
-    AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer(
-        "smpldat_datasetindex_v2",
-        "query_word_delimited",
-        testQuery
-    );
-
-    List<String> actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
-    assertEquals(actual, expected,
-        String.format("Analayzer: query_word_delimited Expected: %s Actual: %s", expected, actual));
-
-    request = AnalyzeRequest.withIndexAnalyzer(
-        "smpldat_datasetindex_v2",
-        "word_delimited",
-        testQuery
-    );
-    actual = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
-    assertEquals(actual, expected,
-        String.format("Analyzer: word_delimited Expected: %s Actual: %s", expected, actual));
-
-  }
-
-  @Test
-  public void testFacets() {
-    Set<String> expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags");
-    SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress");
-    expectedFacets.forEach(facet -> {
-      assertTrue(testResult.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)),
-          String.format("Failed to find facet `%s` in %s", facet,
-              testResult.getMetadata().getAggregations().stream()
-                  .map(AggregationMetadata::getName).collect(Collectors.toList())));
+  }
+
+  @Test
+  public void testMinNumberLengthLimit() throws IOException {
+    AnalyzeRequest request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2", "word_delimited", "data2022.data22");
+    List<String> expected = List.of("data2022", "data22");
+    List<String> actual =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(actual, expected, String.format("Expected: %s Actual: %s", expected, actual));
+  }
+
+  @Test
+  public void testUnderscore() throws IOException {
+    String testQuery = "bad_fraud_id";
+    List<String> expected = List.of("bad_fraud_id", "bad", "fraud");
+
+    AnalyzeRequest request =
+        AnalyzeRequest.withIndexAnalyzer(
+            "smpldat_datasetindex_v2", "query_word_delimited", testQuery);
+
+    List<String> actual =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        actual,
+        expected,
+        String.format("Analyzer: query_word_delimited Expected: %s Actual: %s", expected, actual));
+
+    request =
+        AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "word_delimited", testQuery);
+    actual =
+        getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList());
+    assertEquals(
+        actual,
+        expected,
+        String.format("Analyzer: word_delimited Expected: %s Actual: %s", expected, actual));
+  }
+
+  @Test
+  public void testFacets() {
+    Set<String> expectedFacets = Set.of("entity", "typeNames", "platform", "origin", "tags");
+    SearchResult testResult = searchAcrossEntities(getSearchService(), "cypress");
+    expectedFacets.forEach(
+        facet -> {
+          assertTrue(
+              testResult.getMetadata().getAggregations().stream()
+                  .anyMatch(agg -> agg.getName().equals(facet)),
+              String.format(
+                  "Failed to find facet `%s` in %s",
+                  facet,
+                  testResult.getMetadata().getAggregations().stream()
+                      .map(AggregationMetadata::getName)
+                      .collect(Collectors.toList())));
        });
+    AggregationMetadata entityAggMeta =
+        testResult.getMetadata().getAggregations().stream()
+            .filter(aggMeta -> aggMeta.getName().equals("entity"))
+            .findFirst()
+            .get();
+    Map<String, Long> expectedEntityTypeCounts = new HashMap<>();
+    expectedEntityTypeCounts.put("container", 0L);
+    expectedEntityTypeCounts.put("corpuser", 0L);
+    expectedEntityTypeCounts.put("corpgroup", 0L);
+    
expectedEntityTypeCounts.put("mlmodel", 0L); + expectedEntityTypeCounts.put("mlfeaturetable", 1L); + expectedEntityTypeCounts.put("mlmodelgroup", 1L); + expectedEntityTypeCounts.put("dataflow", 1L); + expectedEntityTypeCounts.put("glossarynode", 1L); + expectedEntityTypeCounts.put("mlfeature", 0L); + expectedEntityTypeCounts.put("datajob", 2L); + expectedEntityTypeCounts.put("domain", 0L); + expectedEntityTypeCounts.put("tag", 0L); + expectedEntityTypeCounts.put("glossaryterm", 2L); + expectedEntityTypeCounts.put("mlprimarykey", 1L); + expectedEntityTypeCounts.put("dataset", 9L); + expectedEntityTypeCounts.put("chart", 0L); + expectedEntityTypeCounts.put("dashboard", 0L); + assertEquals(entityAggMeta.getAggregations(), expectedEntityTypeCounts); + } + + @Test + public void testNestedAggregation() { + Set expectedFacets = Set.of("platform"); + SearchResult testResult = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult.getMetadata().getAggregations().size(), 1); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); - SearchResult testResult2 = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult2.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResult2.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult2.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + expectedFacets = Set.of("platform", "typeNames", "_entityType", "entity"); + SearchResult testResult2 = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult2.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult2.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult2.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityTypeAggMeta = testResult2.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("_entityType")).findFirst().get(); - AggregationMetadata entityAggMeta = testResult2.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("entity")).findFirst().get(); - assertEquals(entityTypeAggMeta.getAggregations(), entityAggMeta.getAggregations()); - Map expectedEntityTypeCounts = new HashMap<>(); - expectedEntityTypeCounts.put("container", 0L); - expectedEntityTypeCounts.put("corpuser", 0L); - expectedEntityTypeCounts.put("corpgroup", 0L); - expectedEntityTypeCounts.put("mlmodel", 0L); - expectedEntityTypeCounts.put("mlfeaturetable", 1L); - expectedEntityTypeCounts.put("mlmodelgroup", 1L); - expectedEntityTypeCounts.put("dataflow", 1L); - expectedEntityTypeCounts.put("glossarynode", 1L); - expectedEntityTypeCounts.put("mlfeature", 0L); - expectedEntityTypeCounts.put("datajob", 
2L); - expectedEntityTypeCounts.put("domain", 0L); - expectedEntityTypeCounts.put("tag", 0L); - expectedEntityTypeCounts.put("glossaryterm", 2L); - expectedEntityTypeCounts.put("mlprimarykey", 1L); - expectedEntityTypeCounts.put("dataset", 9L); - expectedEntityTypeCounts.put("chart", 0L); - expectedEntityTypeCounts.put("dashboard", 0L); - assertEquals(entityTypeAggMeta.getAggregations(), expectedEntityTypeCounts); - - expectedFacets = Set.of("platform", "typeNames", "entity"); - SearchResult testResult3 = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResult3.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResult3.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResult3.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityTypeAggMeta = + testResult2.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("_entityType")) + .findFirst() + .get(); + AggregationMetadata entityAggMeta = + testResult2.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + .get(); + assertEquals(entityTypeAggMeta.getAggregations(), entityAggMeta.getAggregations()); + Map expectedEntityTypeCounts = new HashMap<>(); + expectedEntityTypeCounts.put("container", 0L); + expectedEntityTypeCounts.put("corpuser", 0L); + expectedEntityTypeCounts.put("corpgroup", 0L); + expectedEntityTypeCounts.put("mlmodel", 0L); + expectedEntityTypeCounts.put("mlfeaturetable", 1L); + expectedEntityTypeCounts.put("mlmodelgroup", 1L); + expectedEntityTypeCounts.put("dataflow", 1L); + expectedEntityTypeCounts.put("glossarynode", 1L); + expectedEntityTypeCounts.put("mlfeature", 0L); + expectedEntityTypeCounts.put("datajob", 2L); + expectedEntityTypeCounts.put("domain", 0L); + expectedEntityTypeCounts.put("tag", 0L); + expectedEntityTypeCounts.put("glossaryterm", 2L); + expectedEntityTypeCounts.put("mlprimarykey", 1L); + expectedEntityTypeCounts.put("dataset", 9L); + expectedEntityTypeCounts.put("chart", 0L); + expectedEntityTypeCounts.put("dashboard", 0L); + assertEquals(entityTypeAggMeta.getAggregations(), expectedEntityTypeCounts); + + expectedFacets = Set.of("platform", "typeNames", "entity"); + SearchResult testResult3 = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResult3.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResult3.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResult3.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - AggregationMetadata entityTypeAggMeta3 = testResult3.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("_entityType")).findFirst().get(); - AggregationMetadata entityAggMeta3 = testResult3.getMetadata().getAggregations().stream().filter( - aggMeta -> aggMeta.getName().equals("entity")).findFirst().get(); - assertEquals(entityTypeAggMeta3.getAggregations(), entityAggMeta3.getAggregations()); - assertEquals(entityTypeAggMeta3.getAggregations(), expectedEntityTypeCounts); - - String singleNestedFacet = 
String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); - expectedFacets = Set.of(singleNestedFacet); - SearchResult testResultSingleNested = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); - Map expectedNestedFacetCounts = new HashMap<>(); - expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); - expectedNestedFacetCounts.put("glossarynode␞urn:li:corpuser:jdoe", 1L); - expectedNestedFacetCounts.put("dataflow␞urn:li:corpuser:datahub", 1L); - expectedNestedFacetCounts.put("mlfeaturetable", 1L); - expectedNestedFacetCounts.put("mlmodelgroup", 1L); - expectedNestedFacetCounts.put("glossarynode", 1L); - expectedNestedFacetCounts.put("dataflow", 1L); - expectedNestedFacetCounts.put("mlmodelgroup␞urn:li:corpuser:some-user", 1L); - expectedNestedFacetCounts.put("datajob", 2L); - expectedNestedFacetCounts.put("glossaryterm␞urn:li:corpuser:jdoe", 2L); - expectedNestedFacetCounts.put("glossaryterm", 2L); - expectedNestedFacetCounts.put("dataset", 9L); - expectedNestedFacetCounts.put("mlprimarykey", 1L); - assertEquals(testResultSingleNested.getMetadata().getAggregations().get(0).getAggregations(), expectedNestedFacetCounts); - - expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); - SearchResult testResultNested = searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); - assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); - expectedFacets.forEach(facet -> { - assertTrue(testResultNested.getMetadata().getAggregations().stream().anyMatch(agg -> agg.getName().equals(facet)), - String.format("Failed to find facet `%s` in %s", facet, - testResultNested.getMetadata().getAggregations().stream() - .map(AggregationMetadata::getName).collect(Collectors.toList()))); + AggregationMetadata entityTypeAggMeta3 = + testResult3.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("_entityType")) + .findFirst() + .get(); + AggregationMetadata entityAggMeta3 = + testResult3.getMetadata().getAggregations().stream() + .filter(aggMeta -> aggMeta.getName().equals("entity")) + .findFirst() + .get(); + assertEquals(entityTypeAggMeta3.getAggregations(), entityAggMeta3.getAggregations()); + assertEquals(entityTypeAggMeta3.getAggregations(), expectedEntityTypeCounts); + + String singleNestedFacet = String.format("_entityType%sowners", AGGREGATION_SEPARATOR_CHAR); + expectedFacets = Set.of(singleNestedFacet); + SearchResult testResultSingleNested = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResultSingleNested.getMetadata().getAggregations().size(), 1); + Map expectedNestedFacetCounts = new HashMap<>(); + expectedNestedFacetCounts.put("datajob␞urn:li:corpuser:datahub", 2L); + expectedNestedFacetCounts.put("glossarynode␞urn:li:corpuser:jdoe", 1L); + expectedNestedFacetCounts.put("dataflow␞urn:li:corpuser:datahub", 1L); + expectedNestedFacetCounts.put("mlfeaturetable", 1L); + expectedNestedFacetCounts.put("mlmodelgroup", 1L); + expectedNestedFacetCounts.put("glossarynode", 1L); + expectedNestedFacetCounts.put("dataflow", 1L); + expectedNestedFacetCounts.put("mlmodelgroup␞urn:li:corpuser:some-user", 1L); + expectedNestedFacetCounts.put("datajob", 2L); + expectedNestedFacetCounts.put("glossaryterm␞urn:li:corpuser:jdoe", 2L); + expectedNestedFacetCounts.put("glossaryterm", 2L); + expectedNestedFacetCounts.put("dataset", 9L); + 
expectedNestedFacetCounts.put("mlprimarykey", 1L); + assertEquals( + testResultSingleNested.getMetadata().getAggregations().get(0).getAggregations(), + expectedNestedFacetCounts); + + expectedFacets = Set.of("platform", singleNestedFacet, "typeNames", "origin"); + SearchResult testResultNested = + searchAcrossEntities(getSearchService(), "cypress", List.copyOf(expectedFacets)); + assertEquals(testResultNested.getMetadata().getAggregations().size(), 4); + expectedFacets.forEach( + facet -> { + assertTrue( + testResultNested.getMetadata().getAggregations().stream() + .anyMatch(agg -> agg.getName().equals(facet)), + String.format( + "Failed to find facet `%s` in %s", + facet, + testResultNested.getMetadata().getAggregations().stream() + .map(AggregationMetadata::getName) + .collect(Collectors.toList()))); }); - List expectedNestedAgg = testResultNested.getMetadata().getAggregations().stream().filter( - agg -> agg.getName().equals(singleNestedFacet)).collect(Collectors.toList()); - assertEquals(expectedNestedAgg.size(), 1); - AggregationMetadata nestedAgg = expectedNestedAgg.get(0); - assertEquals(nestedAgg.getDisplayName(), String.format("Type%sOwned By", AGGREGATION_SEPARATOR_CHAR)); - } - - @Test - public void testPartialUrns() throws IOException { - Set expectedQueryTokens = Set.of("dataplatform", "data platform", "samplehdfsdataset", "prod", "production"); - Set expectedIndexTokens = Set.of("dataplatform", "data platform", "hdfs", "samplehdfsdataset", "prod", "production"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - List searchQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedQueryTokens.forEach(expected -> assertTrue(searchQueryTokens.contains(expected), + List expectedNestedAgg = + testResultNested.getMetadata().getAggregations().stream() + .filter(agg -> agg.getName().equals(singleNestedFacet)) + .collect(Collectors.toList()); + assertEquals(expectedNestedAgg.size(), 1); + AggregationMetadata nestedAgg = expectedNestedAgg.get(0); + assertEquals( + nestedAgg.getDisplayName(), String.format("Type%sOwned By", AGGREGATION_SEPARATOR_CHAR)); + } + + @Test + public void testPartialUrns() throws IOException { + Set expectedQueryTokens = + Set.of("dataplatform", "data platform", "samplehdfsdataset", "prod", "production"); + Set expectedIndexTokens = + Set.of("dataplatform", "data platform", "hdfs", "samplehdfsdataset", "prod", "production"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "query_urn_component", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + List searchQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedQueryTokens.forEach( + expected -> + assertTrue( + searchQueryTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchQueryTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - List searchIndexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedIndexTokens.forEach(expected -> assertTrue(searchIndexTokens.contains(expected), + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", + "urn_component", + 
":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + List searchIndexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedIndexTokens.forEach( + expected -> + assertTrue( + searchIndexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchIndexTokens))); - } - - @Test - public void testPartialUnderscoreUrns() throws IOException { - String testQuery = ":(urn:li:dataPlatform:hdfs,party_email,PROD)"; - Set expectedQueryTokens = Set.of("dataplatform", "data platform", "hdfs", "party_email", "parti", - "email", "prod", "production"); - Set expectedIndexTokens = Set.of("dataplatform", "data platform", "hdfs", "party_email", "parti", - "email", "prod", "production"); - - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_urn_component", - testQuery - ); - List searchQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedQueryTokens.forEach(expected -> assertTrue(searchQueryTokens.contains(expected), + } + + @Test + public void testPartialUnderscoreUrns() throws IOException { + String testQuery = ":(urn:li:dataPlatform:hdfs,party_email,PROD)"; + Set expectedQueryTokens = + Set.of( + "dataplatform", + "data platform", + "hdfs", + "party_email", + "parti", + "email", + "prod", + "production"); + Set expectedIndexTokens = + Set.of( + "dataplatform", + "data platform", + "hdfs", + "party_email", + "parti", + "email", + "prod", + "production"); + + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_urn_component", testQuery); + List searchQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedQueryTokens.forEach( + expected -> + assertTrue( + searchQueryTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchQueryTokens))); - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "urn_component", - testQuery - ); - List searchIndexTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - expectedIndexTokens.forEach(expected -> assertTrue(searchIndexTokens.contains(expected), + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "urn_component", testQuery); + List searchIndexTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + expectedIndexTokens.forEach( + expected -> + assertTrue( + searchIndexTokens.contains(expected), String.format("Expected token `%s` in %s", expected, searchIndexTokens))); - } - - @Test - public void testScrollAcrossEntities() throws IOException { - String query = "logging_events"; - final int batchSize = 1; - int totalResults = 0; - String scrollId = null; - do { - ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); - int numResults = result.hasEntities() ? 
result.getEntities().size() : 0; - assertTrue(numResults <= batchSize); - totalResults += numResults; - scrollId = result.getScrollId(); - } while (scrollId != null); - // expect 8 total matching results - assertEquals(totalResults, 8); - } - - @Test - public void testSearchAcrossMultipleEntities() { - String query = "logging_events"; - SearchResult result = search(getSearchService(), query); - assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 8); - result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 4); - result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); - assertEquals((int) result.getNumEntities(), 4); - } - - @Test - public void testQuotedAnalyzer() throws IOException { - AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"party_email\"" - ); - List searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("party_email"), searchQuotedQueryTokens, String.format("Actual %s", searchQuotedQueryTokens)); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test2\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test2"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"party_email\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("party_email"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test2\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test2"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "quote_analyzer", - "\"test_BYTES_LIST_feature\"" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertEquals(List.of("test_bytes_list_feature"), searchQuotedQueryTokens); - - request = AnalyzeRequest.withIndexAnalyzer( - "smpldat_datasetindex_v2", - "query_word_delimited", - "test_BYTES_LIST_feature" - ); - searchQuotedQueryTokens = getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); - assertTrue(searchQuotedQueryTokens.contains("test_bytes_list_featur")); - } - - @Test - public void testFragmentUrns() { - List testSet = List.of( - "hdfs,SampleHdfsDataset,PROD", - "hdfs,SampleHdfsDataset", - "SampleHdfsDataset", - "(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", - "urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD", - "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", - ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)" - ); - - testSet.forEach(query -> { - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected partial urn search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> 
e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); + } + + @Test + public void testScrollAcrossEntities() throws IOException { + String query = "logging_events"; + final int batchSize = 1; + int totalResults = 0; + String scrollId = null; + do { + ScrollResult result = scroll(getSearchService(), query, batchSize, scrollId); + int numResults = result.hasEntities() ? result.getEntities().size() : 0; + assertTrue(numResults <= batchSize); + totalResults += numResults; + scrollId = result.getScrollId(); + } while (scrollId != null); + // expect 8 total matching results + assertEquals(totalResults, 8); + } + + @Test + public void testSearchAcrossMultipleEntities() { + String query = "logging_events"; + SearchResult result = search(getSearchService(), query); + assertEquals((int) result.getNumEntities(), 8); + result = search(getSearchService(), List.of(DATASET_ENTITY_NAME, DATA_JOB_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 8); + result = search(getSearchService(), List.of(DATASET_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 4); + result = search(getSearchService(), List.of(DATA_JOB_ENTITY_NAME), query); + assertEquals((int) result.getNumEntities(), 4); + } + + @Test + public void testQuotedAnalyzer() throws IOException { + AnalyzeRequest request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"party_email\""); + List searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals( + List.of("party_email"), + searchQuotedQueryTokens, + String.format("Actual %s", searchQuotedQueryTokens)); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", "\"test2\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test2"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"party_email\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("party_email"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer("smpldat_datasetindex_v2", "quote_analyzer", "\"test2\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test2"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "quote_analyzer", "\"test_BYTES_LIST_feature\""); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertEquals(List.of("test_bytes_list_feature"), searchQuotedQueryTokens); + + request = + AnalyzeRequest.withIndexAnalyzer( + "smpldat_datasetindex_v2", "query_word_delimited", "test_BYTES_LIST_feature"); + searchQuotedQueryTokens = + getTokens(request).map(AnalyzeResponse.AnalyzeToken::getTerm).collect(Collectors.toList()); + assertTrue(searchQuotedQueryTokens.contains("test_bytes_list_featur")); + } + + @Test + public void testFragmentUrns() { + List testSet = + List.of( + "hdfs,SampleHdfsDataset,PROD", + "hdfs,SampleHdfsDataset", + "SampleHdfsDataset", + "(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", + "urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD", + 
"urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)", + ":(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + + testSet.forEach( + query -> { + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected partial urn search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); }); - } - - @Test - public void testPlatformTest() { - List testFields = List.of("platform.keyword", "platform"); - final String testPlatform = "urn:li:dataPlatform:dbt"; - - // Ensure backend code path works as expected - List results = testFields.stream() - .map(fieldName -> { - final String query = String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); - SearchResult result = searchStructured(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - return result; + } + + @Test + public void testPlatformTest() { + List testFields = List.of("platform.keyword", "platform"); + final String testPlatform = "urn:li:dataPlatform:dbt"; + + // Ensure backend code path works as expected + List results = + testFields.stream() + .map( + fieldName -> { + final String query = + String.format("%s:%s", fieldName, testPlatform.replaceAll(":", "\\\\:")); + SearchResult result = searchStructured(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format( + "%s - Expected search results to include matched fields", query)); + return result; }) - .collect(Collectors.toList()); - - IntStream.range(0, testFields.size()).forEach(idx -> { - assertEquals(results.get(idx).getEntities().size(), 9, - String.format("Search results for fields `%s` != 9", testFields.get(idx))); - }); + .collect(Collectors.toList()); - // Construct problematic search entity query - List testFilters = testFields.stream() - .map(fieldName -> { - Filter filter = new Filter(); - ArrayList criteria = new ArrayList<>(); - Criterion hasPlatformCriterion = new Criterion().setField(fieldName).setCondition(Condition.EQUAL).setValue(testPlatform); - criteria.add(hasPlatformCriterion); - filter.setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); - return filter; - }).collect(Collectors.toList()); - - // Test variations of fulltext flags - for (Boolean fulltextFlag : List.of(true, false)) { - - // Test field variations with/without .keyword - List entityClientResults = testFilters.stream().map(filter -> { - try { - return getEntityClient().search("dataset", "*", filter, null, 0, 100, - AUTHENTICATION, new SearchFlags().setFulltext(fulltextFlag)); - } catch (RemoteInvocationException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toList()); - - IntStream.range(0, testFields.size()).forEach(idx -> { - assertEquals(entityClientResults.get(idx).getEntities().size(), 9, - String.format("Search results for entityClient fields (fulltextFlag: %s): `%s` != 
9", fulltextFlag, testFields.get(idx))); + IntStream.range(0, testFields.size()) + .forEach( + idx -> { + assertEquals( + results.get(idx).getEntities().size(), + 9, + String.format("Search results for fields `%s` != 9", testFields.get(idx))); }); - } - } - - @Test - public void testStructQueryFieldMatch() { - String query = STRUCTURED_QUERY_PREFIX + "name: customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 1); - } - - @Test - public void testStructQueryFieldPrefixMatch() { - String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 2); - } - - @Test - public void testStructQueryCustomPropertiesKeyPrefix() { - String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 9); - } - - @Test - public void testStructQueryCustomPropertiesMatch() { - String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; - SearchResult result = searchAcrossEntities(getSearchService(), query); + // Construct problematic search entity query + List testFilters = + testFields.stream() + .map( + fieldName -> { + Filter filter = new Filter(); + ArrayList criteria = new ArrayList<>(); + Criterion hasPlatformCriterion = + new Criterion() + .setField(fieldName) + .setCondition(Condition.EQUAL) + .setValue(testPlatform); + criteria.add(hasPlatformCriterion); + filter.setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + return filter; + }) + .collect(Collectors.toList()); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); + // Test variations of fulltext flags + for (Boolean fulltextFlag : List.of(true, false)) { - assertEquals(result.getEntities().size(), 5); + // Test field variations with/without .keyword + List entityClientResults = + testFilters.stream() + .map( + filter -> { + try { + return getEntityClient() + .search( + "dataset", + "*", + filter, + null, + 0, + 100, + AUTHENTICATION, + new SearchFlags().setFulltext(fulltextFlag)); + } catch (RemoteInvocationException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList()); + + IntStream.range(0, 
testFields.size()) + .forEach( + idx -> { + assertEquals( + entityClientResults.get(idx).getEntities().size(), + 9, + String.format( + "Search results for entityClient fields (fulltextFlag: %s): `%s` != 9", + fulltextFlag, testFields.get(idx))); + }); } - - @Test - public void testCustomPropertiesQuoted() { - Map expectedResults = Map.of( - "\"materialization=view\"", 3, - STRUCTURED_QUERY_PREFIX + "customProperties:\"materialization=view\"", 3 - ); - - Map results = expectedResults.entrySet().stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> searchAcrossEntities(getSearchService(), entry.getKey()))); - - results.forEach((key, value) -> { - Integer actualCount = value.getEntities().size(); - Integer expectedCount = expectedResults.get(key); - assertSame(actualCount, expectedCount, - String.format("Search term `%s` has %s fulltext results, expected %s results.", key, actualCount, - expectedCount)); + } + + @Test + public void testStructQueryFieldMatch() { + String query = STRUCTURED_QUERY_PREFIX + "name: customers"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + } + + @Test + public void testStructQueryFieldPrefixMatch() { + String query = STRUCTURED_QUERY_PREFIX + "name: customers*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 2); + } + + @Test + public void testStructQueryCustomPropertiesKeyPrefix() { + String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 9); + } + + @Test + public void testStructQueryCustomPropertiesMatch() { + String query = STRUCTURED_QUERY_PREFIX + "customProperties: node_type=model"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 5); + } + + @Test + public void testCustomPropertiesQuoted() { + Map expectedResults = + Map.of( + "\"materialization=view\"", + 3, + STRUCTURED_QUERY_PREFIX + "customProperties:\"materialization=view\"", + 3); + + Map results = + expectedResults.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> 
searchAcrossEntities(getSearchService(), entry.getKey()))); + + results.forEach( + (key, value) -> { + Integer actualCount = value.getEntities().size(); + Integer expectedCount = expectedResults.get(key); + assertSame( + actualCount, + expectedCount, + String.format( + "Search term `%s` has %s fulltext results, expected %s results.", + key, actualCount, expectedCount)); }); + } + + @Test + public void testStructQueryFieldPaths() { + String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 3); + } + + @Test + public void testStructQueryBoolean() { + String query = + STRUCTURED_QUERY_PREFIX + + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 2); + + query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; + result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + + query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; + result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 1); + } + + @Test + public void testStructQueryBrowsePaths() { + String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 9); + } + + @Test + public void testOr() { + String query = "stg_customers | logging_events"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", 
query)); + assertEquals(result.getEntities().size(), 9); + + query = "stg_customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 1); + + query = "logging_events"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testNegate() { + String query = "logging_events -bckp"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 7); + + query = "logging_events"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testPrefix() { + String query = "bigquery"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + + query = "big*"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + } + + @Test + public void testParens() { + String query = "dbt | (bigquery + covid19)"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 11); + + query = "dbt"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search 
results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 9); + + query = "bigquery + covid19"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 2); + + query = "bigquery"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 8); + + query = "covid19"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + assertEquals(result.getEntities().size(), 2); + } + + @Test + public void testGram() { + String query = "jaffle shop customers"; + SearchResult result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", + "Expected exact match in 1st position"); + + query = "shop customers source"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers_source,PROD)", + "Expected ngram match in 1st position"); + + query = "jaffle shop stg customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)", + "Expected ngram match in 1st position"); + + query = "jaffle shop transformers customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.transformers_customers,PROD)", + "Expected ngram match in 1st position"); + + query = "shop raw customers"; + result = searchAcrossEntities(getSearchService(), query); + assertTrue( + result.hasEntities() && 
!result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.raw_customers,PROD)", + "Expected ngram match in 1st position"); + } + + @Test + public void testPrefixVsExact() { + String query = "\"customers\""; + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), 10); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", + "Expected exact match and 1st position"); + } + + // Note: This test can fail if not using .keyword subfields (check for possible query builder + // regression) + @Test + public void testPrefixVsExactCaseSensitivity() { + List insensitiveExactMatches = + List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); + for (String query : insensitiveExactMatches) { + SearchResult result = searchAcrossEntities(getSearchService(), query); + + assertTrue( + result.hasEntities() && !result.getEntities().isEmpty(), + String.format("%s - Expected search results", query)); + assertTrue( + result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), + String.format("%s - Expected search results to include matched fields", query)); + + assertEquals(result.getEntities().size(), insensitiveExactMatches.size()); + assertEquals( + result.getEntities().get(0).getEntity().toString(), + "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", + "Expected exact match as first match with matching case"); } - - @Test - public void testStructQueryFieldPaths() { - String query = STRUCTURED_QUERY_PREFIX + "fieldPaths: customer_id"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 3); - } - - @Test - public void testStructQueryBoolean() { - String query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy OR tags:urn\\:li\\:tag\\:testTag"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 2); - - query = STRUCTURED_QUERY_PREFIX + "editedFieldTags:urn\\:li\\:tag\\:Legacy"; - result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - 
Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 1); - - query = STRUCTURED_QUERY_PREFIX + "tags:urn\\:li\\:tag\\:testTag"; - result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 1); - } - - @Test - public void testStructQueryBrowsePaths() { - String query = STRUCTURED_QUERY_PREFIX + "browsePaths:*/dbt/*"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 9); - } - - @Test - public void testOr() { - String query = "stg_customers | logging_events"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 9); - - query = "stg_customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 1); - - query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testNegate() { - String query = "logging_events -bckp"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 7); - - query = "logging_events"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testPrefix() { - String query = "bigquery"; - 
SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - - query = "big*"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - } - - @Test - public void testParens() { - String query = "dbt | (bigquery + covid19)"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 11); - - query = "dbt"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 9); - - query = "bigquery + covid19"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 2); - - query = "bigquery"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 8); - - query = "covid19"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - assertEquals(result.getEntities().size(), 2); - } - @Test - public void testGram() { - String query = "jaffle shop customers"; - SearchResult result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", - "Expected exact 
match in 1st position"); - - query = "shop customers source"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers_source,PROD)", - "Expected ngram match in 1st position"); - - query = "jaffle shop stg customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.stg_customers,PROD)", - "Expected ngram match in 1st position"); - - query = "jaffle shop transformers customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.transformers_customers,PROD)", - "Expected ngram match in 1st position"); - - query = "shop raw customers"; - result = searchAcrossEntities(getSearchService(), query); - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.raw_customers,PROD)", - "Expected ngram match in 1st position"); - } - - @Test - public void testPrefixVsExact() { - String query = "\"customers\""; - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), 10); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:dbt,cypress_project.jaffle_shop.customers,PROD)", - "Expected exact match and 1st position"); - } - - // Note: This test can fail if not using .keyword subfields (check for possible query builder regression) - @Test - public void testPrefixVsExactCaseSensitivity() { - List insensitiveExactMatches = List.of("testExactMatchCase", "testexactmatchcase", "TESTEXACTMATCHCASE"); - for (String query : insensitiveExactMatches) { - SearchResult result = searchAcrossEntities(getSearchService(), query); - - assertTrue(result.hasEntities() && !result.getEntities().isEmpty(), - String.format("%s - Expected search results", query)); - assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()), - String.format("%s - Expected search results to include matched fields", query)); - - assertEquals(result.getEntities().size(), insensitiveExactMatches.size()); - assertEquals(result.getEntities().get(0).getEntity().toString(), - "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)", - "Expected exact match as first match with matching case"); - } - } - - @Test - public void testColumnExactMatch() { - String query = "unit_data"; - 
SearchResult result = searchAcrossEntities(getSearchService(), query);
-    assertTrue(result.hasEntities() && !result.getEntities().isEmpty(),
-        String.format("%s - Expected search results", query));
-    assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()),
-        String.format("%s - Expected search results to include matched fields", query));
-
-    assertTrue(result.getEntities().size() > 2,
-        String.format("%s - Expected search results to have at least two results", query));
-    assertEquals(result.getEntities().get(0).getEntity().toString(),
-        "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)",
-        "Expected table name exact match first");
-
-    query = "special_column_only_present_here_info";
-    result = searchAcrossEntities(getSearchService(), query);
-    assertTrue(result.hasEntities() && !result.getEntities().isEmpty(),
-        String.format("%s - Expected search results", query));
-    assertTrue(result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()),
-        String.format("%s - Expected search results to include matched fields", query));
-
-    assertTrue(result.getEntities().size() > 2,
-        String.format("%s - Expected search results to have at least two results", query));
-    assertEquals(result.getEntities().get(0).getEntity().toString(),
-        "urn:li:dataset:(urn:li:dataPlatform:testOnly," + "important_units" + ",PROD)",
-        "Expected table with column name exact match first");
-  }
-
-  @Test
-  public void testSortOrdering() {
-    String query = "unit_data";
-    SortCriterion criterion = new SortCriterion().setOrder(SortOrder.ASCENDING).setField("lastOperationTime");
-    SearchResult result = getSearchService().searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, criterion, 0,
-        100, new SearchFlags().setFulltext(true).setSkipCache(true), null);
-    assertTrue(result.getEntities().size() > 2,
-        String.format("%s - Expected search results to have at least two results", query));
-  }
-
-  private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request) throws IOException {
-    return getSearchClient().indices().analyze(request, RequestOptions.DEFAULT).getTokens().stream();
-  }
+  }
+
+  @Test
+  public void testColumnExactMatch() {
+    String query = "unit_data";
+    SearchResult result = searchAcrossEntities(getSearchService(), query);
+    assertTrue(
+        result.hasEntities() && !result.getEntities().isEmpty(),
+        String.format("%s - Expected search results", query));
+    assertTrue(
+        result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()),
+        String.format("%s - Expected search results to include matched fields", query));
+
+    assertTrue(
+        result.getEntities().size() > 2,
+        String.format("%s - Expected search results to have at least two results", query));
+    assertEquals(
+        result.getEntities().get(0).getEntity().toString(),
+        "urn:li:dataset:(urn:li:dataPlatform:testOnly," + query + ",PROD)",
+        "Expected table name exact match first");
+
+    query = "special_column_only_present_here_info";
+    result = searchAcrossEntities(getSearchService(), query);
+    assertTrue(
+        result.hasEntities() && !result.getEntities().isEmpty(),
+        String.format("%s - Expected search results", query));
+    assertTrue(
+        result.getEntities().stream().noneMatch(e -> e.getMatchedFields().isEmpty()),
+        String.format("%s - Expected search results to include matched fields", query));
+
+    assertTrue(
+        result.getEntities().size() > 2,
+        String.format("%s - Expected search results to have at least two results", query));
+    assertEquals(
+        result.getEntities().get(0).getEntity().toString(),
+        "urn:li:dataset:(urn:li:dataPlatform:testOnly," + "important_units" + ",PROD)",
+        "Expected table with column name exact match first");
+  }
+
+  @Test
+  public void testSortOrdering() {
+    String query = "unit_data";
+    SortCriterion criterion =
+        new SortCriterion().setOrder(SortOrder.ASCENDING).setField("lastOperationTime");
+    SearchResult result =
+        getSearchService()
+            .searchAcrossEntities(
+                SEARCHABLE_ENTITIES,
+                query,
+                null,
+                criterion,
+                0,
+                100,
+                new SearchFlags().setFulltext(true).setSkipCache(true),
+                null);
+    assertTrue(
+        result.getEntities().size() > 2,
+        String.format("%s - Expected search results to have at least two results", query));
+  }
+
+  private Stream<AnalyzeResponse.AnalyzeToken> getTokens(AnalyzeRequest request)
+      throws IOException {
+    return getSearchClient()
+        .indices()
+        .analyze(request, RequestOptions.DEFAULT)
+        .getTokens()
+        .stream();
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java
index 4472af339c074..2c395875a1d6b 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/IndexBuilderTestBase.java
@@ -1,10 +1,19 @@
 package com.linkedin.metadata.search.indexbuilder;
 
+import static org.testng.Assert.*;
+
 import com.google.common.collect.ImmutableMap;
 import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.systemmetadata.SystemMetadataMappingsBuilder;
 import com.linkedin.metadata.version.GitVersion;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 import org.opensearch.OpenSearchException;
 import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest;
 import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
@@ -20,198 +29,270 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import javax.annotation.Nonnull;
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
+public abstract class IndexBuilderTestBase extends AbstractTestNGSpringContextTests {
 
-import static org.testng.Assert.*;
-
-abstract public class IndexBuilderTestBase extends AbstractTestNGSpringContextTests {
+  @Nonnull
+  protected abstract RestHighLevelClient getSearchClient();
 
-  @Nonnull
-  abstract protected RestHighLevelClient getSearchClient();
+  private static IndicesClient _indexClient;
+  private static final String TEST_INDEX_NAME = "esindex_builder_test";
+  private static ESIndexBuilder testDefaultBuilder;
 
-  private static IndicesClient _indexClient;
-  private static final String TEST_INDEX_NAME = "esindex_builder_test";
-  private static ESIndexBuilder testDefaultBuilder;
+  @BeforeClass
+  public void setup() {
+    _indexClient = getSearchClient().indices();
+    GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
+    testDefaultBuilder =
+        new ESIndexBuilder(
+            getSearchClient(),
+            1,
+            0,
+            0,
+            0,
+            Map.of(),
+            false,
+            false,
+            new ElasticSearchConfiguration(),
+            gitVersion);
+  }
+  @BeforeMethod
+  public static void wipe() throws Exception {
+    try {
+      _indexClient
+
.getAlias(new GetAliasesRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT) + .getAliases() + .keySet() + .forEach( + index -> { + try { + _indexClient.delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); - @BeforeClass - public void setup() { - _indexClient = getSearchClient().indices(); - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - testDefaultBuilder = new ESIndexBuilder(getSearchClient(), 1, 0, 0, - 0, Map.of(), false, false, - new ElasticSearchConfiguration(), gitVersion); + _indexClient.delete(new DeleteIndexRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT); + } catch (OpenSearchException exception) { + if (exception.status() != RestStatus.NOT_FOUND) { + throw exception; + } } + } - @BeforeMethod - public static void wipe() throws Exception { - try { - _indexClient.getAlias(new GetAliasesRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT) - .getAliases().keySet().forEach(index -> { - try { - _indexClient.delete(new DeleteIndexRequest(index), RequestOptions.DEFAULT); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); - - _indexClient.delete(new DeleteIndexRequest(TEST_INDEX_NAME), RequestOptions.DEFAULT); - } catch (OpenSearchException exception) { - if (exception.status() != RestStatus.NOT_FOUND) { - throw exception; - } - } - } + public static GetIndexResponse getTestIndex() throws IOException { + return _indexClient.get( + new GetIndexRequest(TEST_INDEX_NAME).includeDefaults(true), RequestOptions.DEFAULT); + } - public static GetIndexResponse getTestIndex() throws IOException { - return _indexClient.get(new GetIndexRequest(TEST_INDEX_NAME).includeDefaults(true), RequestOptions.DEFAULT); - } + @Test + public void testESIndexBuilderCreation() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder customIndexBuilder = + new ESIndexBuilder( + getSearchClient(), + 2, + 0, + 1, + 0, + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion); + customIndexBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + GetIndexResponse resp = getTestIndex(); - @Test - public void testESIndexBuilderCreation() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder customIndexBuilder = new ESIndexBuilder(getSearchClient(), 2, 0, 1, - 0, Map.of(), false, false, - new ElasticSearchConfiguration(), gitVersion); - customIndexBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - GetIndexResponse resp = getTestIndex(); - - assertEquals("2", resp.getSetting(TEST_INDEX_NAME, "index.number_of_shards")); - assertEquals("0", resp.getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals("0s", resp.getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - } + assertEquals("2", resp.getSetting(TEST_INDEX_NAME, "index.number_of_shards")); + assertEquals("0", resp.getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals("0s", resp.getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + } - @Test - public void testMappingReindex() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder enabledMappingReindex = new ESIndexBuilder(getSearchClient(), 1, 0, 0, - 0, Map.of(), false, true, - new ElasticSearchConfiguration(), gitVersion); + @Test + public void testMappingReindex() throws Exception { + GitVersion gitVersion = new 
GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder enabledMappingReindex = + new ESIndexBuilder( + getSearchClient(), + 1, + 0, + 0, + 0, + Map.of(), + false, + true, + new ElasticSearchConfiguration(), + gitVersion); - // No mappings - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + // No mappings + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - // add new mappings - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); + // add new mappings + enabledMappingReindex.buildIndex( + TEST_INDEX_NAME, SystemMetadataMappingsBuilder.getMappings(), Map.of()); - String afterAddedMappingCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - assertEquals(beforeCreationDate, afterAddedMappingCreationDate, "Expected no reindex on *adding* mappings"); + String afterAddedMappingCreationDate = + getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + assertEquals( + beforeCreationDate, + afterAddedMappingCreationDate, + "Expected no reindex on *adding* mappings"); - // change mappings - Map newProps = ((Map) SystemMetadataMappingsBuilder.getMappings().get("properties")) - .entrySet().stream() - .map(m -> !m.getKey().equals("urn") ? m - : Map.entry("urn", ImmutableMap.builder().put("type", "text").build())) + // change mappings + Map newProps = + ((Map) SystemMetadataMappingsBuilder.getMappings().get("properties")) + .entrySet().stream() + .map( + m -> + !m.getKey().equals("urn") + ? m + : Map.entry( + "urn", + ImmutableMap.builder().put("type", "text").build())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of("properties", newProps), Map.of()); + enabledMappingReindex.buildIndex(TEST_INDEX_NAME, Map.of("properties", newProps), Map.of()); - assertTrue(Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), - "Expected original index to be replaced with alias"); + assertTrue( + Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), + "Expected original index to be replaced with alias"); - Map.Entry> newIndex = getTestIndex().getAliases().entrySet().stream() - .filter(e -> e.getValue().stream().anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) - .findFirst().get(); - String afterChangedMappingCreationDate = getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); - assertNotEquals(beforeCreationDate, afterChangedMappingCreationDate, "Expected reindex on *changing* mappings"); - } + Map.Entry> newIndex = + getTestIndex().getAliases().entrySet().stream() + .filter( + e -> + e.getValue().stream() + .anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) + .findFirst() + .get(); + String afterChangedMappingCreationDate = + getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); + assertNotEquals( + beforeCreationDate, + afterChangedMappingCreationDate, + "Expected reindex on *changing* mappings"); + } + + @Test + public void testSettingsNumberOfShardsReindex() throws Exception { + // Set test defaults + testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertEquals("1", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_shards")); 
+ String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + + String expectedShards = "5"; + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder changedShardBuilder = + new ESIndexBuilder( + getSearchClient(), + Integer.parseInt(expectedShards), + testDefaultBuilder.getNumReplicas(), + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + + // add new shard setting + changedShardBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertTrue( + Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), + "Expected original index to be replaced with alias"); + + Map.Entry> newIndex = + getTestIndex().getAliases().entrySet().stream() + .filter( + e -> + e.getValue().stream() + .anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) + .findFirst() + .get(); - @Test - public void testSettingsNumberOfShardsReindex() throws Exception { - // Set test defaults - testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertEquals("1", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_shards")); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - String expectedShards = "5"; - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder changedShardBuilder = new ESIndexBuilder(getSearchClient(), - Integer.parseInt(expectedShards), + String afterCreationDate = getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); + assertNotEquals( + beforeCreationDate, afterCreationDate, "Expected reindex to result in different timestamp"); + assertEquals( + expectedShards, + getTestIndex().getSetting(newIndex.getKey(), "index.number_of_shards"), + "Expected number of shards: " + expectedShards); + } + + @Test + public void testSettingsNoReindex() throws Exception { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + List noReindexBuilders = + List.of( + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas() + 1, + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas(), + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds() + 10, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards() + 1, testDefaultBuilder.getNumReplicas(), testDefaultBuilder.getNumRetries(), testDefaultBuilder.getRefreshIntervalSeconds(), Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion); - - // add new shard setting - changedShardBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertTrue(Arrays.stream(getTestIndex().getIndices()).noneMatch(name -> name.equals(TEST_INDEX_NAME)), - "Expected original index to be replaced with alias"); - - Map.Entry> newIndex = getTestIndex().getAliases().entrySet().stream() - .filter(e -> e.getValue().stream().anyMatch(aliasMeta -> aliasMeta.alias().equals(TEST_INDEX_NAME))) - .findFirst().get(); - - String afterCreationDate = 
getTestIndex().getSetting(newIndex.getKey(), "index.creation_date"); - assertNotEquals(beforeCreationDate, afterCreationDate, "Expected reindex to result in different timestamp"); - assertEquals(expectedShards, getTestIndex().getSetting(newIndex.getKey(), "index.number_of_shards"), - "Expected number of shards: " + expectedShards); - } + false, + false, + new ElasticSearchConfiguration(), + gitVersion), + new ESIndexBuilder( + getSearchClient(), + testDefaultBuilder.getNumShards(), + testDefaultBuilder.getNumReplicas() + 1, + testDefaultBuilder.getNumRetries(), + testDefaultBuilder.getRefreshIntervalSeconds(), + Map.of(), + false, + false, + new ElasticSearchConfiguration(), + gitVersion)); - @Test - public void testSettingsNoReindex() throws Exception { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - List noReindexBuilders = List.of( - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas() + 1, - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas(), - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds() + 10, - Map.of(), - true, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards() + 1, - testDefaultBuilder.getNumReplicas(), - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - false, false, - new ElasticSearchConfiguration(), gitVersion), - new ESIndexBuilder(getSearchClient(), - testDefaultBuilder.getNumShards(), - testDefaultBuilder.getNumReplicas() + 1, - testDefaultBuilder.getNumRetries(), - testDefaultBuilder.getRefreshIntervalSeconds(), - Map.of(), - false, false, - new ElasticSearchConfiguration(), gitVersion) - ); - - for (ESIndexBuilder builder : noReindexBuilders) { - // Set test defaults - testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertEquals("0", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals("0s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - // build index with builder - builder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); - assertTrue(Arrays.asList(getTestIndex().getIndices()).contains(TEST_INDEX_NAME), - "Expected original index to remain"); - String afterCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); - - assertEquals(beforeCreationDate, afterCreationDate, "Expected no difference in index timestamp"); - assertEquals(String.valueOf(builder.getNumReplicas()), getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); - assertEquals(builder.getRefreshIntervalSeconds() + "s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); - - wipe(); - } - } + for (ESIndexBuilder builder : noReindexBuilders) { + // Set test defaults + testDefaultBuilder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertEquals("0", getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals("0s", getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + String beforeCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, 
"index.creation_date"); + + // build index with builder + builder.buildIndex(TEST_INDEX_NAME, Map.of(), Map.of()); + assertTrue( + Arrays.asList(getTestIndex().getIndices()).contains(TEST_INDEX_NAME), + "Expected original index to remain"); + String afterCreationDate = getTestIndex().getSetting(TEST_INDEX_NAME, "index.creation_date"); + assertEquals( + beforeCreationDate, afterCreationDate, "Expected no difference in index timestamp"); + assertEquals( + String.valueOf(builder.getNumReplicas()), + getTestIndex().getSetting(TEST_INDEX_NAME, "index.number_of_replicas")); + assertEquals( + builder.getRefreshIntervalSeconds() + "s", + getTestIndex().getSetting(TEST_INDEX_NAME, "index.refresh_interval")); + + wipe(); + } + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index d9f2f0e5aac94..02bd186ccc183 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -1,16 +1,14 @@ package com.linkedin.metadata.search.indexbuilder; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableMap; import com.linkedin.metadata.TestEntitySpecBuilder; -import java.util.Map; - import com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder; +import java.util.Map; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - - public class MappingsBuilderTest { @Test @@ -19,14 +17,33 @@ public void testMappingsBuilder() { assertEquals(result.size(), 1); Map properties = (Map) result.get("properties"); assertEquals(properties.size(), 20); - assertEquals(properties.get("urn"), ImmutableMap.of("type", "keyword", + assertEquals( + properties.get("urn"), + ImmutableMap.of( + "type", + "keyword", "fields", - ImmutableMap.of("delimited", - ImmutableMap.of("type", "text", "analyzer", "urn_component", "search_analyzer", "query_urn_component", - "search_quote_analyzer", "quote_analyzer"), - "ngram", - ImmutableMap.of("type", "search_as_you_type", "max_shingle_size", "4", "doc_values", "false", - "analyzer", "partial_urn_component")))); + ImmutableMap.of( + "delimited", + ImmutableMap.of( + "type", + "text", + "analyzer", + "urn_component", + "search_analyzer", + "query_urn_component", + "search_quote_analyzer", + "quote_analyzer"), + "ngram", + ImmutableMap.of( + "type", + "search_as_you_type", + "max_shingle_size", + "4", + "doc_values", + "false", + "analyzer", + "partial_urn_component")))); assertEquals(properties.get("runId"), ImmutableMap.of("type", "keyword")); assertTrue(properties.containsKey("browsePaths")); assertTrue(properties.containsKey("browsePathV2")); @@ -37,24 +54,30 @@ public void testMappingsBuilder() { Map keyPart3FieldSubfields = (Map) keyPart3Field.get("fields"); assertEquals(keyPart3FieldSubfields.size(), 1); assertTrue(keyPart3FieldSubfields.containsKey("keyword")); - Map customPropertiesField = (Map) properties.get("customProperties"); + Map customPropertiesField = + (Map) properties.get("customProperties"); assertEquals(customPropertiesField.get("type"), "keyword"); assertEquals(customPropertiesField.get("normalizer"), "keyword_normalizer"); - Map customPropertiesFieldSubfields = (Map) customPropertiesField.get("fields"); + Map 
customPropertiesFieldSubfields = + (Map) customPropertiesField.get("fields"); assertEquals(customPropertiesFieldSubfields.size(), 1); assertTrue(customPropertiesFieldSubfields.containsKey("keyword")); // TEXT - Map nestedArrayStringField = (Map) properties.get("nestedArrayStringField"); + Map nestedArrayStringField = + (Map) properties.get("nestedArrayStringField"); assertEquals(nestedArrayStringField.get("type"), "keyword"); assertEquals(nestedArrayStringField.get("normalizer"), "keyword_normalizer"); - Map nestedArrayStringFieldSubfields = (Map) nestedArrayStringField.get("fields"); + Map nestedArrayStringFieldSubfields = + (Map) nestedArrayStringField.get("fields"); assertEquals(nestedArrayStringFieldSubfields.size(), 2); assertTrue(nestedArrayStringFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayStringFieldSubfields.containsKey("keyword")); - Map nestedArrayArrayField = (Map) properties.get("nestedArrayArrayField"); + Map nestedArrayArrayField = + (Map) properties.get("nestedArrayArrayField"); assertEquals(nestedArrayArrayField.get("type"), "keyword"); assertEquals(nestedArrayArrayField.get("normalizer"), "keyword_normalizer"); - Map nestedArrayArrayFieldSubfields = (Map) nestedArrayArrayField.get("fields"); + Map nestedArrayArrayFieldSubfields = + (Map) nestedArrayArrayField.get("fields"); assertEquals(nestedArrayArrayFieldSubfields.size(), 2); assertTrue(nestedArrayArrayFieldSubfields.containsKey("delimited")); assertTrue(nestedArrayArrayFieldSubfields.containsKey("keyword")); @@ -77,7 +100,8 @@ public void testMappingsBuilder() { Map textArrayField = (Map) properties.get("textArrayField"); assertEquals(textArrayField.get("type"), "keyword"); assertEquals(textArrayField.get("normalizer"), "keyword_normalizer"); - Map textArrayFieldSubfields = (Map) textArrayField.get("fields"); + Map textArrayFieldSubfields = + (Map) textArrayField.get("fields"); assertEquals(textArrayFieldSubfields.size(), 3); assertTrue(textArrayFieldSubfields.containsKey("delimited")); assertTrue(textArrayFieldSubfields.containsKey("ngram")); @@ -108,7 +132,8 @@ public void testMappingsBuilder() { Map nestedForeignKey = (Map) properties.get("nestedForeignKey"); assertEquals(nestedForeignKey.get("type"), "text"); assertEquals(nestedForeignKey.get("analyzer"), "urn_component"); - Map nestedForeignKeySubfields = (Map) nestedForeignKey.get("fields"); + Map nestedForeignKeySubfields = + (Map) nestedForeignKey.get("fields"); assertEquals(nestedForeignKeySubfields.size(), 2); assertTrue(nestedForeignKeySubfields.containsKey("keyword")); assertTrue(nestedForeignKeySubfields.containsKey("ngram")); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java index 3896ba749e85e..dd30010b08758 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/GoldenOpenSearchTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.search.opensearch; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.fixtures.GoldenTestBase; @@ -11,34 +13,35 @@ import org.springframework.context.annotation.Import; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - -@Import({OpenSearchSuite.class, 
+@Import({
+  OpenSearchSuite.class,
+  SampleDataFixtureConfiguration.class,
+  SearchTestContainerConfiguration.class
+})
 public class GoldenOpenSearchTest extends GoldenTestBase {
 
-    @Autowired
-    @Qualifier("longTailSearchService")
-    protected SearchService searchService;
-
-    @Autowired
-    @Qualifier("entityRegistry")
-    private EntityRegistry entityRegistry;
-
-
-    @NotNull
-    @Override
-    protected EntityRegistry getEntityRegistry() {
-        return entityRegistry;
-    }
-
-    @NotNull
-    @Override
-    protected SearchService getSearchService() {
-        return searchService;
-    }
-
-    @Test
-    public void initTest() {
-        assertNotNull(searchService);
-    }
+  @Autowired
+  @Qualifier("longTailSearchService")
+  protected SearchService searchService;
+
+  @Autowired
+  @Qualifier("entityRegistry")
+  private EntityRegistry entityRegistry;
+
+  @NotNull
+  @Override
+  protected EntityRegistry getEntityRegistry() {
+    return entityRegistry;
+  }
+
+  @NotNull
+  @Override
+  protected SearchService getSearchService() {
+    return searchService;
+  }
+
+  @Test
+  public void initTest() {
+    assertNotNull(searchService);
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java
index 312b56364bd91..ef1ed51eb4799 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/IndexBuilderOpenSearchTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.search.opensearch;
 
+import static org.testng.AssertJUnit.assertNotNull;
+
 import com.linkedin.metadata.search.indexbuilder.IndexBuilderTestBase;
 import io.datahubproject.test.search.config.SearchTestContainerConfiguration;
 import org.jetbrains.annotations.NotNull;
@@ -8,23 +10,19 @@
 import org.springframework.context.annotation.Import;
 import org.testng.annotations.Test;
 
-import static org.testng.AssertJUnit.assertNotNull;
-
-
 @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class})
 public class IndexBuilderOpenSearchTest extends IndexBuilderTestBase {
 
-    @Autowired
-    private RestHighLevelClient _searchClient;
+  @Autowired private RestHighLevelClient _searchClient;
 
-    @NotNull
-    @Override
-    protected RestHighLevelClient getSearchClient() {
-        return _searchClient;
-    }
+  @NotNull
+  @Override
+  protected RestHighLevelClient getSearchClient() {
+    return _searchClient;
+  }
 
-    @Test
-    public void initTest() {
-        assertNotNull(_searchClient);
-    }
+  @Test
+  public void initTest() {
+    assertNotNull(_searchClient);
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java
index 6fc0677ad6e39..cc17e3287544c 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageDataFixtureOpenSearchTest.java
@@ -12,32 +12,35 @@
 import org.testng.AssertJUnit;
 import org.testng.annotations.Test;
 
-
-@Import({OpenSearchSuite.class, SearchLineageFixtureConfiguration.class, SearchTestContainerConfiguration.class})
+@Import({
+  OpenSearchSuite.class,
+  SearchLineageFixtureConfiguration.class,
+  SearchTestContainerConfiguration.class
+})
 public class LineageDataFixtureOpenSearchTest extends LineageDataFixtureTestBase {
-    @Autowired
-    @Qualifier("searchLineageSearchService")
-    protected SearchService searchService;
-
-    @Autowired
-    @Qualifier("searchLineageLineageSearchService")
-    protected LineageSearchService lineageService;
-
-    @NotNull
-    @Override
-    protected LineageSearchService getLineageService() {
-        return lineageService;
-    }
-
-    @NotNull
-    @Override
-    protected SearchService getSearchService() {
-        return searchService;
-    }
-
-    @Test
-    public void initTest() {
-        AssertJUnit.assertNotNull(lineageService);
-    }
+  @Autowired
+  @Qualifier("searchLineageSearchService")
+  protected SearchService searchService;
+
+  @Autowired
+  @Qualifier("searchLineageLineageSearchService")
+  protected LineageSearchService lineageService;
+
+  @NotNull
+  @Override
+  protected LineageSearchService getLineageService() {
+    return lineageService;
+  }
+
+  @NotNull
+  @Override
+  protected SearchService getSearchService() {
+    return searchService;
+  }
+
+  @Test
+  public void initTest() {
+    AssertJUnit.assertNotNull(lineageService);
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java
index 1a6242c2211fd..26c2cf28cdeca 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/LineageServiceOpenSearchTest.java
@@ -14,19 +14,18 @@
 import org.testng.AssertJUnit;
 import org.testng.annotations.Test;
 
-@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class})
+@Import({
+  OpenSearchSuite.class,
+  SearchCommonTestConfiguration.class,
+  SearchTestContainerConfiguration.class
+})
 public class LineageServiceOpenSearchTest extends LineageServiceTestBase {
 
-    @Autowired
-    private RestHighLevelClient _searchClient;
-    @Autowired
-    private ESBulkProcessor _bulkProcessor;
-    @Autowired
-    private ESIndexBuilder _esIndexBuilder;
-    @Autowired
-    private SearchConfiguration _searchConfiguration;
-    @Autowired
-    private CustomSearchConfiguration _customSearchConfiguration;
+  @Autowired private RestHighLevelClient _searchClient;
+  @Autowired private ESBulkProcessor _bulkProcessor;
+  @Autowired private ESIndexBuilder _esIndexBuilder;
+  @Autowired private SearchConfiguration _searchConfiguration;
+  @Autowired private CustomSearchConfiguration _customSearchConfiguration;
 
   @NotNull
   @Override
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java
index 559c623c97d5a..42a178893e837 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/OpenSearchSuite.java
@@ -10,22 +10,23 @@
 @TestConfiguration
 public class OpenSearchSuite extends AbstractTestNGSpringContextTests {
 
-    private static final OpenSearchTestContainer OPENSEARCH_TEST_CONTAINER;
-    private static GenericContainer<?> container;
-    static {
-        OPENSEARCH_TEST_CONTAINER = new OpenSearchTestContainer();
-    }
+  private static final OpenSearchTestContainer OPENSEARCH_TEST_CONTAINER;
+  private static GenericContainer<?> container;
 
-    @AfterSuite
-    public void after() {
-        OPENSEARCH_TEST_CONTAINER.stopContainer();
-    }
+  static {
+    OPENSEARCH_TEST_CONTAINER = new OpenSearchTestContainer();
+  }
+
+  @AfterSuite
+  public void after() {
+    OPENSEARCH_TEST_CONTAINER.stopContainer();
+  }
 
-    @Bean(name = "testSearchContainer")
-    public GenericContainer<?> testSearchContainer() {
-        if (container == null) {
-            container = OPENSEARCH_TEST_CONTAINER.startContainer();
-        }
-        return container;
+  @Bean(name = "testSearchContainer")
+  public GenericContainer<?> testSearchContainer() {
+    if (container == null) {
+      container = OPENSEARCH_TEST_CONTAINER.startContainer();
     }
+    return container;
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java
index 081eb5f70fc85..d358ba177f91d 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SampleDataFixtureOpenSearchTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.search.opensearch;
 
+import static org.testng.AssertJUnit.assertNotNull;
+
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.SearchService;
@@ -13,32 +15,30 @@
 import org.springframework.context.annotation.Import;
 import org.testng.annotations.Test;
 
-import static org.testng.AssertJUnit.assertNotNull;
-
-
-/**
- * Runs sample data fixture tests for Opensearch test container
- */
+/** Runs sample data fixture tests for Opensearch test container */
 @Getter
-@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class})
+@Import({
+  OpenSearchSuite.class,
+  SampleDataFixtureConfiguration.class,
+  SearchTestContainerConfiguration.class
+})
 public class SampleDataFixtureOpenSearchTest extends SampleDataFixtureTestBase {
 
-    @Autowired
-    private RestHighLevelClient searchClient;
+  @Autowired private RestHighLevelClient searchClient;
 
-    @Autowired
-    @Qualifier("sampleDataSearchService")
-    protected SearchService searchService;
+  @Autowired
+  @Qualifier("sampleDataSearchService")
+  protected SearchService searchService;
 
-    @Autowired
-    @Qualifier("sampleDataEntityClient")
-    protected EntityClient entityClient;
+  @Autowired
+  @Qualifier("sampleDataEntityClient")
+  protected EntityClient entityClient;
 
-    @Autowired
-    @Qualifier("entityRegistry")
-    private EntityRegistry entityRegistry;
+  @Autowired
+  @Qualifier("entityRegistry")
+  private EntityRegistry entityRegistry;
 
-    @Test
-    public void initTest() {
-        assertNotNull(searchClient);
-    }
+  @Test
+  public void initTest() {
+    assertNotNull(searchClient);
+  }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java
index 0b166975da0d1..7f799d8eebf0a 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchDAOOpenSearchTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.search.opensearch;
 
+import static org.testng.AssertJUnit.assertNotNull;
+
 import com.linkedin.metadata.config.search.SearchConfiguration;
 import com.linkedin.metadata.search.query.SearchDAOTestBase;
 import com.linkedin.metadata.utils.elasticsearch.IndexConvention;
@@ -12,16 +14,16 @@
 import org.springframework.context.annotation.Import;
 import
org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertNotNull; - - @Getter -@Import({OpenSearchSuite.class, SampleDataFixtureConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SampleDataFixtureConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchDAOOpenSearchTest extends SearchDAOTestBase { - @Autowired - private RestHighLevelClient searchClient; - @Autowired - private SearchConfiguration searchConfiguration; + @Autowired private RestHighLevelClient searchClient; + @Autowired private SearchConfiguration searchConfiguration; + @Autowired @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention; diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java index 8a55ba7b37ef9..1127ba2089a91 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SearchServiceOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class SearchServiceOpenSearchTest extends SearchServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java index f0bb8e1c12479..7ba90319cf1d3 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/SystemMetadataServiceOpenSearchTest.java @@ -11,16 +11,12 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; - @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class SystemMetadataServiceOpenSearchTest extends SystemMetadataServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java index 467f7fb43be1b..80db8864014c3 100644 --- 
a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TestEntityOpenSearchTest.java @@ -14,19 +14,18 @@ import org.testng.AssertJUnit; import org.testng.annotations.Test; -@Import({OpenSearchSuite.class, SearchCommonTestConfiguration.class, SearchTestContainerConfiguration.class}) +@Import({ + OpenSearchSuite.class, + SearchCommonTestConfiguration.class, + SearchTestContainerConfiguration.class +}) public class TestEntityOpenSearchTest extends TestEntityTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java index 3333b9f0942f5..63dffa9c21004 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/opensearch/TimeseriesAspectServiceOpenSearchTest.java @@ -14,12 +14,9 @@ @Import({OpenSearchSuite.class, SearchTestContainerConfiguration.class}) public class TimeseriesAspectServiceOpenSearchTest extends TimeseriesAspectServiceTestBase { - @Autowired - private RestHighLevelClient _searchClient; - @Autowired - private ESBulkProcessor _bulkProcessor; - @Autowired - private ESIndexBuilder _esIndexBuilder; + @Autowired private RestHighLevelClient _searchClient; + @Autowired private ESBulkProcessor _bulkProcessor; + @Autowired private ESIndexBuilder _esIndexBuilder; @NotNull @Override diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java index 91e7747afb4a1..a261b53f25c60 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/BrowseDAOTest.java @@ -1,12 +1,18 @@ package com.linkedin.metadata.search.query; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; + import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.entity.TestEntityRegistry; +import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; import java.net.URISyntaxException; import 
java.util.Collections; import java.util.HashMap; @@ -23,32 +29,24 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; - @Import(SearchCommonTestConfiguration.class) public class BrowseDAOTest extends AbstractTestNGSpringContextTests { private RestHighLevelClient _mockClient; private ESBrowseDAO _browseDAO; - @Autowired - private SearchConfiguration _searchConfiguration; - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; + @Autowired private SearchConfiguration _searchConfiguration; + @Autowired private CustomSearchConfiguration _customSearchConfiguration; @BeforeMethod public void setup() { _mockClient = mock(RestHighLevelClient.class); - _browseDAO = new ESBrowseDAO( - new TestEntityRegistry(), - _mockClient, - new IndexConventionImpl("es_browse_dao_test"), - _searchConfiguration, - _customSearchConfiguration - ); + _browseDAO = + new ESBrowseDAO( + new TestEntityRegistry(), + _mockClient, + new IndexConventionImpl("es_browse_dao_test"), + _searchConfiguration, + _customSearchConfiguration); } public static Urn makeUrn(Object id) { @@ -76,7 +74,7 @@ public void testGetBrowsePath() throws Exception { // Test the case of single search hit & browsePaths field doesn't exist sourceMap.remove("browse_paths"); when(mockSearchHit.getSourceAsMap()).thenReturn(sourceMap); - when(mockSearchHits.getHits()).thenReturn(new SearchHit[]{mockSearchHit}); + when(mockSearchHits.getHits()).thenReturn(new SearchHit[] {mockSearchHit}); when(mockSearchResponse.getHits()).thenReturn(mockSearchHits); when(_mockClient.search(any(), eq(RequestOptions.DEFAULT))).thenReturn(mockSearchResponse); assertEquals(_browseDAO.getBrowsePaths("dataset", dummyUrn).size(), 0); @@ -84,11 +82,11 @@ public void testGetBrowsePath() throws Exception { // Test the case of single search hit & browsePaths field exists sourceMap.put("browsePaths", Collections.singletonList("foo")); when(mockSearchHit.getSourceAsMap()).thenReturn(sourceMap); - when(mockSearchHits.getHits()).thenReturn(new SearchHit[]{mockSearchHit}); + when(mockSearchHits.getHits()).thenReturn(new SearchHit[] {mockSearchHit}); when(mockSearchResponse.getHits()).thenReturn(mockSearchHits); when(_mockClient.search(any(), eq(RequestOptions.DEFAULT))).thenReturn(mockSearchResponse); List browsePaths = _browseDAO.getBrowsePaths("dataset", dummyUrn); assertEquals(browsePaths.size(), 1); assertEquals(browsePaths.get(0), "foo"); } -} \ No newline at end of file +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java index 2dbc142d45071..ba909dc3822c5 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java @@ -1,5 +1,12 @@ package com.linkedin.metadata.search.query; +import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; +import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.fail; + import com.datahub.test.Snapshot; import 
com.google.common.collect.ImmutableList; import com.linkedin.data.template.LongMap; @@ -22,286 +29,404 @@ import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO; import com.linkedin.metadata.utils.SearchUtil; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; -import org.opensearch.client.RestHighLevelClient; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import org.opensearch.client.RestHighLevelClient; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH; -import static com.linkedin.metadata.utils.SearchUtil.AGGREGATION_SEPARATOR_CHAR; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.fail; - -abstract public class SearchDAOTestBase extends AbstractTestNGSpringContextTests { - - abstract protected RestHighLevelClient getSearchClient(); - - abstract protected SearchConfiguration getSearchConfiguration(); - - abstract protected IndexConvention getIndexConvention(); - - EntityRegistry _entityRegistry = new SnapshotEntityRegistry(new Snapshot()); - - - @Test - public void testTransformFilterForEntitiesNoChange() { - Criterion c = new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertEquals(f, transformedFilter); +public abstract class SearchDAOTestBase extends AbstractTestNGSpringContextTests { + + protected abstract RestHighLevelClient getSearchClient(); + + protected abstract SearchConfiguration getSearchConfiguration(); + + protected abstract IndexConvention getIndexConvention(); + + EntityRegistry _entityRegistry = new SnapshotEntityRegistry(new Snapshot()); + + @Test + public void testTransformFilterForEntitiesNoChange() { + Criterion c = + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertEquals(f, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesNullFilter() { + Filter transformedFilter = SearchUtil.transformFilterForEntities(null, getIndexConvention()); + assertNotNull(getIndexConvention()); + assertEquals(null, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesWithChanges() { + + Criterion c = + new Criterion() + .setValue("dataset") + .setValues(new StringArray(ImmutableList.of("dataset"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new 
ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesNullFilter() { - Filter transformedFilter = SearchUtil.transformFilterForEntities(null, getIndexConvention()); - assertNotNull(getIndexConvention()); - assertEquals(null, transformedFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datasetindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); + + assertEquals(expectedNewFilter, transformedFilter); + } + + @Test + public void testTransformFilterForEntitiesWithUnderscore() { + + Criterion c = + new Criterion() + .setValue("data_job") + .setValues(new StringArray(ImmutableList.of("data_job"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithChanges() { - - Criterion c = new Criterion().setValue("dataset").setValues( - new StringArray(ImmutableList.of("dataset")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datasetindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datasetindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); - - assertEquals(expectedNewFilter, transformedFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datajobindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datajobindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); + + assertEquals(transformedFilter, expectedNewFilter); + } + + @Test + public void testTransformFilterForEntitiesWithSomeChanges() { + + Criterion criterionChanged = + new Criterion() + 
.setValue("dataset") + .setValues(new StringArray(ImmutableList.of("dataset"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_entityType"); + Criterion criterionUnchanged = + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword"); + + Filter f = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(criterionChanged, criterionUnchanged)))); + Filter originalF = null; + try { + originalF = f.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithUnderscore() { - - Criterion c = new Criterion().setValue("data_job").setValues( - new StringArray(ImmutableList.of("data_job")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(c)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datajobindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datajobindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion)))); - - assertEquals(transformedFilter, expectedNewFilter); + assertEquals(f, originalF); + + Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); + assertNotEquals(originalF, transformedFilter); + + Criterion expectedNewCriterion = + new Criterion() + .setValue("smpldat_datasetindex_v2") + .setValues(new StringArray(ImmutableList.of("smpldat_datasetindex_v2"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("_index"); + + Filter expectedNewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(expectedNewCriterion, criterionUnchanged)))); + + assertEquals(expectedNewFilter, transformedFilter); + } + + @Test + public void testTransformIndexIntoEntityNameSingle() { + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + getIndexConvention(), + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + // Empty aggregations + final SearchResultMetadata searchResultMetadata = + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); + SearchResult result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata(searchResultMetadata) + .setFrom(0) + .setPageSize(100) + .setNumEntities(30); + SearchResult expectedResult = null; + try { + expectedResult = result.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } - - @Test - public void testTransformFilterForEntitiesWithSomeChanges() { - - Criterion criterionChanged = new Criterion().setValue("dataset").setValues( - new StringArray(ImmutableList.of("dataset")) - 
).setNegated(false).setCondition(Condition.EQUAL).setField("_entityType"); - Criterion criterionUnchanged = new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword"); - - Filter f = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criterionChanged, criterionUnchanged)))); - Filter originalF = null; - try { - originalF = f.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(f, originalF); - - Filter transformedFilter = SearchUtil.transformFilterForEntities(f, getIndexConvention()); - assertNotEquals(originalF, transformedFilter); - - Criterion expectedNewCriterion = new Criterion().setValue("smpldat_datasetindex_v2").setValues( - new StringArray(ImmutableList.of("smpldat_datasetindex_v2")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("_index"); - - Filter expectedNewFilter = new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(expectedNewCriterion, criterionUnchanged)))); - - assertEquals(expectedNewFilter, transformedFilter); + assertEquals(expectedResult, searchDAO.transformIndexIntoEntityName(result)); + + // one facet, do not transform + Map aggMap = Map.of("urn:li:corpuser:datahub", Long.valueOf(3)); + + List aggregationMetadataList = new ArrayList<>(); + aggregationMetadataList.add( + new AggregationMetadata() + .setName("owners") + .setDisplayName("Owned by") + .setAggregations(new LongMap(aggMap)) + .setFilterValues( + new FilterValueArray(SearchUtil.convertToFilters(aggMap, Collections.emptySet())))); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + result.setMetadata(searchResultMetadata); + + try { + expectedResult = result.copy(); + } catch (CloneNotSupportedException e) { + fail(e.getMessage()); } + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - @Test - public void testTransformIndexIntoEntityNameSingle() { - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), getIndexConvention(), false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - // Empty aggregations - final SearchResultMetadata searchResultMetadata = - new SearchResultMetadata().setAggregations(new AggregationMetadataArray()); - SearchResult result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(searchResultMetadata) - .setFrom(0) - .setPageSize(100) - .setNumEntities(30); - SearchResult expectedResult = null; - try { - expectedResult = result.copy(); - } catch (CloneNotSupportedException e) { - fail(e.getMessage()); - } - assertEquals(expectedResult, searchDAO.transformIndexIntoEntityName(result)); - - // one facet, do not transform - Map aggMap = Map.of("urn:li:corpuser:datahub", Long.valueOf(3)); - - List aggregationMetadataList = new ArrayList<>(); - aggregationMetadataList.add(new AggregationMetadata().setName("owners") - .setDisplayName("Owned by") - .setAggregations(new LongMap(aggMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(aggMap, Collections.emptySet()))) - ); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); - result.setMetadata(searchResultMetadata); - - try { - expectedResult = result.copy(); - } catch (CloneNotSupportedException e) { - 
fail(e.getMessage()); - } - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - - // one facet, transform - Map entityTypeMap = Map.of("smpldat_datasetindex_v2", Long.valueOf(3)); - - aggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType") + // one facet, transform + Map entityTypeMap = Map.of("smpldat_datasetindex_v2", Long.valueOf(3)); + + aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType") .setDisplayName("Type") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); - result.setMetadata(searchResultMetadata); - - Map expectedEntityTypeMap = Map.of("dataset", Long.valueOf(3)); - - List expectedAggregationMetadataList = List.of( - new AggregationMetadata().setName("_entityType") - .setDisplayName("Type") - .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - expectedResult.setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - } - - @Test - public void testTransformIndexIntoEntityNameNested() { - ESSearchDAO searchDAO = new ESSearchDAO(_entityRegistry, getSearchClient(), getIndexConvention(), false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, getSearchConfiguration(), null); - // One nested facet - Map entityTypeMap = Map.of( - String.format("smpldat_datasetindex_v2%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("smpldat_datasetindex_v2%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "smpldat_datasetindex_v2", Long.valueOf(20) - ); - List aggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType␞owners") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + searchResultMetadata.setAggregations(new AggregationMetadataArray(aggregationMetadataList)); + result.setMetadata(searchResultMetadata); + + Map expectedEntityTypeMap = Map.of("dataset", Long.valueOf(3)); + + List expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType") + .setDisplayName("Type") + .setAggregations(new LongMap(expectedEntityTypeMap)) + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + expectedResult.setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + } + + @Test + public void testTransformIndexIntoEntityNameNested() { + ESSearchDAO searchDAO = + new ESSearchDAO( + _entityRegistry, + getSearchClient(), + getIndexConvention(), + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + getSearchConfiguration(), + null); + // One nested facet + Map entityTypeMap = + Map.of( + String.format( + "smpldat_datasetindex_v2%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format( + "smpldat_datasetindex_v2%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "smpldat_datasetindex_v2", + 
Long.valueOf(20)); + List aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType␞owners") .setDisplayName("Type␞Owned By") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - SearchResult result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(aggregationMetadataList) - )) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - - Map expectedEntityTypeMap = Map.of( - String.format("dataset%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("dataset%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "dataset", Long.valueOf(20) - ); - - List expectedAggregationMetadataList = List.of(new AggregationMetadata().setName("_entityType␞owners") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + SearchResult result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(aggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + + Map expectedEntityTypeMap = + Map.of( + String.format("dataset%surn:li:corpuser:datahub", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format("dataset%surn:li:corpuser:bfoo", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "dataset", + Long.valueOf(20)); + + List expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("_entityType␞owners") .setDisplayName("Type␞Owned By") .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - SearchResult expectedResult = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(expectedAggregationMetadataList))) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - - // One nested facet, opposite order - entityTypeMap = Map.of( - String.format("urn:li:corpuser:datahub%ssmpldat_datasetindex_v2", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("urn:li:corpuser:datahub%ssmpldat_chartindex_v2", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "urn:li:corpuser:datahub", Long.valueOf(20) - ); - aggregationMetadataList = List.of(new AggregationMetadata().setName("owners␞_entityType") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + SearchResult expectedResult = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + + // One nested facet, opposite order + entityTypeMap = + Map.of( + String.format( + "urn:li:corpuser:datahub%ssmpldat_datasetindex_v2", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format( + "urn:li:corpuser:datahub%ssmpldat_chartindex_v2", AGGREGATION_SEPARATOR_CHAR), + 
Long.valueOf(7), + "urn:li:corpuser:datahub", + Long.valueOf(20)); + aggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("owners␞_entityType") .setDisplayName("Owned By␞Type") .setAggregations(new LongMap(entityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet()))) - ); - result = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(aggregationMetadataList) - )) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - - expectedEntityTypeMap = Map.of( - String.format("urn:li:corpuser:datahub%sdataset", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(3), - String.format("urn:li:corpuser:datahub%schart", AGGREGATION_SEPARATOR_CHAR), Long.valueOf(7), - "urn:li:corpuser:datahub", Long.valueOf(20) - ); - - expectedAggregationMetadataList = List.of(new AggregationMetadata().setName("owners␞_entityType") + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters(entityTypeMap, Collections.emptySet())))); + result = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(aggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + + expectedEntityTypeMap = + Map.of( + String.format("urn:li:corpuser:datahub%sdataset", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(3), + String.format("urn:li:corpuser:datahub%schart", AGGREGATION_SEPARATOR_CHAR), + Long.valueOf(7), + "urn:li:corpuser:datahub", + Long.valueOf(20)); + + expectedAggregationMetadataList = + List.of( + new AggregationMetadata() + .setName("owners␞_entityType") .setDisplayName("Owned By␞Type") .setAggregations(new LongMap(expectedEntityTypeMap)) - .setFilterValues(new FilterValueArray(SearchUtil.convertToFilters(expectedEntityTypeMap, Collections.emptySet()))) - ); - expectedResult = new SearchResult().setEntities(new SearchEntityArray(new ArrayList<>())) - .setMetadata(new SearchResultMetadata().setAggregations( - new AggregationMetadataArray(expectedAggregationMetadataList))) - .setFrom(0) - .setPageSize(100) - .setNumEntities(50); - assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); - } + .setFilterValues( + new FilterValueArray( + SearchUtil.convertToFilters( + expectedEntityTypeMap, Collections.emptySet())))); + expectedResult = + new SearchResult() + .setEntities(new SearchEntityArray(new ArrayList<>())) + .setMetadata( + new SearchResultMetadata() + .setAggregations(new AggregationMetadataArray(expectedAggregationMetadataList))) + .setFrom(0) + .setPageSize(100) + .setNumEntities(50); + assertEquals(searchDAO.transformIndexIntoEntityName(result), expectedResult); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java index 66e7b62741f4c..b52f4cd4e92f7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AggregationQueryBuilderTest.java @@ -1,47 +1,45 @@ package com.linkedin.metadata.search.query.request; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.config.search.SearchConfiguration; 
-import com.google.common.collect.ImmutableList;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
+import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder;
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import com.linkedin.metadata.search.elasticsearch.query.request.AggregationQueryBuilder;
 import org.opensearch.search.aggregations.AggregationBuilder;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-
 public class AggregationQueryBuilderTest {
 
   @Test
   public void testGetDefaultAggregationsHasFields() {
 
-    SearchableAnnotation annotation = new SearchableAnnotation(
-        "test",
-        SearchableAnnotation.FieldType.KEYWORD,
-        true,
-        true,
-        false,
-        true,
-        Optional.empty(),
-        Optional.of("Has Test"),
-        1.0,
-        Optional.of("hasTest"),
-        Optional.empty(),
-        Collections.emptyMap(),
-        Collections.emptyList()
-    );
+    SearchableAnnotation annotation =
+        new SearchableAnnotation(
+            "test",
+            SearchableAnnotation.FieldType.KEYWORD,
+            true,
+            true,
+            false,
+            true,
+            Optional.empty(),
+            Optional.of("Has Test"),
+            1.0,
+            Optional.of("hasTest"),
+            Optional.empty(),
+            Collections.emptyMap(),
+            Collections.emptyList());
 
     SearchConfiguration config = new SearchConfiguration();
     config.setMaxTermBucketSize(25);
 
-    AggregationQueryBuilder builder = new AggregationQueryBuilder(
-        config, ImmutableList.of(annotation));
+    AggregationQueryBuilder builder =
+        new AggregationQueryBuilder(config, ImmutableList.of(annotation));
 
     List<AggregationBuilder> aggs = builder.getAggregations();
 
@@ -51,27 +49,27 @@ public void testGetDefaultAggregationsHasFields() {
   @Test
   public void testGetDefaultAggregationsFields() {
 
-    SearchableAnnotation annotation = new SearchableAnnotation(
-        "test",
-        SearchableAnnotation.FieldType.KEYWORD,
-        true,
-        true,
-        true,
-        false,
-        Optional.of("Test Filter"),
-        Optional.empty(),
-        1.0,
-        Optional.empty(),
-        Optional.empty(),
-        Collections.emptyMap(),
-        Collections.emptyList()
-    );
+    SearchableAnnotation annotation =
+        new SearchableAnnotation(
+            "test",
+            SearchableAnnotation.FieldType.KEYWORD,
+            true,
+            true,
+            true,
+            false,
+            Optional.of("Test Filter"),
+            Optional.empty(),
+            1.0,
+            Optional.empty(),
+            Optional.empty(),
+            Collections.emptyMap(),
+            Collections.emptyList());
 
     SearchConfiguration config = new SearchConfiguration();
     config.setMaxTermBucketSize(25);
 
-    AggregationQueryBuilder builder = new AggregationQueryBuilder(
-        config, ImmutableList.of(annotation));
+    AggregationQueryBuilder builder =
+        new AggregationQueryBuilder(config, ImmutableList.of(annotation));
 
     List<AggregationBuilder> aggs = builder.getAggregations();
 
@@ -81,56 +79,53 @@ public void testGetDefaultAggregationsFields() {
   @Test
   public void testGetSpecificAggregationsHasFields() {
 
-    SearchableAnnotation annotation1 = new SearchableAnnotation(
-        "test1",
-        SearchableAnnotation.FieldType.KEYWORD,
-        true,
-        true,
-        false,
-        false,
-        Optional.empty(),
-        Optional.of("Has Test"),
-        1.0,
-        Optional.of("hasTest1"),
-        Optional.empty(),
-        Collections.emptyMap(),
-        Collections.emptyList()
-    );
-
-    SearchableAnnotation annotation2 = new SearchableAnnotation(
-        "test2",
-        SearchableAnnotation.FieldType.KEYWORD,
-        true,
-        true,
-        false,
-        false,
-        Optional.of("Test Filter"),
-        Optional.empty(),
-        1.0,
-        Optional.empty(),
-        Optional.empty(),
-        Collections.emptyMap(),
-        Collections.emptyList()
-    );
+    SearchableAnnotation annotation1 =
+        new SearchableAnnotation(
+            "test1",
+            SearchableAnnotation.FieldType.KEYWORD,
+            true,
+            true,
+            false,
+            false,
+            Optional.empty(),
+            Optional.of("Has Test"),
+            1.0,
+            Optional.of("hasTest1"),
+            Optional.empty(),
+            Collections.emptyMap(),
+            Collections.emptyList());
+
+    SearchableAnnotation annotation2 =
+        new SearchableAnnotation(
+            "test2",
+            SearchableAnnotation.FieldType.KEYWORD,
+            true,
+            true,
+            false,
+            false,
+            Optional.of("Test Filter"),
+            Optional.empty(),
+            1.0,
+            Optional.empty(),
+            Optional.empty(),
+            Collections.emptyMap(),
+            Collections.emptyList());
 
     SearchConfiguration config = new SearchConfiguration();
     config.setMaxTermBucketSize(25);
 
-    AggregationQueryBuilder builder = new AggregationQueryBuilder(
-        config, ImmutableList.of(annotation1, annotation2));
+    AggregationQueryBuilder builder =
+        new AggregationQueryBuilder(config, ImmutableList.of(annotation1, annotation2));
 
     // Case 1: Ask for fields that should exist.
-    List<AggregationBuilder> aggs = builder.getAggregations(
-        ImmutableList.of("test1", "test2", "hasTest1")
-    );
+    List<AggregationBuilder> aggs =
+        builder.getAggregations(ImmutableList.of("test1", "test2", "hasTest1"));
     Assert.assertEquals(aggs.size(), 3);
     Set<String> facets = aggs.stream().map(AggregationBuilder::getName).collect(Collectors.toSet());
     Assert.assertEquals(ImmutableSet.of("test1", "test2", "hasTest1"), facets);
 
     // Case 2: Ask for fields that should NOT exist.
-    aggs = builder.getAggregations(
-        ImmutableList.of("hasTest2")
-    );
+    aggs = builder.getAggregations(ImmutableList.of("hasTest2"));
     Assert.assertEquals(aggs.size(), 0);
   }
 }
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java
index 34b98f38254cd..ab832eb1ac24f 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/AutocompleteRequestHandlerTest.java
@@ -1,10 +1,12 @@
 package com.linkedin.metadata.search.query.request;
 
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
 import com.linkedin.metadata.TestEntitySpecBuilder;
+import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler;
 import java.util.List;
 import java.util.Map;
-
-import com.linkedin.metadata.search.elasticsearch.query.request.AutocompleteRequestHandler;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.index.query.BoolQueryBuilder;
 import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder;
@@ -14,12 +16,9 @@
 import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-
 public class AutocompleteRequestHandlerTest {
-  private AutocompleteRequestHandler handler = AutocompleteRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec());
+  private AutocompleteRequestHandler handler =
+      AutocompleteRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec());
 
   @Test
   public void testDefaultAutocompleteRequest() {
@@ -38,7 +37,8 @@ public void testDefaultAutocompleteRequest() {
     assertTrue(queryFields.containsKey("keyPart1.ngram._4gram"));
     assertEquals(autocompleteQuery.type(), MultiMatchQueryBuilder.Type.BOOL_PREFIX);
 
-    MatchPhrasePrefixQueryBuilder prefixQuery = (MatchPhrasePrefixQueryBuilder) query.should().get(0);
+    MatchPhrasePrefixQueryBuilder prefixQuery =
+        (MatchPhrasePrefixQueryBuilder) query.should().get(0);
assertEquals("keyPart1.delimited", prefixQuery.fieldName()); assertEquals(query.mustNot().size(), 1); @@ -75,7 +75,8 @@ public void testAutocompleteRequestWithField() { assertTrue(queryFields.containsKey("field.ngram._4gram")); assertEquals(autocompleteQuery.type(), MultiMatchQueryBuilder.Type.BOOL_PREFIX); - MatchPhrasePrefixQueryBuilder prefixQuery = (MatchPhrasePrefixQueryBuilder) query.should().get(0); + MatchPhrasePrefixQueryBuilder prefixQuery = + (MatchPhrasePrefixQueryBuilder) query.should().get(0); assertEquals("field.delimited", prefixQuery.fieldName()); MatchQueryBuilder removedFilter = (MatchQueryBuilder) query.mustNot().get(0); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java index 6b6664ffdf30e..105ee2652dc30 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/CustomizedQueryHandlerTest.java @@ -1,14 +1,21 @@ package com.linkedin.metadata.search.query.request; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.BoolQueryConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.linkedin.metadata.config.search.custom.QueryConfiguration; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import com.linkedin.metadata.search.elasticsearch.query.request.CustomizedQueryHandler; import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.index.query.MatchAllQueryBuilder; @@ -17,172 +24,192 @@ import org.opensearch.index.query.functionscore.ScoreFunctionBuilders; import org.testng.annotations.Test; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - public class CustomizedQueryHandlerTest { - public static final ObjectMapper TEST_MAPPER = new YAMLMapper(); - private static final CustomSearchConfiguration TEST_CONFIG; - static { - try { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_test.yml"); - TEST_CONFIG = customConfiguration.resolve(TEST_MAPPER); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - public static final SearchQueryBuilder SEARCH_QUERY_BUILDER; - static { - SEARCH_QUERY_BUILDER = new SearchQueryBuilder(new SearchConfiguration(), TEST_CONFIG); - } - private static final List EXPECTED_CONFIGURATION = List.of( - QueryConfiguration.builder() - .queryRegex("[*]|") - .simpleQuery(false) - .exactMatchQuery(false) - .prefixMatchQuery(false) - 
.functionScore(Map.of("score_mode", "avg", "boost_mode", "multiply", - "functions", List.of( - Map.of( - "weight", 1, - "filter", Map.of("match_all", Map.of())), - Map.of( - "weight", 0.5, - "filter", Map.of("term", Map.of( - "materialized", Map.of("value", true) - ))), - Map.of( - "weight", 0.5, - "filter", Map.of("term", Map.of( - "deprecated", Map.of("value", true) - ))) - ))) - .build(), - QueryConfiguration.builder() - .queryRegex(".*") - .simpleQuery(true) - .exactMatchQuery(true) - .prefixMatchQuery(true) - .boolQuery(BoolQueryConfiguration.builder() - .must(List.of( - Map.of("term", Map.of("name", "{{query_string}}")) - )) - .build()) - .functionScore(Map.of("score_mode", "avg", "boost_mode", "multiply", - "functions", List.of( - Map.of( - "weight", 1, - "filter", Map.of("match_all", Map.of())), - Map.of( - "weight", 0.5, - "filter", Map.of("term", Map.of( - "materialized", Map.of("value", true) - ))), - Map.of( - "weight", 1.5, - "filter", Map.of("term", Map.of( - "deprecated", Map.of("value", false) - ))) - ))) - .build() - ); - - - @Test - public void configParsingTest() { - assertNotNull(TEST_CONFIG); - assertEquals(TEST_CONFIG.getQueryConfigurations(), EXPECTED_CONFIGURATION); + public static final ObjectMapper TEST_MAPPER = new YAMLMapper(); + private static final CustomSearchConfiguration TEST_CONFIG; + + static { + try { + CustomConfiguration customConfiguration = new CustomConfiguration(); + customConfiguration.setEnabled(true); + customConfiguration.setFile("search_config_test.yml"); + TEST_CONFIG = customConfiguration.resolve(TEST_MAPPER); + } catch (IOException e) { + throw new RuntimeException(e); } - - @Test - public void customizedQueryHandlerInitTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - - assertEquals(test.getQueryConfigurations().stream().map(e -> e.getKey().toString()).collect(Collectors.toList()), - List.of("[*]|", ".*")); - - assertEquals(test.getQueryConfigurations().stream() - .map(e -> Map.entry(e.getKey().toString(), e.getValue())) - .collect(Collectors.toList()), - EXPECTED_CONFIGURATION.stream() - .map(cfg -> Map.entry(cfg.getQueryRegex(), cfg)) - .collect(Collectors.toList())); - } - - @Test - public void patternMatchTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - - for (String selectAllQuery: List.of("*", "")) { - QueryConfiguration actual = test.lookupQueryConfig(selectAllQuery).get(); - assertEquals(actual, EXPECTED_CONFIGURATION.get(0), String.format("Failed to match: `%s`", selectAllQuery)); - } - - for (String otherQuery: List.of("foo", "bar")) { - QueryConfiguration actual = test.lookupQueryConfig(otherQuery).get(); - assertEquals(actual, EXPECTED_CONFIGURATION.get(1)); - } + } + + public static final SearchQueryBuilder SEARCH_QUERY_BUILDER; + + static { + SEARCH_QUERY_BUILDER = new SearchQueryBuilder(new SearchConfiguration(), TEST_CONFIG); + } + + private static final List EXPECTED_CONFIGURATION = + List.of( + QueryConfiguration.builder() + .queryRegex("[*]|") + .simpleQuery(false) + .exactMatchQuery(false) + .prefixMatchQuery(false) + .functionScore( + Map.of( + "score_mode", + "avg", + "boost_mode", + "multiply", + "functions", + List.of( + Map.of( + "weight", + 1, + "filter", + Map.of("match_all", Map.of())), + Map.of( + "weight", + 0.5, + "filter", + Map.of( + "term", Map.of("materialized", Map.of("value", true)))), + Map.of( + "weight", + 0.5, + "filter", + Map.of( + "term", + Map.of("deprecated", Map.of("value", true))))))) + .build(), + 
QueryConfiguration.builder() + .queryRegex(".*") + .simpleQuery(true) + .exactMatchQuery(true) + .prefixMatchQuery(true) + .boolQuery( + BoolQueryConfiguration.builder() + .must(List.of(Map.of("term", Map.of("name", "{{query_string}}")))) + .build()) + .functionScore( + Map.of( + "score_mode", + "avg", + "boost_mode", + "multiply", + "functions", + List.of( + Map.of( + "weight", + 1, + "filter", + Map.of("match_all", Map.of())), + Map.of( + "weight", + 0.5, + "filter", + Map.of( + "term", Map.of("materialized", Map.of("value", true)))), + Map.of( + "weight", + 1.5, + "filter", + Map.of( + "term", + Map.of("deprecated", Map.of("value", false))))))) + .build()); + + @Test + public void configParsingTest() { + assertNotNull(TEST_CONFIG); + assertEquals(TEST_CONFIG.getQueryConfigurations(), EXPECTED_CONFIGURATION); + } + + @Test + public void customizedQueryHandlerInitTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + + assertEquals( + test.getQueryConfigurations().stream() + .map(e -> e.getKey().toString()) + .collect(Collectors.toList()), + List.of("[*]|", ".*")); + + assertEquals( + test.getQueryConfigurations().stream() + .map(e -> Map.entry(e.getKey().toString(), e.getValue())) + .collect(Collectors.toList()), + EXPECTED_CONFIGURATION.stream() + .map(cfg -> Map.entry(cfg.getQueryRegex(), cfg)) + .collect(Collectors.toList())); + } + + @Test + public void patternMatchTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + + for (String selectAllQuery : List.of("*", "")) { + QueryConfiguration actual = test.lookupQueryConfig(selectAllQuery).get(); + assertEquals( + actual, + EXPECTED_CONFIGURATION.get(0), + String.format("Failed to match: `%s`", selectAllQuery)); } - @Test - public void functionScoreQueryBuilderTest() { - CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); - MatchAllQueryBuilder inputQuery = QueryBuilders.matchAllQuery(); - - /* - * Test select star - */ - FunctionScoreQueryBuilder selectStarTest = SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(test.lookupQueryConfig("*").get(), - inputQuery); - - FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedSelectStarScoreFunctions = { - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - ScoreFunctionBuilders.weightFactorFunction(1f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("materialized", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("deprecated", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ) - }; - FunctionScoreQueryBuilder expectedSelectStar = new FunctionScoreQueryBuilder(expectedSelectStarScoreFunctions) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) - .boostMode(CombineFunction.MULTIPLY); - - assertEquals(selectStarTest, expectedSelectStar); - - /* - * Test default (non-select start) - */ - FunctionScoreQueryBuilder defaultTest = SEARCH_QUERY_BUILDER.functionScoreQueryBuilder(test.lookupQueryConfig("foobar").get(), inputQuery); - - FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedDefaultScoreFunctions = { - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - ScoreFunctionBuilders.weightFactorFunction(1f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - QueryBuilders.termQuery("materialized", true), - ScoreFunctionBuilders.weightFactorFunction(0.5f) - ), - new FunctionScoreQueryBuilder.FilterFunctionBuilder( - 
QueryBuilders.termQuery("deprecated", false), - ScoreFunctionBuilders.weightFactorFunction(1.5f) - ) - }; - FunctionScoreQueryBuilder expectedDefault = new FunctionScoreQueryBuilder(expectedDefaultScoreFunctions) - .scoreMode(FunctionScoreQuery.ScoreMode.AVG) - .boostMode(CombineFunction.MULTIPLY); - - assertEquals(defaultTest, expectedDefault); + for (String otherQuery : List.of("foo", "bar")) { + QueryConfiguration actual = test.lookupQueryConfig(otherQuery).get(); + assertEquals(actual, EXPECTED_CONFIGURATION.get(1)); } + } + + @Test + public void functionScoreQueryBuilderTest() { + CustomizedQueryHandler test = CustomizedQueryHandler.builder(TEST_CONFIG).build(); + MatchAllQueryBuilder inputQuery = QueryBuilders.matchAllQuery(); + + /* + * Test select star + */ + FunctionScoreQueryBuilder selectStarTest = + SEARCH_QUERY_BUILDER.functionScoreQueryBuilder( + test.lookupQueryConfig("*").get(), inputQuery); + + FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedSelectStarScoreFunctions = { + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("materialized", true), + ScoreFunctionBuilders.weightFactorFunction(0.5f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("deprecated", true), + ScoreFunctionBuilders.weightFactorFunction(0.5f)) + }; + FunctionScoreQueryBuilder expectedSelectStar = + new FunctionScoreQueryBuilder(expectedSelectStarScoreFunctions) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) + .boostMode(CombineFunction.MULTIPLY); + + assertEquals(selectStarTest, expectedSelectStar); + + /* + * Test default (non-select start) + */ + FunctionScoreQueryBuilder defaultTest = + SEARCH_QUERY_BUILDER.functionScoreQueryBuilder( + test.lookupQueryConfig("foobar").get(), inputQuery); + + FunctionScoreQueryBuilder.FilterFunctionBuilder[] expectedDefaultScoreFunctions = { + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + ScoreFunctionBuilders.weightFactorFunction(1f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("materialized", true), + ScoreFunctionBuilders.weightFactorFunction(0.5f)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder( + QueryBuilders.termQuery("deprecated", false), + ScoreFunctionBuilders.weightFactorFunction(1.5f)) + }; + FunctionScoreQueryBuilder expectedDefault = + new FunctionScoreQueryBuilder(expectedDefaultScoreFunctions) + .scoreMode(FunctionScoreQuery.ScoreMode.AVG) + .boostMode(CombineFunction.MULTIPLY); + + assertEquals(defaultTest, expectedDefault); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java index 9c0815efdc8b4..42f4f480bfbdd 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java @@ -1,23 +1,33 @@ package com.linkedin.metadata.search.query.request; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER; +import static 
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java
index 9c0815efdc8b4..42f4f480bfbdd 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchQueryBuilderTest.java
@@ -1,23 +1,33 @@
 package com.linkedin.metadata.search.query.request;
 
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.URN_SEARCH_ANALYZER;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
+
+import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
+import com.google.common.collect.ImmutableList;
 import com.linkedin.data.schema.DataSchema;
 import com.linkedin.data.schema.PathSpec;
-import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig;
-import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder;
-import io.datahubproject.test.search.config.SearchCommonTestConfiguration;
+import com.linkedin.metadata.TestEntitySpecBuilder;
 import com.linkedin.metadata.config.search.CustomConfiguration;
 import com.linkedin.metadata.config.search.ExactMatchConfiguration;
 import com.linkedin.metadata.config.search.PartialConfiguration;
 import com.linkedin.metadata.config.search.SearchConfiguration;
 import com.linkedin.metadata.config.search.WordGramConfiguration;
 import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
-import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
-import com.google.common.collect.ImmutableList;
-import com.linkedin.metadata.TestEntitySpecBuilder;
-
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.SearchableFieldSpec;
 import com.linkedin.metadata.models.annotation.SearchableAnnotation;
+import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.search.elasticsearch.query.request.SearchFieldConfig;
+import com.linkedin.metadata.search.elasticsearch.query.request.SearchQueryBuilder;
+import com.linkedin.util.Pair;
+import io.datahubproject.test.search.config.SearchCommonTestConfiguration;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -25,9 +35,7 @@
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
-import com.linkedin.metadata.models.registry.EntityRegistry;
-import com.linkedin.util.Pair;
+import org.mockito.Mockito;
 import org.opensearch.index.query.BoolQueryBuilder;
 import org.opensearch.index.query.MatchAllQueryBuilder;
 import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder;
@@ -37,28 +45,18 @@
 import org.opensearch.index.query.SimpleQueryStringBuilder;
 import org.opensearch.index.query.TermQueryBuilder;
 import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder;
-import org.mockito.Mockito;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Import;
 import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
 import org.testng.annotations.Test;
 
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
-import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.TEXT_SEARCH_ANALYZER;
-import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.URN_SEARCH_ANALYZER;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-
 @Import(SearchCommonTestConfiguration.class)
 public class SearchQueryBuilderTest extends AbstractTestNGSpringContextTests {
-  @Autowired
-  private EntityRegistry entityRegistry;
+  @Autowired private EntityRegistry entityRegistry;
 
   public static SearchConfiguration testQueryConfig;
+
   static {
     testQueryConfig = new SearchConfiguration();
     testQueryConfig.setMaxTermBucketSize(20);
@@ -84,25 +82,31 @@ public class SearchQueryBuilderTest extends AbstractTestNGSpringContextTests {
     testQueryConfig.setWordGram(wordGramConfiguration);
     testQueryConfig.setPartial(partialConfiguration);
   }
-  public static final SearchQueryBuilder TEST_BUILDER = new SearchQueryBuilder(testQueryConfig, null);
+
+  public static final SearchQueryBuilder TEST_BUILDER =
+      new SearchQueryBuilder(testQueryConfig, null);
 
   @Test
   public void testQueryBuilderFulltext() {
     FunctionScoreQueryBuilder result =
-        (FunctionScoreQueryBuilder) TEST_BUILDER.buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery",
-            true);
+        (FunctionScoreQueryBuilder)
+            TEST_BUILDER.buildQuery(
+                ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", true);
     BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query();
     List<QueryBuilder> shouldQueries = mainQuery.should();
     assertEquals(shouldQueries.size(), 2);
 
     BoolQueryBuilder analyzerGroupQuery = (BoolQueryBuilder) shouldQueries.get(0);
-    SimpleQueryStringBuilder keywordQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(0);
+    SimpleQueryStringBuilder keywordQuery =
+        (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(0);
     assertEquals(keywordQuery.value(), "testQuery");
     assertEquals(keywordQuery.analyzer(), "keyword");
     Map<String, Float> keywordFields = keywordQuery.fields();
     assertEquals(keywordFields.size(), 9);
-    assertEquals(keywordFields, Map.of(
+    assertEquals(
+        keywordFields,
+        Map.of(
             "urn", 10.f,
             "textArrayField", 1.0f,
             "customProperties", 1.0f,
@@ -111,47 +115,55 @@ public void testQueryBuilderFulltext() {
             "textFieldOverride", 1.0f,
             "nestedArrayStringField", 1.0f,
             "keyPart1", 10.0f,
-            "esObjectField", 1.0f
-    ));
+            "esObjectField", 1.0f));
 
-    SimpleQueryStringBuilder urnComponentQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(1);
+    SimpleQueryStringBuilder urnComponentQuery =
+        (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(1);
     assertEquals(urnComponentQuery.value(), "testQuery");
     assertEquals(urnComponentQuery.analyzer(), URN_SEARCH_ANALYZER);
-    assertEquals(urnComponentQuery.fields(), Map.of(
+    assertEquals(
+        urnComponentQuery.fields(),
+        Map.of(
             "nestedForeignKey", 1.0f,
-            "foreignKey", 1.0f
-    ));
+            "foreignKey", 1.0f));
 
-    SimpleQueryStringBuilder fulltextQuery = (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(2);
+    SimpleQueryStringBuilder fulltextQuery =
+        (SimpleQueryStringBuilder) analyzerGroupQuery.should().get(2);
     assertEquals(fulltextQuery.value(), "testQuery");
     assertEquals(fulltextQuery.analyzer(), TEXT_SEARCH_ANALYZER);
-    assertEquals(fulltextQuery.fields(), Map.of(
+    assertEquals(
+        fulltextQuery.fields(),
+        Map.of(
             "textFieldOverride.delimited", 0.4f,
             "keyPart1.delimited", 4.0f,
             "nestedArrayArrayField.delimited", 0.4f,
             "urn.delimited", 7.0f,
             "textArrayField.delimited", 0.4f,
             "nestedArrayStringField.delimited", 0.4f,
-            "wordGramField.delimited", 0.4f
-    ));
+            "wordGramField.delimited", 0.4f));
 
     BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(1);
     assertTrue(boolPrefixQuery.should().size() > 0);
 
-    List<Pair<String, Float>> prefixFieldWeights = boolPrefixQuery.should().stream().map(prefixQuery -> {
-      if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) {
-        MatchPhrasePrefixQueryBuilder builder = (MatchPhrasePrefixQueryBuilder) prefixQuery;
-        return Pair.of(builder.fieldName(), builder.boost());
-      } else if (prefixQuery instanceof TermQueryBuilder) {
-        // exact
-        TermQueryBuilder builder = (TermQueryBuilder) prefixQuery;
-        return Pair.of(builder.fieldName(), builder.boost());
-      } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) {
-        // ngram
-        MatchPhraseQueryBuilder builder = (MatchPhraseQueryBuilder) prefixQuery;
-        return Pair.of(builder.fieldName(), builder.boost());
-      }
-    }).collect(Collectors.toList());
+    List<Pair<String, Float>> prefixFieldWeights =
+        boolPrefixQuery.should().stream()
+            .map(
+                prefixQuery -> {
+                  if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) {
+                    MatchPhrasePrefixQueryBuilder builder =
+                        (MatchPhrasePrefixQueryBuilder) prefixQuery;
+                    return Pair.of(builder.fieldName(), builder.boost());
+                  } else if (prefixQuery instanceof TermQueryBuilder) {
+                    // exact
+                    TermQueryBuilder builder = (TermQueryBuilder) prefixQuery;
+                    return Pair.of(builder.fieldName(), builder.boost());
+                  } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) {
+                    // ngram
+                    MatchPhraseQueryBuilder builder = (MatchPhraseQueryBuilder) prefixQuery;
+                    return Pair.of(builder.fieldName(), builder.boost());
+                  }
+                })
+            .collect(Collectors.toList());
 
     assertEquals(prefixFieldWeights.size(), 28);
 
@@ -165,19 +177,21 @@ public void testQueryBuilderFulltext() {
         Pair.of("wordGramField.wordGrams3", 2.25f),
         Pair.of("wordGramField.wordGrams4", 3.2399998f),
         Pair.of("wordGramField.keyword", 10.0f),
-        Pair.of("wordGramField.keyword", 7.0f)
-    ).forEach(p -> assertTrue(prefixFieldWeights.contains(p), "Missing: " + p));
+            Pair.of("wordGramField.keyword", 7.0f))
+        .forEach(p -> assertTrue(prefixFieldWeights.contains(p), "Missing: " + p));
 
     // Validate scorer
-    FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = result.filterFunctionBuilders();
+    FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions =
+        result.filterFunctionBuilders();
     assertEquals(scoringFunctions.length, 3);
   }
 
   @Test
   public void testQueryBuilderStructured() {
     FunctionScoreQueryBuilder result =
-        (FunctionScoreQueryBuilder) TEST_BUILDER.buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()),
-            "testQuery", false);
+        (FunctionScoreQueryBuilder)
+            TEST_BUILDER.buildQuery(
+                ImmutableList.of(TestEntitySpecBuilder.getSpec()), "testQuery", false);
     BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query();
     List<QueryBuilder> shouldQueries = mainQuery.should();
     assertEquals(shouldQueries.size(), 2);
@@ -194,17 +208,20 @@ public void testQueryBuilderStructured() {
     assertEquals(keywordFields.get("esObjectField").floatValue(), 1.0f);
 
     // Validate scorer
-    FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions = result.filterFunctionBuilders();
+    FunctionScoreQueryBuilder.FilterFunctionBuilder[] scoringFunctions =
+        result.filterFunctionBuilders();
     assertEquals(scoringFunctions.length, 3);
   }
 
   private static final SearchQueryBuilder TEST_CUSTOM_BUILDER;
+
   static {
     try {
       CustomConfiguration customConfiguration = new CustomConfiguration();
       customConfiguration.setEnabled(true);
       customConfiguration.setFile("search_config_builder_test.yml");
-      CustomSearchConfiguration customSearchConfiguration = customConfiguration.resolve(new YAMLMapper());
+      CustomSearchConfiguration customSearchConfiguration =
+          customConfiguration.resolve(new YAMLMapper());
       TEST_CUSTOM_BUILDER = new SearchQueryBuilder(testQueryConfig, customSearchConfiguration);
     } catch (IOException e) {
       throw new RuntimeException(e);
@@ -214,8 +231,10 @@ public void testQueryBuilderStructured() {
 
   @Test
   public void testCustomSelectAll() {
     for (String triggerQuery : List.of("*", "")) {
-      FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER
-          .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true);
+      FunctionScoreQueryBuilder result =
+          (FunctionScoreQueryBuilder)
+              TEST_CUSTOM_BUILDER.buildQuery(
+                  ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true);
 
       BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query();
       List<QueryBuilder> shouldQueries = mainQuery.should();
@@ -226,8 +245,10 @@ public void testCustomSelectAll() {
   @Test
   public void testCustomExactMatch() {
     for (String triggerQuery : List.of("test_table", "'single quoted'", "\"double quoted\"")) {
-      FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER
-          .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true);
+      FunctionScoreQueryBuilder result =
+          (FunctionScoreQueryBuilder)
+              TEST_CUSTOM_BUILDER.buildQuery(
+                  ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true);
 
       BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query();
       List<QueryBuilder> shouldQueries = mainQuery.should();
@@ -236,18 +257,22 @@ public void testCustomExactMatch() {
       BoolQueryBuilder boolPrefixQuery = (BoolQueryBuilder) shouldQueries.get(0);
       assertTrue(boolPrefixQuery.should().size() > 0);
 
-      List<QueryBuilder> queries = boolPrefixQuery.should().stream().map(prefixQuery -> {
-        if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) {
-          // prefix
-          return (MatchPhrasePrefixQueryBuilder) prefixQuery;
-        } else if (prefixQuery instanceof TermQueryBuilder) {
-          // exact
-          return (TermQueryBuilder) prefixQuery;
-        } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) {
-          // ngram
-          return (MatchPhraseQueryBuilder) prefixQuery;
-        }
-      }).collect(Collectors.toList());
+      List<QueryBuilder> queries =
+          boolPrefixQuery.should().stream()
+              .map(
+                  prefixQuery -> {
+                    if (prefixQuery instanceof MatchPhrasePrefixQueryBuilder) {
+                      // prefix
+                      return (MatchPhrasePrefixQueryBuilder) prefixQuery;
+                    } else if (prefixQuery instanceof TermQueryBuilder) {
+                      // exact
+                      return (TermQueryBuilder) prefixQuery;
+                    } else { // if (prefixQuery instanceof MatchPhraseQueryBuilder) {
+                      // ngram
+                      return (MatchPhraseQueryBuilder) prefixQuery;
+                    }
+                  })
+              .collect(Collectors.toList());
 
       assertFalse(queries.isEmpty(), "Expected queries with specific types");
     }
@@ -256,24 +281,30 @@ public void testCustomExactMatch() {
   @Test
   public void testCustomDefault() {
     for (String triggerQuery : List.of("foo", "bar", "foo\"bar", "foo:bar")) {
-      FunctionScoreQueryBuilder result = (FunctionScoreQueryBuilder) TEST_CUSTOM_BUILDER
-          .buildQuery(ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true);
+      FunctionScoreQueryBuilder result =
+          (FunctionScoreQueryBuilder)
+              TEST_CUSTOM_BUILDER.buildQuery(
+                  ImmutableList.of(TestEntitySpecBuilder.getSpec()), triggerQuery, true);
 
       BoolQueryBuilder mainQuery = (BoolQueryBuilder) result.query();
       List<QueryBuilder> shouldQueries = mainQuery.should();
       assertEquals(shouldQueries.size(), 3);
 
-      List<QueryBuilder> queries = mainQuery.should().stream().map(query -> {
-        if (query instanceof SimpleQueryStringBuilder) {
-          return (SimpleQueryStringBuilder) query;
-        } else if (query instanceof MatchAllQueryBuilder) {
-          // custom
-          return (MatchAllQueryBuilder) query;
-        } else {
-          // exact
-          return (BoolQueryBuilder) query;
-        }
-      }).collect(Collectors.toList());
+      List<QueryBuilder> queries =
+          mainQuery.should().stream()
+              .map(
+                  query -> {
+                    if (query instanceof SimpleQueryStringBuilder) {
+                      return (SimpleQueryStringBuilder) query;
+                    } else if (query instanceof MatchAllQueryBuilder) {
+                      // custom
+                      return (MatchAllQueryBuilder) query;
+                    } else {
+                      // exact
+                      return (BoolQueryBuilder) query;
+                    }
+                  })
+              .collect(Collectors.toList());
 
       assertEquals(queries.size(), 3, "Expected queries with specific types");
@@ -287,41 +318,52 @@ public void testCustomDefault() {
     }
   }
 
-  /**
-   * Tests to make sure that the fields are correctly combined across search-able entities
-   */
+  /** Tests to make sure that the fields are correctly combined across search-able entities */
   @Test
   public void testGetStandardFieldsEntitySpec() {
-    List<EntitySpec> entitySpecs = Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream())
+    List<EntitySpec> entitySpecs =
+        Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream())
             .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", ""))
             .map(entityRegistry::getEntitySpec)
             .collect(Collectors.toList());
     assertTrue(entitySpecs.size() > 30, "Expected at least 30 searchable entities in the registry");
 
     // Count of the distinct field names
-    Set<String> expectedFieldNames = Stream.concat(
-        // Standard urn fields plus entitySpec sourced fields
-        Stream.of("urn", "urn.delimited"),
-        entitySpecs.stream()
-            .flatMap(spec -> TEST_CUSTOM_BUILDER.getFieldsFromEntitySpec(spec).stream())
-            .map(SearchFieldConfig::fieldName))
+    Set<String> expectedFieldNames =
+        Stream.concat(
+                // Standard urn fields plus entitySpec sourced fields
+                Stream.of("urn", "urn.delimited"),
+                entitySpecs.stream()
+                    .flatMap(spec -> TEST_CUSTOM_BUILDER.getFieldsFromEntitySpec(spec).stream())
+                    .map(SearchFieldConfig::fieldName))
             .collect(Collectors.toSet());
 
-    Set<String> actualFieldNames = TEST_CUSTOM_BUILDER.getStandardFields(entitySpecs).stream()
+    Set<String> actualFieldNames =
+        TEST_CUSTOM_BUILDER.getStandardFields(entitySpecs).stream()
             .map(SearchFieldConfig::fieldName)
             .collect(Collectors.toSet());
 
-    assertEquals(actualFieldNames, expectedFieldNames,
-        String.format("Missing: %s Extra: %s",
-            expectedFieldNames.stream().filter(f -> !actualFieldNames.contains(f)).collect(Collectors.toSet()),
-            actualFieldNames.stream().filter(f -> !expectedFieldNames.contains(f)).collect(Collectors.toSet())));
+    assertEquals(
+        actualFieldNames,
+        expectedFieldNames,
+        String.format(
+            "Missing: %s Extra: %s",
+            expectedFieldNames.stream()
+                .filter(f -> !actualFieldNames.contains(f))
+                .collect(Collectors.toSet()),
+            actualFieldNames.stream()
+                .filter(f -> !expectedFieldNames.contains(f))
+                .collect(Collectors.toSet())));
   }
 
   @Test
   public void testGetStandardFields() {
-    Set<SearchFieldConfig> fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec()));
+    Set<SearchFieldConfig> fieldConfigs =
+        TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec()));
     assertEquals(fieldConfigs.size(), 21);
-    assertEquals(fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of(
+    assertEquals(
+        fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()),
+        Set.of(
             "nestedArrayArrayField",
             "esObjectField",
             "foreignKey",
@@ -344,45 +386,90 @@ public void testGetStandardFields() {
             "urn",
             "wordGramField.wordGrams2"));
 
-    assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("keyPart1")).findFirst().map(SearchFieldConfig::boost), Optional.of(
-        10.0F));
-    assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("nestedForeignKey")).findFirst().map(SearchFieldConfig::boost), Optional.of(
-        1.0F));
-    assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("textFieldOverride")).findFirst().map(SearchFieldConfig::boost), Optional.of(
-        1.0F));
+    assertEquals(
+        fieldConfigs.stream()
+            .filter(field -> field.fieldName().equals("keyPart1"))
+            .findFirst()
+            .map(SearchFieldConfig::boost),
+        Optional.of(10.0F));
+    assertEquals(
+        fieldConfigs.stream()
+            .filter(field -> field.fieldName().equals("nestedForeignKey"))
+            .findFirst()
+            .map(SearchFieldConfig::boost),
+        Optional.of(1.0F));
+    assertEquals(
+        fieldConfigs.stream()
+            .filter(field -> field.fieldName().equals("textFieldOverride"))
+            .findFirst()
+            .map(SearchFieldConfig::boost),
+        Optional.of(1.0F));
 
     EntitySpec mockEntitySpec = Mockito.mock(EntitySpec.class);
-    Mockito.when(mockEntitySpec.getSearchableFieldSpecs()).thenReturn(List.of(
-        new SearchableFieldSpec(
+    Mockito.when(mockEntitySpec.getSearchableFieldSpecs())
+        .thenReturn(
+            List.of(
+                new SearchableFieldSpec(
                     Mockito.mock(PathSpec.class),
-            new SearchableAnnotation("fieldDoesntExistInOriginal",
-                SearchableAnnotation.FieldType.TEXT,
-                true, true, false, false,
-                Optional.empty(), Optional.empty(), 13.0,
-                Optional.empty(), Optional.empty(), Map.of(), List.of()),
+                    new SearchableAnnotation(
+                        "fieldDoesntExistInOriginal",
+                        SearchableAnnotation.FieldType.TEXT,
+                        true,
+                        true,
+                        false,
+                        false,
+                        Optional.empty(),
+                        Optional.empty(),
+                        13.0,
+                        Optional.empty(),
+                        Optional.empty(),
+                        Map.of(),
+                        List.of()),
                     Mockito.mock(DataSchema.class)),
-        new SearchableFieldSpec(
+                new SearchableFieldSpec(
                     Mockito.mock(PathSpec.class),
-            new SearchableAnnotation("keyPart1",
-                SearchableAnnotation.FieldType.KEYWORD,
-                true, true, false, false,
-                Optional.empty(), Optional.empty(), 20.0,
-                Optional.empty(), Optional.empty(), Map.of(), List.of()),
+                    new SearchableAnnotation(
+                        "keyPart1",
+                        SearchableAnnotation.FieldType.KEYWORD,
+                        true,
+                        true,
+                        false,
+                        false,
+                        Optional.empty(),
+                        Optional.empty(),
+                        20.0,
+                        Optional.empty(),
+                        Optional.empty(),
+                        Map.of(),
+                        List.of()),
                     Mockito.mock(DataSchema.class)),
-        new SearchableFieldSpec(
+                new SearchableFieldSpec(
                     Mockito.mock(PathSpec.class),
-            new SearchableAnnotation("textFieldOverride",
-                SearchableAnnotation.FieldType.WORD_GRAM,
-                true, true, false, false,
-                Optional.empty(), Optional.empty(), 3.0,
-                Optional.empty(), Optional.empty(), Map.of(), List.of()),
-            Mockito.mock(DataSchema.class)))
-    );
-
-    fieldConfigs = TEST_CUSTOM_BUILDER.getStandardFields(ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec));
-    // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the textFieldOverride
+                    new SearchableAnnotation(
+                        "textFieldOverride",
+                        SearchableAnnotation.FieldType.WORD_GRAM,
+                        true,
+                        true,
+                        false,
+                        false,
+                        Optional.empty(),
+                        Optional.empty(),
+                        3.0,
+                        Optional.empty(),
+                        Optional.empty(),
+                        Map.of(),
+                        List.of()),
+                    Mockito.mock(DataSchema.class))));
+
+    fieldConfigs =
+        TEST_CUSTOM_BUILDER.getStandardFields(
+            ImmutableList.of(TestEntitySpecBuilder.getSpec(), mockEntitySpec));
+    // Same 21 from the original entity + newFieldNotInOriginal + 3 word gram fields from the
+    // textFieldOverride
     assertEquals(fieldConfigs.size(), 26);
-    assertEquals(fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()), Set.of(
+    assertEquals(
+        fieldConfigs.stream().map(SearchFieldConfig::fieldName).collect(Collectors.toSet()),
+        Set.of(
            "nestedArrayArrayField",
            "esObjectField",
            "foreignKey",
@@ -411,13 +498,25 @@
            "textFieldOverride.wordGrams4"));
 
     // Field which only exists in first one: Should be the same
-    assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("nestedForeignKey")).findFirst().map(SearchFieldConfig::boost), Optional.of(
-        1.0F));
+    assertEquals(
+        fieldConfigs.stream()
+            .filter(field -> field.fieldName().equals("nestedForeignKey"))
+            .findFirst()
+            .map(SearchFieldConfig::boost),
+        Optional.of(1.0F));
     // Average boost value: 10 vs. 20 -> 15
-    assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("keyPart1")).findFirst().map(SearchFieldConfig::boost), Optional.of(
-        15.0F));
+    assertEquals(
+        fieldConfigs.stream()
+            .filter(field -> field.fieldName().equals("keyPart1"))
+            .findFirst()
+            .map(SearchFieldConfig::boost),
+        Optional.of(15.0F));
     // Field which added word gram fields: Original boost should be boost value averaged
-    assertEquals(fieldConfigs.stream().filter(field -> field.fieldName().equals("textFieldOverride")).findFirst().map(SearchFieldConfig::boost), Optional.of(
-        2.0F));
+    assertEquals(
+        fieldConfigs.stream()
+            .filter(field -> field.fieldName().equals("textFieldOverride"))
+            .findFirst()
+            .map(SearchFieldConfig::boost),
+        Optional.of(2.0F));
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java
index 0ea035a10f91d..3afb04afb917b 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java
@@ -1,14 +1,26 @@
 package com.linkedin.metadata.search.query.request;
 
+import static com.linkedin.metadata.utils.SearchUtil.*;
+import static org.testng.Assert.*;
+
+import com.google.common.collect.ImmutableList;
+import com.linkedin.data.template.StringArray;
+import com.linkedin.metadata.TestEntitySpecBuilder;
 import com.linkedin.metadata.config.search.ExactMatchConfiguration;
 import com.linkedin.metadata.config.search.PartialConfiguration;
 import com.linkedin.metadata.config.search.SearchConfiguration;
-import com.google.common.collect.ImmutableList;
-import com.linkedin.data.template.StringArray;
+import com.linkedin.metadata.config.search.WordGramConfiguration;
+import com.linkedin.metadata.models.EntitySpec;
+import com.linkedin.metadata.models.registry.EntityRegistry;
+import com.linkedin.metadata.query.SearchFlags;
+import com.linkedin.metadata.query.filter.Condition;
+import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
+import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
+import com.linkedin.metadata.query.filter.Criterion;
+import com.linkedin.metadata.query.filter.CriterionArray;
+import com.linkedin.metadata.query.filter.Filter;
 import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler;
 import io.datahubproject.test.search.config.SearchCommonTestConfiguration;
-import com.linkedin.metadata.TestEntitySpecBuilder;
-import com.linkedin.metadata.config.search.WordGramConfiguration;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -19,16 +31,6 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
-
-import com.linkedin.metadata.models.EntitySpec;
-import com.linkedin.metadata.models.registry.EntityRegistry;
-import com.linkedin.metadata.query.SearchFlags;
-import com.linkedin.metadata.query.filter.Condition;
-import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
-import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
-import com.linkedin.metadata.query.filter.Criterion;
-import com.linkedin.metadata.query.filter.CriterionArray;
-import com.linkedin.metadata.query.filter.Filter;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.index.query.BoolQueryBuilder;
 import org.opensearch.index.query.ExistsQueryBuilder;
@@ -45,16 +47,12 @@
 import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.utils.SearchUtil.*;
-import static org.testng.Assert.*;
-
-
 @Import(SearchCommonTestConfiguration.class)
 public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests {
-  @Autowired
-  private EntityRegistry entityRegistry;
+  @Autowired private EntityRegistry entityRegistry;
 
   public static SearchConfiguration testQueryConfig;
+
   static {
     testQueryConfig = new SearchConfiguration();
     testQueryConfig.setMaxTermBucketSize(20);
@@ -84,29 +82,42 @@ public class SearchRequestHandlerTest extends AbstractTestNGSpringContextTests {
   @Test
   public void testDatasetFieldsAndHighlights() {
     EntitySpec entitySpec = entityRegistry.getEntitySpec("dataset");
-    SearchRequestHandler datasetHandler = SearchRequestHandler.getBuilder(entitySpec, testQueryConfig, null);
+    SearchRequestHandler datasetHandler =
+        SearchRequestHandler.getBuilder(entitySpec, testQueryConfig, null);
 
     /*
-      Ensure efficient query performance, we do not expect upstream/downstream/fineGrained lineage
-     */
-    List<String> highlightFields = datasetHandler.getHighlights().fields().stream()
+    Ensure efficient query performance, we do not expect upstream/downstream/fineGrained lineage
+    */
+    List<String> highlightFields =
+        datasetHandler.getHighlights().fields().stream()
            .map(HighlightBuilder.Field::name)
            .collect(Collectors.toList());
-    assertTrue(highlightFields.stream().noneMatch(
-        fieldName -> fieldName.contains("upstream") || fieldName.contains("downstream")
-    ), "unexpected lineage fields in highlights: " + highlightFields);
+    assertTrue(
+        highlightFields.stream()
+            .noneMatch(
+                fieldName -> fieldName.contains("upstream") || fieldName.contains("downstream")),
+        "unexpected lineage fields in highlights: " + highlightFields);
   }
 
   @Test
   public void testSearchRequestHandlerHighlightingTurnedOff() {
-    SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
-    SearchRequest searchRequest = requestHandler.getSearchRequest("testQuery", null, null, 0,
-        10, new SearchFlags().setFulltext(false).setSkipHighlighting(true), null);
+    SearchRequestHandler requestHandler =
+        SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+    SearchRequest searchRequest =
+        requestHandler.getSearchRequest(
+            "testQuery",
+            null,
+            null,
+            0,
+            10,
+            new SearchFlags().setFulltext(false).setSkipHighlighting(true),
+            null);
     SearchSourceBuilder sourceBuilder = searchRequest.source();
     assertEquals(sourceBuilder.from(), 0);
     assertEquals(sourceBuilder.size(), 10);
     // Filters
-    Collection<AggregationBuilder> aggBuilders = sourceBuilder.aggregations().getAggregatorFactories();
+    Collection<AggregationBuilder> aggBuilders =
+        sourceBuilder.aggregations().getAggregatorFactories();
     // Expect 2 aggregations: textFieldOverride and _index
     assertEquals(aggBuilders.size(), 2);
     for (AggregationBuilder aggBuilder : aggBuilders) {
@@ -123,44 +134,73 @@ public void testSearchRequestHandlerHighlightingTurnedOff() {
 
   @Test
   public void testSearchRequestHandler() {
-    SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
-    SearchRequest searchRequest = requestHandler.getSearchRequest("testQuery", null, null, 0,
-        10, new SearchFlags().setFulltext(false), null);
+    SearchRequestHandler requestHandler =
+        SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+    SearchRequest searchRequest =
+        requestHandler.getSearchRequest(
+            "testQuery", null, null, 0, 10, new SearchFlags().setFulltext(false), null);
     SearchSourceBuilder sourceBuilder = searchRequest.source();
     assertEquals(sourceBuilder.from(), 0);
     assertEquals(sourceBuilder.size(), 10);
     // Filters
-    Collection<AggregationBuilder> aggBuilders = sourceBuilder.aggregations().getAggregatorFactories();
+    Collection<AggregationBuilder> aggBuilders =
+        sourceBuilder.aggregations().getAggregatorFactories();
     // Expect 2 aggregations: textFieldOverride and _index
-    assertEquals(aggBuilders.size(), 2);
-    for (AggregationBuilder aggBuilder : aggBuilders) {
-      if (aggBuilder.getName().equals("textFieldOverride")) {
-        TermsAggregationBuilder filterPanelBuilder = (TermsAggregationBuilder) aggBuilder;
-        assertEquals(filterPanelBuilder.field(), "textFieldOverride.keyword");
-      } else if (!aggBuilder.getName().equals("_entityType")) {
-        fail("Found unexepected aggregation: " + aggBuilder.getName());
-      }
-    }
+    assertEquals(aggBuilders.size(), 2);
+    for (AggregationBuilder aggBuilder : aggBuilders) {
+      if (aggBuilder.getName().equals("textFieldOverride")) {
+        TermsAggregationBuilder filterPanelBuilder = (TermsAggregationBuilder) aggBuilder;
+        assertEquals(filterPanelBuilder.field(), "textFieldOverride.keyword");
+      } else if (!aggBuilder.getName().equals("_entityType")) {
+        fail("Found unexpected aggregation: " + aggBuilder.getName());
+      }
+    }
     // Highlights
     HighlightBuilder highlightBuilder = sourceBuilder.highlighter();
     List<String> fields =
-        highlightBuilder.fields().stream().map(HighlightBuilder.Field::name).collect(Collectors.toList());
+        highlightBuilder.fields().stream()
+            .map(HighlightBuilder.Field::name)
+            .collect(Collectors.toList());
     assertEquals(fields.size(), 22);
     List<String> highlightableFields =
-        ImmutableList.of("keyPart1", "textArrayField", "textFieldOverride", "foreignKey", "nestedForeignKey",
-            "nestedArrayStringField", "nestedArrayArrayField", "customProperties", "esObjectField", "wordGramField");
-    highlightableFields.forEach(field -> {
-      assertTrue(fields.contains(field), "Missing: " + field);
-      assertTrue(fields.contains(field + ".*"), "Missing: " + field + ".*");
-    });
+        ImmutableList.of(
+            "keyPart1",
+            "textArrayField",
+            "textFieldOverride",
+            "foreignKey",
+            "nestedForeignKey",
+            "nestedArrayStringField",
+            "nestedArrayArrayField",
+            "customProperties",
+            "esObjectField",
+            "wordGramField");
+    highlightableFields.forEach(
+        field -> {
+          assertTrue(fields.contains(field), "Missing: " + field);
+          assertTrue(fields.contains(field + ".*"), "Missing: " + field + ".*");
+        });
   }
 
   @Test
   public void testAggregationsInSearch() {
-    SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
-    final String nestedAggString = String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR);
-    SearchRequest searchRequest = requestHandler.getSearchRequest("*", null, null, 0,
-        10, new SearchFlags().setFulltext(true), List.of("textFieldOverride", "_entityType", nestedAggString, AGGREGATION_SEPARATOR_CHAR, "not_a_facet"));
+    SearchRequestHandler requestHandler =
+        SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+    final String nestedAggString =
+        String.format("_entityType%stextFieldOverride", AGGREGATION_SEPARATOR_CHAR);
+    SearchRequest searchRequest =
+        requestHandler.getSearchRequest(
+            "*",
+            null,
+            null,
+            0,
+            10,
+            new SearchFlags().setFulltext(true),
+            List.of(
+                "textFieldOverride",
+                "_entityType",
+                nestedAggString,
+                AGGREGATION_SEPARATOR_CHAR,
+                "not_a_facet"));
     SearchSourceBuilder sourceBuilder = searchRequest.source();
     // Filters
     Collection<AggregationBuilder> aggregationBuilders =
@@ -168,17 +208,28 @@ public void testAggregationsInSearch() {
     assertEquals(aggregationBuilders.size(), 3);
 
     // Expected aggregations
-    AggregationBuilder expectedTextFieldAggregationBuilder = AggregationBuilders.terms("textFieldOverride")
-        .field("textFieldOverride.keyword").size(testQueryConfig.getMaxTermBucketSize());
-    AggregationBuilder expectedEntityTypeAggregationBuilder = AggregationBuilders.terms("_entityType")
-        .field("_index").size(testQueryConfig.getMaxTermBucketSize()).minDocCount(0);
-    AggregationBuilder expectedNestedAggregationBuilder = AggregationBuilders.terms(nestedAggString).field("_index")
-        .size(testQueryConfig.getMaxTermBucketSize()).minDocCount(0)
-        .subAggregation(AggregationBuilders.terms(nestedAggString)
-            .field("textFieldOverride.keyword").size(testQueryConfig.getMaxTermBucketSize()));
+    AggregationBuilder expectedTextFieldAggregationBuilder =
+        AggregationBuilders.terms("textFieldOverride")
+            .field("textFieldOverride.keyword")
+            .size(testQueryConfig.getMaxTermBucketSize());
+    AggregationBuilder expectedEntityTypeAggregationBuilder =
+        AggregationBuilders.terms("_entityType")
+            .field("_index")
+            .size(testQueryConfig.getMaxTermBucketSize())
+            .minDocCount(0);
+    AggregationBuilder expectedNestedAggregationBuilder =
+        AggregationBuilders.terms(nestedAggString)
+            .field("_index")
+            .size(testQueryConfig.getMaxTermBucketSize())
+            .minDocCount(0)
+            .subAggregation(
+                AggregationBuilders.terms(nestedAggString)
+                    .field("textFieldOverride.keyword")
+                    .size(testQueryConfig.getMaxTermBucketSize()));
 
     for (AggregationBuilder builder : aggregationBuilders) {
-      if (builder.getName().equals("textFieldOverride") || builder.getName().equals("_entityType")) {
+      if (builder.getName().equals("textFieldOverride")
+          || builder.getName().equals("_entityType")) {
         assertTrue(builder.getSubAggregations().isEmpty());
         if (builder.getName().equalsIgnoreCase("textFieldOverride")) {
           assertEquals(builder, expectedTextFieldAggregationBuilder);
@@ -200,7 +251,8 @@ public void testAggregationsInSearch() {
 
   @Test
   public void testFilteredSearch() {
-    final SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+    final SearchRequestHandler requestHandler =
+        SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
 
     final BoolQueryBuilder testQuery = constructFilterQuery(requestHandler, false);
 
@@ -210,7 +262,6 @@ public void testFilteredSearch() {
 
     testRemovedQuery(queryWithRemoved);
 
-
     final BoolQueryBuilder testQueryScroll = constructFilterQuery(requestHandler, true);
 
     testFilterQuery(testQueryScroll);
@@ -220,138 +271,190 @@ public void testFilteredSearch() {
     testRemovedQuery(queryWithRemovedScroll);
   }
 
-  private BoolQueryBuilder constructFilterQuery(SearchRequestHandler requestHandler, boolean scroll) {
-    final Criterion filterCriterion = new Criterion()
-        .setField("keyword")
-        .setCondition(Condition.EQUAL)
-        .setValue("some value");
+  private BoolQueryBuilder constructFilterQuery(
+      SearchRequestHandler requestHandler, boolean scroll) {
+    final Criterion filterCriterion =
+        new Criterion().setField("keyword").setCondition(Condition.EQUAL).setValue("some value");
 
-    final Filter filterWithoutRemovedCondition = new Filter().setOr(
-        new ConjunctiveCriterionArray(
-            new ConjunctiveCriterion().setAnd(
-                new CriterionArray(ImmutableList.of(filterCriterion)))
-        ));
+    final Filter filterWithoutRemovedCondition =
+        new Filter()
+            .setOr(
+                new ConjunctiveCriterionArray(
+                    new ConjunctiveCriterion()
+                        .setAnd(new CriterionArray(ImmutableList.of(filterCriterion)))));
 
     final BoolQueryBuilder testQuery;
     if (scroll) {
-      testQuery = (BoolQueryBuilder) requestHandler
-          .getSearchRequest("testQuery", filterWithoutRemovedCondition, null, null, null,
-              "5m", 10, new SearchFlags().setFulltext(false))
-          .source()
-          .query();
+      testQuery =
+          (BoolQueryBuilder)
+              requestHandler
+                  .getSearchRequest(
+                      "testQuery",
+                      filterWithoutRemovedCondition,
+                      null,
+                      null,
+                      null,
+                      "5m",
+                      10,
+                      new SearchFlags().setFulltext(false))
+                  .source()
+                  .query();
    } else {
      testQuery =
-          (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithoutRemovedCondition, null,
-              0, 10, new SearchFlags().setFulltext(false), null).source().query();
+          (BoolQueryBuilder)
+              requestHandler
+                  .getSearchRequest(
+                      "testQuery",
+                      filterWithoutRemovedCondition,
+                      null,
+                      0,
+                      10,
+                      new SearchFlags().setFulltext(false),
+                      null)
+                  .source()
+                  .query();
    }
    return testQuery;
  }
 
  private void testFilterQuery(BoolQueryBuilder testQuery) {
-    Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = testQuery.filter()
-        .stream()
-        .filter(or -> or instanceof BoolQueryBuilder)
-        .map(or -> (BoolQueryBuilder) or)
-        .flatMap(or -> {
-          System.out.println("processing: " + or.mustNot());
-          return or.mustNot().stream();
-        })
-        .filter(and -> and instanceof MatchQueryBuilder)
-        .map(and -> (MatchQueryBuilder) and)
-        .filter(match -> match.fieldName().equals("removed"))
-        .findAny();
-
-    assertTrue(mustNotHaveRemovedCondition.isPresent(), "Expected must not have removed condition to exist"
-        + " if filter does not have it");
+    Optional<MatchQueryBuilder> mustNotHaveRemovedCondition =
+        testQuery.filter().stream()
+            .filter(or -> or instanceof BoolQueryBuilder)
+            .map(or -> (BoolQueryBuilder) or)
+            .flatMap(
+                or -> {
+                  System.out.println("processing: " + or.mustNot());
+                  return or.mustNot().stream();
+                })
+            .filter(and -> and instanceof MatchQueryBuilder)
+            .map(and -> (MatchQueryBuilder) and)
+            .filter(match -> match.fieldName().equals("removed"))
+            .findAny();
+
+    assertTrue(
+        mustNotHaveRemovedCondition.isPresent(),
+        "Expected must not have removed condition to exist" + " if filter does not have it");
  }
 
-  private BoolQueryBuilder constructRemovedQuery(SearchRequestHandler requestHandler, boolean scroll) {
-    final Criterion filterCriterion = new Criterion()
-        .setField("keyword")
-        .setCondition(Condition.EQUAL)
-        .setValue("some value");
-
-    final Criterion removedCriterion = new Criterion()
-        .setField("removed")
-        .setCondition(Condition.EQUAL)
-        .setValue(String.valueOf(false));
-
-    final Filter filterWithRemovedCondition = new Filter().setOr(
-        new ConjunctiveCriterionArray(
-            new ConjunctiveCriterion().setAnd(
-                new CriterionArray(ImmutableList.of(filterCriterion, removedCriterion)))
-        ));
+  private BoolQueryBuilder constructRemovedQuery(
+      SearchRequestHandler requestHandler, boolean scroll) {
+    final Criterion filterCriterion =
+        new Criterion().setField("keyword").setCondition(Condition.EQUAL).setValue("some value");
+
+    final Criterion removedCriterion =
+        new Criterion()
+            .setField("removed")
+            .setCondition(Condition.EQUAL)
+            .setValue(String.valueOf(false));
+
+    final Filter filterWithRemovedCondition =
+        new Filter()
+            .setOr(
+                new ConjunctiveCriterionArray(
+                    new ConjunctiveCriterion()
+                        .setAnd(
+                            new CriterionArray(
+                                ImmutableList.of(filterCriterion, removedCriterion)))));
 
     final BoolQueryBuilder queryWithRemoved;
     if (scroll) {
-      queryWithRemoved = (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithRemovedCondition,
-          null, null, null, "5m", 10, new SearchFlags().setFulltext(false)).source().query();
+      queryWithRemoved =
+          (BoolQueryBuilder)
+              requestHandler
+                  .getSearchRequest(
+                      "testQuery",
+                      filterWithRemovedCondition,
+                      null,
+                      null,
+                      null,
+                      "5m",
+                      10,
+                      new SearchFlags().setFulltext(false))
+                  .source()
+                  .query();
    } else {
      queryWithRemoved =
-          (BoolQueryBuilder) requestHandler.getSearchRequest("testQuery", filterWithRemovedCondition,
-              null, 0, 10, new SearchFlags().setFulltext(false), null).source().query();
+          (BoolQueryBuilder)
+              requestHandler
                  .getSearchRequest(
+                      "testQuery",
+                      filterWithRemovedCondition,
+                      null,
+                      0,
+                      10,
+                      new SearchFlags().setFulltext(false),
+                      null)
+                  .source()
+                  .query();
    }
    return queryWithRemoved;
  }
 
  private void testRemovedQuery(BoolQueryBuilder queryWithRemoved) {
-    Optional<MatchQueryBuilder> mustNotHaveRemovedCondition = queryWithRemoved.must()
-        .stream()
-        .filter(or -> or instanceof BoolQueryBuilder)
-        .map(or -> (BoolQueryBuilder) or)
-        .flatMap(or -> {
-          System.out.println("processing: " + or.mustNot());
-          return or.mustNot().stream();
-        })
-        .filter(and -> and instanceof MatchQueryBuilder)
-        .map(and -> (MatchQueryBuilder) and)
-        .filter(match -> match.fieldName().equals("removed"))
-        .findAny();
-
-    assertFalse(mustNotHaveRemovedCondition.isPresent(), "Expect `must not have removed` condition to not"
-        + " exist because filter already has it a condition for the removed property");
+    Optional<MatchQueryBuilder> mustNotHaveRemovedCondition =
+        queryWithRemoved.must().stream()
+            .filter(or -> or instanceof BoolQueryBuilder)
+            .map(or -> (BoolQueryBuilder) or)
+            .flatMap(
+                or -> {
+                  System.out.println("processing: " + or.mustNot());
+                  return or.mustNot().stream();
+                })
+            .filter(and -> and instanceof MatchQueryBuilder)
+            .map(and -> (MatchQueryBuilder) and)
+            .filter(match -> match.fieldName().equals("removed"))
+            .findAny();
+
+    assertFalse(
+        mustNotHaveRemovedCondition.isPresent(),
+        "Expect `must not have removed` condition to not"
+            + " exist because the filter already has a condition for the removed property");
  }
 
  // For fields that are one of EDITABLE_FIELD_TO_QUERY_PAIRS, we want to make sure
-  // a filter that has a list of values like below will filter on all values by generating a terms query
+  // a filter that has a list of values like below will filter on all values by generating a terms
+  // query
  // field EQUAL [value1, value2, ...]
  @Test
  public void testFilterFieldTagsByValues() {
-    final Criterion filterCriterion = new Criterion()
-        .setField("fieldTags")
-        .setCondition(Condition.EQUAL)
-        .setValue("v1")
-        .setValues(new StringArray("v1", "v2"));
+    final Criterion filterCriterion =
+        new Criterion()
+            .setField("fieldTags")
+            .setCondition(Condition.EQUAL)
+            .setValue("v1")
+            .setValues(new StringArray("v1", "v2"));
 
    final BoolQueryBuilder testQuery = getQuery(filterCriterion);
 
    // bool -> filter -> [bool] -> should -> [bool] -> filter -> [bool] -> should -> [terms]
-    List<TermsQueryBuilder> termsQueryBuilders = testQuery.filter()
-        .stream()
-        .filter(or -> or instanceof BoolQueryBuilder)
-        .flatMap(or -> ((BoolQueryBuilder) or).should().stream())
-        .filter(should -> should instanceof BoolQueryBuilder)
-        .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
-        .filter(must -> must instanceof BoolQueryBuilder)
-        .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
-        .filter(should -> should instanceof TermsQueryBuilder)
-        .map(should -> (TermsQueryBuilder) should)
-        .collect(Collectors.toList());
+    List<TermsQueryBuilder> termsQueryBuilders =
+        testQuery.filter().stream()
+            .filter(or -> or instanceof BoolQueryBuilder)
+            .flatMap(or -> ((BoolQueryBuilder) or).should().stream())
+            .filter(should -> should instanceof BoolQueryBuilder)
+            .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
+            .filter(must -> must instanceof BoolQueryBuilder)
+            .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
+            .filter(should -> should instanceof TermsQueryBuilder)
+            .map(should -> (TermsQueryBuilder) should)
+            .collect(Collectors.toList());
 
    assertTrue(termsQueryBuilders.size() == 2, "Expected to find two terms queries");
 
    Map<String, List<String>> termsMap = new HashMap<>();
-    termsQueryBuilders.forEach(termsQueryBuilder -> {
-      String field = termsQueryBuilder.fieldName();
-      List<Object> values = termsQueryBuilder.values();
-      List<String> strValues = new ArrayList<>();
-      for (Object value : values) {
-        assertTrue(value instanceof String,
-            "Expected value to be String, got: " + value.getClass());
-        strValues.add((String) value);
-      }
-      Collections.sort(strValues);
-      termsMap.put(field, strValues);
-    });
+    termsQueryBuilders.forEach(
+        termsQueryBuilder -> {
+          String field = termsQueryBuilder.fieldName();
+          List<Object> values = termsQueryBuilder.values();
+          List<String> strValues = new ArrayList<>();
+          for (Object value : values) {
+            assertTrue(
+                value instanceof String, "Expected value to be String, got: " + value.getClass());
+            strValues.add((String) value);
+          }
+          Collections.sort(strValues);
+          termsMap.put(field, strValues);
+        });
 
    assertTrue(termsMap.containsKey("fieldTags.keyword"));
    assertTrue(termsMap.containsKey("editedFieldTags.keyword"));
@@ -367,35 +470,35 @@ public void testFilterFieldTagsByValues() {
  // pair of fields
  @Test
  public void testFilterFieldTagsByValue() {
-    final Criterion filterCriterion = new Criterion()
-        .setField("fieldTags")
-        .setCondition(Condition.EQUAL)
-        .setValue("v1");
+    final Criterion filterCriterion =
+        new Criterion().setField("fieldTags").setCondition(Condition.EQUAL).setValue("v1");
 
    final BoolQueryBuilder testQuery = getQuery(filterCriterion);
 
-    // bool -> must -> [bool] -> should -> [bool] -> must -> [bool] -> should -> [bool] -> should -> [match]
-    List<MultiMatchQueryBuilder> matchQueryBuilders = testQuery.filter()
-        .stream()
-        .filter(or -> or instanceof BoolQueryBuilder)
-        .flatMap(or -> ((BoolQueryBuilder) or).should().stream())
-        .filter(should -> should instanceof BoolQueryBuilder)
-        .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
-        .filter(must -> must instanceof BoolQueryBuilder)
-        .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
-        .filter(should -> should instanceof BoolQueryBuilder)
-        .flatMap(should -> ((BoolQueryBuilder) should).should().stream())
-        .filter(should -> should instanceof MultiMatchQueryBuilder)
-        .map(should -> (MultiMatchQueryBuilder) should)
-        .collect(Collectors.toList());
+    // bool -> must -> [bool] -> should -> [bool] -> must -> [bool] -> should -> [bool] -> should ->
+    // [match]
+    List<MultiMatchQueryBuilder> matchQueryBuilders =
+        testQuery.filter().stream()
+            .filter(or -> or instanceof BoolQueryBuilder)
+            .flatMap(or -> ((BoolQueryBuilder) or).should().stream())
+            .filter(should -> should instanceof BoolQueryBuilder)
+            .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
+            .filter(must -> must instanceof BoolQueryBuilder)
+            .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
+            .filter(should -> should instanceof BoolQueryBuilder)
+            .flatMap(should -> ((BoolQueryBuilder) should).should().stream())
+            .filter(should -> should instanceof MultiMatchQueryBuilder)
+            .map(should -> (MultiMatchQueryBuilder) should)
+            .collect(Collectors.toList());
 
    assertTrue(matchQueryBuilders.size() == 2, "Expected to find two match queries");
 
    Map<String, String> matchMap = new HashMap<>();
-    matchQueryBuilders.forEach(matchQueryBuilder -> {
-      Set<String> fields = matchQueryBuilder.fields().keySet();
-      assertTrue(matchQueryBuilder.value() instanceof String);
-      fields.forEach(field -> matchMap.put(field, (String) matchQueryBuilder.value()));
-    });
+    matchQueryBuilders.forEach(
+        matchQueryBuilder -> {
+          Set<String> fields = matchQueryBuilder.fields().keySet();
+          assertTrue(matchQueryBuilder.value() instanceof String);
+          fields.forEach(field -> matchMap.put(field, (String) matchQueryBuilder.value()));
+        });
 
    assertTrue(matchMap.containsKey("fieldTags.keyword"));
    assertTrue(matchMap.containsKey("editedFieldTags.keyword"));
@@ -407,65 +510,68 @@ public void testFilterFieldTagsByValue() {
  // Test fields not in EDITABLE_FIELD_TO_QUERY_PAIRS with a single value
  @Test
  public void testFilterPlatformByValue() {
-    final Criterion filterCriterion = new Criterion()
-        .setField("platform")
-        .setCondition(Condition.EQUAL)
-        .setValue("mysql");
+    final Criterion filterCriterion =
+        new Criterion().setField("platform").setCondition(Condition.EQUAL).setValue("mysql");
 
    final BoolQueryBuilder testQuery = getQuery(filterCriterion);
 
    // bool -> filter -> [bool] -> should -> [bool] -> filter -> [bool] -> should -> [match]
-    List<MultiMatchQueryBuilder> matchQueryBuilders = testQuery.filter()
-        .stream()
-        .filter(or -> or instanceof BoolQueryBuilder)
-        .flatMap(or -> ((BoolQueryBuilder) or).should().stream())
-        .filter(should -> should instanceof BoolQueryBuilder)
-        .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
-        .filter(must -> must instanceof BoolQueryBuilder)
-        .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
-        .filter(should -> should instanceof MultiMatchQueryBuilder)
-        .map(should -> (MultiMatchQueryBuilder) should)
-        .collect(Collectors.toList());
+    List<MultiMatchQueryBuilder> matchQueryBuilders =
+        testQuery.filter().stream()
+            .filter(or -> or instanceof BoolQueryBuilder)
+            .flatMap(or -> ((BoolQueryBuilder) or).should().stream())
+            .filter(should -> should instanceof BoolQueryBuilder)
+            .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
+            .filter(must -> must instanceof BoolQueryBuilder)
+            .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
+            .filter(should -> should instanceof MultiMatchQueryBuilder)
+            .map(should -> (MultiMatchQueryBuilder) should)
+            .collect(Collectors.toList());
 
    assertTrue(matchQueryBuilders.size() == 1, "Expected to find one match query");
    MultiMatchQueryBuilder matchQueryBuilder = matchQueryBuilders.get(0);
-    assertEquals(matchQueryBuilder.fields(), Map.of(
+    assertEquals(
+        matchQueryBuilder.fields(),
+        Map.of(
            "platform", 1.0f,
-        "platform.*", 1.0f)
-    );
+            "platform.*", 1.0f));
    assertEquals(matchQueryBuilder.value(), "mysql");
  }
 
  // Test fields not in EDITABLE_FIELD_TO_QUERY_PAIRS with a list of values
  @Test
  public void testFilterPlatformByValues() {
-    final Criterion filterCriterion = new Criterion()
-        .setField("platform")
-        .setCondition(Condition.EQUAL)
-        .setValue("mysql")
-        .setValues(new StringArray("mysql", "bigquery"));
+    final Criterion filterCriterion =
+        new Criterion()
+            .setField("platform")
+            .setCondition(Condition.EQUAL)
+            .setValue("mysql")
+            .setValues(new StringArray("mysql", "bigquery"));
 
    final BoolQueryBuilder testQuery = getQuery(filterCriterion);
 
    // bool -> filter -> [bool] -> should -> [bool] -> filter -> [terms]
-    List<TermsQueryBuilder> termsQueryBuilders = testQuery.filter()
-        .stream()
-        .filter(must -> must instanceof BoolQueryBuilder)
-        .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
-        .filter(should -> should instanceof BoolQueryBuilder)
-        .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
-        .filter(must -> must instanceof TermsQueryBuilder)
-        .map(must -> (TermsQueryBuilder) must)
-        .collect(Collectors.toList());
+    List<TermsQueryBuilder> termsQueryBuilders =
+        testQuery.filter().stream()
+            .filter(must -> must instanceof BoolQueryBuilder)
+            .flatMap(must -> ((BoolQueryBuilder) must).should().stream())
+            .filter(should -> should instanceof BoolQueryBuilder)
+            .flatMap(should -> ((BoolQueryBuilder) should).filter().stream())
+            .filter(must -> must instanceof TermsQueryBuilder)
+            .map(must -> (TermsQueryBuilder) must)
+            .collect(Collectors.toList());
 
    assertTrue(termsQueryBuilders.size() == 1, "Expected to find one terms query");
    final TermsQueryBuilder termsQueryBuilder = termsQueryBuilders.get(0);
    assertEquals(termsQueryBuilder.fieldName(), "platform.keyword");
    Set<String> values = new HashSet<>();
-    termsQueryBuilder.values().forEach(value -> {
-      assertTrue(value instanceof String);
-      values.add((String) value);
-    });
+    termsQueryBuilder
+        .values()
+        .forEach(
+            value -> {
+              assertTrue(value instanceof String);
+              values.add((String) value);
+            });
 
    assertEquals(values.size(), 2, "Expected two platform filter values");
    assertTrue(values.contains("mysql"));
@@ -511,18 +617,20 @@ public void testBrowsePathQueryFilter() {
  }
 
  private BoolQueryBuilder getQuery(final Criterion filterCriterion) {
-    final Filter filter = new Filter().setOr(
-        new ConjunctiveCriterionArray(
-            new ConjunctiveCriterion().setAnd(
-                new CriterionArray(ImmutableList.of(filterCriterion)))
-        ));
-
-    final SearchRequestHandler requestHandler = SearchRequestHandler.getBuilder(
-        TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
-
-    return (BoolQueryBuilder) requestHandler
-        .getSearchRequest("", filter, null, 0, 10, new SearchFlags().setFulltext(false), null)
-        .source()
-        .query();
+    final Filter filter =
+        new Filter()
+            .setOr(
+                new ConjunctiveCriterionArray(
+                    new ConjunctiveCriterion()
+                        .setAnd(new CriterionArray(ImmutableList.of(filterCriterion)))));
+
+    final SearchRequestHandler requestHandler =
+        SearchRequestHandler.getBuilder(TestEntitySpecBuilder.getSpec(), testQueryConfig, null);
+
+    return (BoolQueryBuilder)
+        requestHandler
+            .getSearchRequest("", filter, null, 0, 10, new SearchFlags().setFulltext(false), null)
+            .source()
+            .query();
  }
 }
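Note: testFilterQuery and testRemovedQuery above both probe a single behavior: the handler injects a must_not clause on the soft-delete marker unless the caller's filter already constrains "removed". A condensed sketch of that behavior (assumed shape and hypothetical helper name, mirroring only what the assertions check):

    // Hide soft-deleted documents unless the incoming filter already mentions "removed".
    static BoolQueryBuilder withSoftDeleteGuard(BoolQueryBuilder query, boolean filterHasRemoved) {
      if (!filterHasRemoved) {
        query.mustNot(QueryBuilders.matchQuery("removed", true));
      }
      return query;
    }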
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java
index 8888ef59ad7d2..6e2d90287d5d9 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformerTest.java
@@ -1,5 +1,10 @@
 package com.linkedin.metadata.search.transformer;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertFalse;
+import static org.testng.Assert.assertTrue;
+
 import com.datahub.test.TestEntitySnapshot;
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -13,28 +18,29 @@
 import com.linkedin.metadata.models.EntitySpec;
 import java.io.IOException;
 import java.util.Optional;
-
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertTrue;
-
-
 public class SearchDocumentTransformerTest {
 
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    OBJECT_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
   }
 
   @Test
   public void testTransform() throws IOException {
-    SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000);
+    SearchDocumentTransformer searchDocumentTransformer =
+        new SearchDocumentTransformer(1000, 1000, 1000);
     TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot();
     EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec();
-    Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false);
+    Optional<String> result =
+        searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false);
     assertTrue(result.isPresent());
     ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get());
     assertEquals(parsedJson.get("urn").asText(), snapshot.getUrn().toString());
@@ -65,10 +71,12 @@ public void testTransform() throws IOException {
 
   @Test
   public void testTransformForDelete() throws IOException {
-    SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 1000);
+    SearchDocumentTransformer searchDocumentTransformer =
+        new SearchDocumentTransformer(1000, 1000, 1000);
     TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot();
     EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec();
-    Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, true);
+    Optional<String> result =
+        searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, true);
     assertTrue(result.isPresent());
     ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get());
     assertEquals(parsedJson.get("urn").asText(), snapshot.getUrn().toString());
@@ -86,14 +94,18 @@ public void testTransformForDelete() throws IOException {
 
   @Test
   public void testTransformMaxFieldValue() throws IOException {
-    SearchDocumentTransformer searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 5);
+    SearchDocumentTransformer searchDocumentTransformer =
+        new SearchDocumentTransformer(1000, 1000, 5);
     TestEntitySnapshot snapshot = TestEntityUtil.getSnapshot();
     EntitySpec testEntitySpec = TestEntitySpecBuilder.getSpec();
-    Optional<String> result = searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false);
+    Optional<String> result =
+        searchDocumentTransformer.transformSnapshot(snapshot, testEntitySpec, false);
     assertTrue(result.isPresent());
     ObjectNode parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get());
 
-    assertEquals(parsedJson.get("customProperties"), JsonNodeFactory.instance.arrayNode().add("shortValue=123"));
+    assertEquals(
+        parsedJson.get("customProperties"),
+        JsonNodeFactory.instance.arrayNode().add("shortValue=123"));
     assertEquals(parsedJson.get("esObjectField"), JsonNodeFactory.instance.arrayNode().add("123"));
 
     searchDocumentTransformer = new SearchDocumentTransformer(1000, 1000, 20);
@@ -103,10 +115,21 @@ public void testTransformMaxFieldValue() throws IOException {
 
     assertTrue(result.isPresent());
     parsedJson = (ObjectNode) OBJECT_MAPPER.readTree(result.get());
-
-    assertEquals(parsedJson.get("customProperties"), JsonNodeFactory.instance.arrayNode()
-        .add("key1=value1").add("key2=value2").add("shortValue=123").add("longValue=0123456789"));
-    assertEquals(parsedJson.get("esObjectField"), JsonNodeFactory.instance.arrayNode()
-        .add("value1").add("value2").add("123").add("0123456789"));
+    assertEquals(
+        parsedJson.get("customProperties"),
+        JsonNodeFactory.instance
+            .arrayNode()
+            .add("key1=value1")
+            .add("key2=value2")
+            .add("shortValue=123")
+            .add("longValue=0123456789"));
+    assertEquals(
+        parsedJson.get("esObjectField"),
+        JsonNodeFactory.instance
+            .arrayNode()
+            .add("value1")
+            .add("value2")
+            .add("123")
+            .add("0123456789"));
   }
 }
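Note: testTransformMaxFieldValue exercises the transformer's per-value length cap. The observable rule, reconstructed from the assertions (hypothetical helper, not the transformer's actual code): with a cap of 5, "value1" (6 chars) and "0123456789" (10 chars) are dropped while "123" (3 chars) survives; with a cap of 20 all four values are kept.

    static List<String> indexableProperties(Map<String, String> props, int maxValueLength) {
      return props.entrySet().stream()
          .filter(e -> e.getValue().length() <= maxValueLength) // drop oversized values
          .map(e -> e.getKey() + "=" + e.getValue())            // index as "key=value"
          .collect(Collectors.toList());
    }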
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java
index 6127326db8ab9..e4e0d00391fa5 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathUtilsTest.java
@@ -14,8 +14,6 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-
-
 public class BrowsePathUtilsTest {
 
   private final EntityRegistry registry = new TestEntityRegistry();
@@ -24,43 +22,40 @@ public class BrowsePathUtilsTest {
   public void testGetDefaultBrowsePath() throws URISyntaxException {
 
     // Datasets
-    DatasetKey datasetKey = new DatasetKey()
-        .setName("Test.A.B")
-        .setOrigin(FabricType.PROD)
-        .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka"));
+    DatasetKey datasetKey =
+        new DatasetKey()
+            .setName("Test.A.B")
+            .setOrigin(FabricType.PROD)
+            .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka"));
     Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset");
     String datasetPath = BrowsePathUtils.getDefaultBrowsePath(datasetUrn, this.registry, '.');
     Assert.assertEquals(datasetPath, "/prod/kafka/test/a");
 
     // Charts
-    ChartKey chartKey = new ChartKey()
-        .setChartId("Test/A/B")
-        .setDashboardTool("looker");
+    ChartKey chartKey = new ChartKey().setChartId("Test/A/B").setDashboardTool("looker");
     Urn chartUrn = EntityKeyUtils.convertEntityKeyToUrn(chartKey, "chart");
     String chartPath = BrowsePathUtils.getDefaultBrowsePath(chartUrn, this.registry, '/');
     Assert.assertEquals(chartPath, "/looker");
 
     // Dashboards
-    DashboardKey dashboardKey = new DashboardKey()
-        .setDashboardId("Test/A/B")
-        .setDashboardTool("looker");
+    DashboardKey dashboardKey =
+        new DashboardKey().setDashboardId("Test/A/B").setDashboardTool("looker");
     Urn dashboardUrn = EntityKeyUtils.convertEntityKeyToUrn(dashboardKey, "dashboard");
     String dashboardPath = BrowsePathUtils.getDefaultBrowsePath(dashboardUrn, this.registry, '/');
     Assert.assertEquals(dashboardPath, "/looker");
 
     // Data Flows
-    DataFlowKey dataFlowKey = new DataFlowKey()
-        .setCluster("test")
-        .setFlowId("Test/A/B")
-        .setOrchestrator("airflow");
+    DataFlowKey dataFlowKey =
+        new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow");
     Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow");
     String dataFlowPath = BrowsePathUtils.getDefaultBrowsePath(dataFlowUrn, this.registry, '/');
     Assert.assertEquals(dataFlowPath, "/airflow/test");
 
     // Data Jobs
-    DataJobKey dataJobKey = new DataJobKey()
-        .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)"))
-        .setJobId("Job/A/B");
+    DataJobKey dataJobKey =
+        new DataJobKey()
+            .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)"))
+            .setJobId("Job/A/B");
     Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob");
     String dataJobPath = BrowsePathUtils.getDefaultBrowsePath(dataJobUrn, this.registry, '/');
     Assert.assertEquals(dataJobPath, "/airflow/test");
@@ -69,46 +64,42 @@ public void testGetDefaultBrowsePath() throws URISyntaxException {
 
   @Test
   public void testBuildDataPlatformUrn() throws URISyntaxException {
     // Datasets
-    DatasetKey datasetKey = new DatasetKey()
-        .setName("Test.A.B")
-        .setOrigin(FabricType.PROD)
-        .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka"));
+    DatasetKey datasetKey =
+        new DatasetKey()
+            .setName("Test.A.B")
+            .setOrigin(FabricType.PROD)
+            .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka"));
     Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset");
     Urn dataPlatformUrn1 = BrowsePathUtils.buildDataPlatformUrn(datasetUrn, this.registry);
     Assert.assertEquals(dataPlatformUrn1, Urn.createFromString("urn:li:dataPlatform:kafka"));
 
     // Charts
-    ChartKey chartKey = new ChartKey()
-        .setChartId("Test/A/B")
-        .setDashboardTool("looker");
+    ChartKey chartKey = new ChartKey().setChartId("Test/A/B").setDashboardTool("looker");
     Urn chartUrn = EntityKeyUtils.convertEntityKeyToUrn(chartKey, "chart");
     Urn dataPlatformUrn2 = BrowsePathUtils.buildDataPlatformUrn(chartUrn, this.registry);
     Assert.assertEquals(dataPlatformUrn2, Urn.createFromString("urn:li:dataPlatform:looker"));
 
     // Dashboards
-    DashboardKey dashboardKey = new DashboardKey()
-        .setDashboardId("Test/A/B")
-        .setDashboardTool("looker");
+    DashboardKey dashboardKey =
+        new DashboardKey().setDashboardId("Test/A/B").setDashboardTool("looker");
     Urn dashboardUrn = EntityKeyUtils.convertEntityKeyToUrn(dashboardKey, "dashboard");
     Urn dataPlatformUrn3 = BrowsePathUtils.buildDataPlatformUrn(dashboardUrn, this.registry);
     Assert.assertEquals(dataPlatformUrn3, Urn.createFromString("urn:li:dataPlatform:looker"));
 
     // Data Flows
-    DataFlowKey dataFlowKey = new DataFlowKey()
-        .setCluster("test")
-        .setFlowId("Test/A/B")
-        .setOrchestrator("airflow");
+    DataFlowKey dataFlowKey =
+        new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow");
     Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow");
     Urn dataPlatformUrn4 = BrowsePathUtils.buildDataPlatformUrn(dataFlowUrn, this.registry);
     Assert.assertEquals(dataPlatformUrn4, Urn.createFromString("urn:li:dataPlatform:airflow"));
 
     // Data Jobs
-    DataJobKey dataJobKey = new DataJobKey()
-        .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)"))
-        .setJobId("Job/A/B");
+    DataJobKey dataJobKey =
+        new DataJobKey()
+            .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)"))
+            .setJobId("Job/A/B");
     Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob");
     Urn dataPlatformUrn5 = BrowsePathUtils.buildDataPlatformUrn(dataJobUrn, this.registry);
     Assert.assertEquals(dataPlatformUrn5, Urn.createFromString("urn:li:dataPlatform:airflow"));
-
   }
 }
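Note: the expectations in BrowsePathUtilsTest imply the default browse-path rule: lowercase the key name, split on the entity's delimiter, drop the leaf segment, and prefix the fabric and platform where the key carries them. A sketch for the dataset case (assumed from the "/prod/kafka/test/a" assertion, not taken from BrowsePathUtils; uses java.util.Arrays):

    String name = "Test.A.B"; // DatasetKey name, delimiter '.'
    String[] parts = name.toLowerCase().split("\\.");
    String path =
        "/prod/kafka/" // fabric + platform taken from the key
            + String.join("/", Arrays.copyOf(parts, parts.length - 1)); // -> "/prod/kafka/test/a"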
DataFlowKey dataFlowKey = + new DataFlowKey().setCluster("test").setFlowId("Test/A/B").setOrchestrator("airflow"); Urn dataFlowUrn = EntityKeyUtils.convertEntityKeyToUrn(dataFlowKey, "dataFlow"); Urn dataPlatformUrn4 = BrowsePathUtils.buildDataPlatformUrn(dataFlowUrn, this.registry); Assert.assertEquals(dataPlatformUrn4, Urn.createFromString("urn:li:dataPlatform:airflow")); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = + new DataJobKey() + .setFlow(Urn.createFromString("urn:li:dataFlow:(airflow,Test/A/B,test)")) + .setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); Urn dataPlatformUrn5 = BrowsePathUtils.buildDataPlatformUrn(dataJobUrn, this.registry); Assert.assertEquals(dataPlatformUrn5, Urn.createFromString("urn:li:dataPlatform:airflow")); - } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java index 8a85ae0396ee1..3041b13839768 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/BrowsePathV2UtilsTest.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.linkedin.common.BrowsePathEntry; import com.linkedin.common.BrowsePathEntryArray; import com.linkedin.common.BrowsePathsV2; @@ -17,22 +22,17 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.EntityKeyUtils; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.eq; +import org.testng.Assert; +import org.testng.annotations.Test; public class BrowsePathV2UtilsTest { - private static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test.a.b,DEV)"; + private static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test.a.b,DEV)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; private static final String DATA_FLOW_URN = "urn:li:dataFlow:(orchestrator,flowId,cluster)"; @@ -46,12 +46,16 @@ public void testGetDefaultDatasetBrowsePathV2WithContainers() throws URISyntaxEx Urn datasetUrn = UrnUtils.getUrn(DATASET_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, 
this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -62,9 +66,12 @@ public void testGetDefaultDatasetBrowsePathV2WithContainersFlagOff() throws URIS Urn datasetUrn = UrnUtils.getUrn(DATASET_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(datasetUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, false); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + datasetUrn, this.registry, '.', mockService, false); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("test"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("a"); @@ -78,12 +85,16 @@ public void testGetDefaultChartBrowsePathV2WithContainers() throws URISyntaxExce Urn chartUrn = UrnUtils.getUrn(CHART_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(chartUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(chartUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -94,12 +105,17 @@ public void testGetDefaultDashboardBrowsePathV2WithContainers() throws URISyntax Urn dashboardUrn = UrnUtils.getUrn(DASHBOARD_URN); final Urn containerUrn1 = UrnUtils.getUrn(CONTAINER_URN1); final Urn containerUrn2 = UrnUtils.getUrn(CONTAINER_URN2); - EntityService mockService = initMockServiceWithContainerParents(dashboardUrn, containerUrn1, containerUrn2); + EntityService mockService = + initMockServiceWithContainerParents(dashboardUrn, containerUrn1, containerUrn2); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dashboardUrn, this.registry, '.', mockService, true); + BrowsePathsV2 browsePathsV2 = + 
BrowsePathV2Utils.getDefaultBrowsePathV2( + dashboardUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); - BrowsePathEntry entry1 = new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); - BrowsePathEntry entry2 = new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); + BrowsePathEntry entry1 = + new BrowsePathEntry().setId(containerUrn1.toString()).setUrn(containerUrn1); + BrowsePathEntry entry2 = + new BrowsePathEntry().setId(containerUrn2.toString()).setUrn(containerUrn2); expectedPath.add(entry2); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); @@ -110,15 +126,19 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept EntityService mockService = mock(EntityService.class); // Datasets - DatasetKey datasetKey = new DatasetKey() - .setName("Test.A.B") - .setOrigin(FabricType.PROD) - .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); + DatasetKey datasetKey = + new DatasetKey() + .setName("Test.A.B") + .setOrigin(FabricType.PROD) + .setPlatform(Urn.createFromString("urn:li:dataPlatform:kafka")); Urn datasetUrn = EntityKeyUtils.convertEntityKeyToUrn(datasetKey, "dataset"); - when( - mockService.getEntityV2(eq(datasetUrn.getEntityType()), eq(datasetUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - BrowsePathsV2 browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); + when(mockService.getEntityV2( + eq(datasetUrn.getEntityType()), + eq(datasetUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + BrowsePathsV2 browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(datasetUrn, this.registry, '.', mockService, true); BrowsePathEntryArray expectedPath = new BrowsePathEntryArray(); BrowsePathEntry entry1 = new BrowsePathEntry().setId("test"); BrowsePathEntry entry2 = new BrowsePathEntry().setId("a"); @@ -128,10 +148,13 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Charts Urn chartUrn = UrnUtils.getUrn(CHART_URN); - when( - mockService.getEntityV2(eq(chartUrn.getEntityType()), eq(chartUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(chartUrn.getEntityType()), + eq(chartUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(chartUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); @@ -139,10 +162,14 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Dashboards Urn dashboardUrn = UrnUtils.getUrn(DASHBOARD_URN); - when( - mockService.getEntityV2(eq(dashboardUrn.getEntityType()), eq(dashboardUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dashboardUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + 
eq(dashboardUrn.getEntityType()), + eq(dashboardUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dashboardUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); @@ -150,52 +177,64 @@ public void testGetDefaultBrowsePathV2WithoutContainers() throws URISyntaxExcept // Data Flows Urn dataFlowUrn = UrnUtils.getUrn(DATA_FLOW_URN); - when( - mockService.getEntityV2(eq(dataFlowUrn.getEntityType()), eq(dataFlowUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dataFlowUrn, this.registry, '/', mockService, true); + when(mockService.getEntityV2( + eq(dataFlowUrn.getEntityType()), + eq(dataFlowUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2( + dataFlowUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId("Default"); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); // Data Jobs - DataJobKey dataJobKey = new DataJobKey() - .setFlow(dataFlowUrn) - .setJobId("Job/A/B"); + DataJobKey dataJobKey = new DataJobKey().setFlow(dataFlowUrn).setJobId("Job/A/B"); Urn dataJobUrn = EntityKeyUtils.convertEntityKeyToUrn(dataJobKey, "dataJob"); - browsePathsV2 = BrowsePathV2Utils.getDefaultBrowsePathV2(dataJobUrn, this.registry, '/', mockService, true); + browsePathsV2 = + BrowsePathV2Utils.getDefaultBrowsePathV2(dataJobUrn, this.registry, '/', mockService, true); expectedPath = new BrowsePathEntryArray(); entry1 = new BrowsePathEntry().setId(dataFlowUrn.toString()).setUrn(dataFlowUrn); expectedPath.add(entry1); Assert.assertEquals(browsePathsV2.getPath(), expectedPath); } - private EntityService initMockServiceWithContainerParents(Urn entityUrn, Urn containerUrn1, Urn containerUrn2) throws URISyntaxException { + private EntityService initMockServiceWithContainerParents( + Urn entityUrn, Urn containerUrn1, Urn containerUrn2) throws URISyntaxException { EntityService mockService = mock(EntityService.class); final Container container1 = new Container().setContainer(containerUrn1); final Map aspectMap1 = new HashMap<>(); - aspectMap1.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container1.data()))); - final EntityResponse entityResponse1 = new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap1)); - when( - mockService.getEntityV2(eq(entityUrn.getEntityType()), eq(entityUrn), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(entityResponse1); + aspectMap1.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container1.data()))); + final EntityResponse entityResponse1 = + new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap1)); + when(mockService.getEntityV2( + eq(entityUrn.getEntityType()), + eq(entityUrn), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(entityResponse1); final Container container2 = new Container().setContainer(containerUrn2); final Map aspectMap2 = new HashMap<>(); - aspectMap2.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container2.data()))); - 
final EntityResponse entityResponse2 = new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap2)); - when( - mockService.getEntityV2(eq(containerUrn1.getEntityType()), eq(containerUrn1), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(entityResponse2); - - when( - mockService.getEntityV2(eq(containerUrn2.getEntityType()), eq(containerUrn2), eq(Collections.singleton(CONTAINER_ASPECT_NAME))) - ).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); + aspectMap2.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(container2.data()))); + final EntityResponse entityResponse2 = + new EntityResponse().setAspects(new EnvelopedAspectMap(aspectMap2)); + when(mockService.getEntityV2( + eq(containerUrn1.getEntityType()), + eq(containerUrn1), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(entityResponse2); + + when(mockService.getEntityV2( + eq(containerUrn2.getEntityType()), + eq(containerUrn2), + eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap())); return mockService; - } } - diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java index ddd75a152c333..03abd9ffe29d7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESUtilsTest.java @@ -8,7 +8,6 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class ESUtilsTest { private static final String FIELD_TO_EXPAND = "fieldTags"; @@ -16,247 +15,241 @@ public class ESUtilsTest { @Test public void testGetQueryBuilderFromCriterionEqualsValues() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1" - ))); + final Criterion singleValueCriterion = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1"))); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"terms\" : {\n" + + " \"myTestField.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion multiValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion multiValueCriterion = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); result = ESUtils.getQueryBuilderFromCriterion(multiValueCriterion, false); expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField.keyword\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"terms\" : {\n" + + " \"myTestField.keyword\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; 
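As an aside, the `terms` JSON above is the direct rendering of the criterion builder chain; a minimal sketch of that translation, assuming only the Criterion, Condition, StringArray, ImmutableList and ESUtils classes this test already imports (illustrative, not part of the patch):

    // Inside a test method like the ones in ESUtilsTest. An EQUAL criterion
    // renders as a `terms` query; for regular (non-timeseries) fields ESUtils
    // targets the ".keyword" subfield so values are matched exactly.
    Criterion criterion =
        new Criterion()
            .setField("myTestField")
            .setCondition(Condition.EQUAL)
            .setValues(new StringArray(ImmutableList.of("value1", "value2")));
    QueryBuilder query = ESUtils.getQueryBuilderFromCriterion(criterion, false);
    // query.toString() yields the JSON literal compared in the assertion below.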
Assert.assertEquals(result.toString(), expected); - final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.EQUAL) - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion timeseriesField = + new Criterion() + .setField("myTestField") + .setCondition(Condition.EQUAL) + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"terms\" : {\n" - + " \"myTestField\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"terms\" : {\n" + + " \"myTestField\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - } @Test public void testGetQueryBuilderFromCriterionExists() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.EXISTS); + final Criterion singleValueCriterion = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"bool\" : {\n" - + " \"must\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); // No diff in the timeseries field case for this condition. 
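The boolean passed to getQueryBuilderFromCriterion is the timeseries switch exercised throughout this file; a hedged sketch of the contrast these cases pin down (same assumed classes as above):

    // Sketch: with the timeseries flag set, the raw field name is used instead
    // of the ".keyword" subfield (timeseries indexes are assumed to store such
    // fields as keywords already). EXISTS and IS_NULL translate identically
    // either way, since no subfield choice is involved for existence checks.
    Criterion exists = new Criterion().setField("myTestField").setCondition(Condition.EXISTS);
    QueryBuilder regular = ESUtils.getQueryBuilderFromCriterion(exists, false);
    QueryBuilder timeseries = ESUtils.getQueryBuilderFromCriterion(exists, true);
    // Both render the same bool/must/exists JSON asserted in this test.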
- final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.EXISTS); + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.EXISTS); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"bool\" : {\n" - + " \"must\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } @Test public void testGetQueryBuilderFromCriterionIsNull() { - final Criterion singleValueCriterion = new Criterion() - .setField("myTestField") - .setCondition(Condition.IS_NULL); + final Criterion singleValueCriterion = + new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); String expected = "{\n" - + " \"bool\" : {\n" - + " \"must_not\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); // No diff in the timeseries case for this condition - final Criterion timeseriesField = new Criterion() - .setField("myTestField") - .setCondition(Condition.IS_NULL); + final Criterion timeseriesField = + new Criterion().setField("myTestField").setCondition(Condition.IS_NULL); result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); - expected = "{\n" - + " \"bool\" : {\n" - + " \"must_not\" : [\n" - + " {\n" - + " \"exists\" : {\n" - + " \"field\" : \"myTestField\",\n" - + " \"boost\" : 1.0\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"myTestField\"\n" - + " }\n" - + "}"; + expected = + "{\n" + + " \"bool\" : {\n" + + " \"must_not\" : [\n" + + " {\n" + + " \"exists\" : {\n" + + " \"field\" : \"myTestField\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"myTestField\"\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } @Test public void testGetQueryBuilderFromCriterionFieldToExpand() { - final Criterion singleValueCriterion = new Criterion() - .setField(FIELD_TO_EXPAND) - .setCondition(Condition.EQUAL) - .setValue("") // Ignored - .setValues(new StringArray(ImmutableList.of( - "value1" - ))); + final Criterion singleValueCriterion = + new Criterion() + .setField(FIELD_TO_EXPAND) + .setCondition(Condition.EQUAL) + .setValue("") // Ignored + .setValues(new 
StringArray(ImmutableList.of("value1"))); // Ensure that the query is expanded! QueryBuilder result = ESUtils.getQueryBuilderFromCriterion(singleValueCriterion, false); - String expected = "{\n" - + " \"bool\" : {\n" - + " \"should\" : [\n" - + " {\n" - + " \"terms\" : {\n" - + " \"fieldTags.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"fieldTags\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"terms\" : {\n" - + " \"editedFieldTags.keyword\" : [\n" - + " \"value1\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"editedFieldTags\"\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + String expected = + "{\n" + + " \"bool\" : {\n" + + " \"should\" : [\n" + + " {\n" + + " \"terms\" : {\n" + + " \"fieldTags.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"fieldTags\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"terms\" : {\n" + + " \"editedFieldTags.keyword\" : [\n" + + " \"value1\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"editedFieldTags\"\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); - final Criterion timeseriesField = new Criterion() - .setField(FIELD_TO_EXPAND) - .setCondition(Condition.EQUAL) - .setValue("") // Ignored - .setValues(new StringArray(ImmutableList.of( - "value1", "value2" - ))); + final Criterion timeseriesField = + new Criterion() + .setField(FIELD_TO_EXPAND) + .setCondition(Condition.EQUAL) + .setValue("") // Ignored + .setValues(new StringArray(ImmutableList.of("value1", "value2"))); // Ensure that the query is expanded without keyword. 
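Field expansion is the last behaviour covered here: a criterion on FIELD_TO_EXPAND ("fieldTags") fans out into a bool `should` that also targets the sibling editedFieldTags field, so a filter matches whichever variant carries the value; a small sketch under the same assumptions:

    // Sketch: expandable fields are rewritten into a bool "should" across each
    // sibling field (fieldTags and editedFieldTags here); with the timeseries
    // flag set the expanded clauses also omit ".keyword", as asserted below.
    Criterion tags =
        new Criterion()
            .setField("fieldTags")
            .setCondition(Condition.EQUAL)
            .setValues(new StringArray(ImmutableList.of("value1", "value2")));
    QueryBuilder expanded = ESUtils.getQueryBuilderFromCriterion(tags, true);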
result = ESUtils.getQueryBuilderFromCriterion(timeseriesField, true); expected = "{\n" - + " \"bool\" : {\n" - + " \"should\" : [\n" - + " {\n" - + " \"terms\" : {\n" - + " \"fieldTags\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"fieldTags\"\n" - + " }\n" - + " },\n" - + " {\n" - + " \"terms\" : {\n" - + " \"editedFieldTags\" : [\n" - + " \"value1\",\n" - + " \"value2\"\n" - + " ],\n" - + " \"boost\" : 1.0,\n" - + " \"_name\" : \"editedFieldTags\"\n" - + " }\n" - + " }\n" - + " ],\n" - + " \"adjust_pure_negative\" : true,\n" - + " \"boost\" : 1.0\n" - + " }\n" - + "}"; + + " \"bool\" : {\n" + + " \"should\" : [\n" + + " {\n" + + " \"terms\" : {\n" + + " \"fieldTags\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"fieldTags\"\n" + + " }\n" + + " },\n" + + " {\n" + + " \"terms\" : {\n" + + " \"editedFieldTags\" : [\n" + + " \"value1\",\n" + + " \"value2\"\n" + + " ],\n" + + " \"boost\" : 1.0,\n" + + " \"_name\" : \"editedFieldTags\"\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"adjust_pure_negative\" : true,\n" + + " \"boost\" : 1.0\n" + + " }\n" + + "}"; Assert.assertEquals(result.toString(), expected); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java index 7aa3bb19f0df6..5ea58e3416205 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/SearchUtilsTest.java @@ -1,75 +1,164 @@ package com.linkedin.metadata.search.utils; -import com.linkedin.metadata.query.SearchFlags; -import org.testng.annotations.Test; +import static org.testng.Assert.assertEquals; +import com.linkedin.metadata.query.SearchFlags; import java.util.Set; - -import static org.testng.Assert.assertEquals; +import org.testng.annotations.Test; public class SearchUtilsTest { - @Test - public void testApplyDefaultSearchFlags() { - SearchFlags defaultFlags = new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipAggregates(true) - .setMaxAggValues(1) - .setSkipHighlighting(true); + @Test + public void testApplyDefaultSearchFlags() { + SearchFlags defaultFlags = + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipAggregates(true) + .setMaxAggValues(1) + .setSkipHighlighting(true); - assertEquals(SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), defaultFlags, - "Expected all default values"); + assertEquals( + SearchUtils.applyDefaultSearchFlags(null, "not empty", defaultFlags), + defaultFlags, + "Expected all default values"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(false), - "Expected no default values"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "not empty", + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "Expected no default values"); - 
assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), null, defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(true), - "Expected skip highlight due to query null query"); - for (String query : Set.of("*", "")) { - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), query, defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(true), - String.format("Expected skip highlight due to query string `%s`", query)); - } - - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(true), - "Expected all default values except fulltext"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipCache(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(false).setMaxAggValues(1).setSkipHighlighting(true), - "Expected all default values except skipCache"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(false).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(true), - "Expected all default values except skipAggregates"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(2).setSkipHighlighting(true), - "Expected all default values except maxAggValues"); - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipCache(true).setMaxAggValues(1).setSkipHighlighting(false), - "Expected all default values except skipHighlighting"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + null, + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(true), + "Expected skip highlight due to query null query"); + for (String query : Set.of("*", "")) { + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + query, + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(true), + String.format("Expected skip highlight due to query string `%s`", query)); } - @Test - public void testImmutableDefaults() throws CloneNotSupportedException { - SearchFlags defaultFlags = new SearchFlags() - .setFulltext(true) - .setSkipCache(true) - .setSkipAggregates(true) - .setMaxAggValues(1) - .setSkipHighlighting(true); - SearchFlags copyFlags = 
defaultFlags.copy(); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setFulltext(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except fulltext"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipCache(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(false) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except skipCache"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipAggregates(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(true), + "Expected all default values except skipAggregates"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setMaxAggValues(2), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(2) + .setSkipHighlighting(true), + "Expected all default values except maxAggValues"); + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags().setSkipHighlighting(false), "not empty", defaultFlags), + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipCache(true) + .setMaxAggValues(1) + .setSkipHighlighting(false), + "Expected all default values except skipHighlighting"); + } - assertEquals(SearchUtils.applyDefaultSearchFlags(new SearchFlags().setFulltext(false).setSkipCache(false) - .setSkipAggregates(false).setMaxAggValues(2).setSkipHighlighting(false), "not empty", defaultFlags), - new SearchFlags().setFulltext(false).setSkipAggregates(false).setSkipCache(false).setMaxAggValues(2).setSkipHighlighting(false), - "Expected no default values"); + @Test + public void testImmutableDefaults() throws CloneNotSupportedException { + SearchFlags defaultFlags = + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipAggregates(true) + .setMaxAggValues(1) + .setSkipHighlighting(true); + SearchFlags copyFlags = defaultFlags.copy(); - assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); - } + assertEquals( + SearchUtils.applyDefaultSearchFlags( + new SearchFlags() + .setFulltext(false) + .setSkipCache(false) + .setSkipAggregates(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "not empty", + defaultFlags), + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(false) + .setSkipCache(false) + .setMaxAggValues(2) + .setSkipHighlighting(false), + "Expected no default values"); + assertEquals(defaultFlags, copyFlags, "Expected defaults to be unmodified"); + } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java index e6a9bd7d198f7..8643855162fa7 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/systemmetadata/SystemMetadataServiceTestBase.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.systemmetadata; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; + import 
com.linkedin.metadata.run.AspectRowSummary; import com.linkedin.metadata.run.IngestionRunSummary; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; @@ -8,30 +11,27 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.mxe.SystemMetadata; +import java.util.List; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nonnull; -import java.util.List; - -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; - -abstract public class SystemMetadataServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class SystemMetadataServiceTestBase extends AbstractTestNGSpringContextTests { @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); - private final IndexConvention _indexConvention = new IndexConventionImpl("es_system_metadata_service_test"); + private final IndexConvention _indexConvention = + new IndexConventionImpl("es_system_metadata_service_test"); private ElasticSearchSystemMetadataService _client; @@ -48,8 +48,10 @@ public void wipe() throws Exception { @Nonnull private ElasticSearchSystemMetadataService buildService() { - ESSystemMetadataDAO dao = new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1); - return new ElasticSearchSystemMetadataService(getBulkProcessor(), _indexConvention, dao, getIndexBuilder()); + ESSystemMetadataDAO dao = + new ESSystemMetadataDAO(getSearchClient(), _indexConvention, getBulkProcessor(), 1); + return new ElasticSearchSystemMetadataService( + getBulkProcessor(), _indexConvention, dao, getIndexBuilder()); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java index 407d2ae684ede..921fbac12df85 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/CassandraTimelineServiceTest.java @@ -1,8 +1,10 @@ package com.linkedin.metadata.timeline; -import com.linkedin.metadata.config.PreProcessHooks; +import static org.mockito.Mockito.mock; + import com.datastax.oss.driver.api.core.CqlSession; import com.linkedin.metadata.CassandraTestUtils; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.event.EventProducer; @@ -14,21 +16,19 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.mock; - /** - * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against a Cassandra database. 
+ * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against
+ * a Cassandra database.
  *
- * This class also contains all the test methods where realities of an underlying storage leak into the
- * {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be
- * great to address captured differences.
+ * <p>This class also contains all the test methods where realities of an underlying storage leak
+ * into the {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that
+ * should never happen, and it'd be great to address captured differences.
  */
 public class CassandraTimelineServiceTest extends TimelineServiceTest<CassandraAspectDao> {
   private CassandraContainer _cassandraContainer;
 
-  public CassandraTimelineServiceTest() throws EntityRegistryException {
-  }
+  public CassandraTimelineServiceTest() throws EntityRegistryException {}
 
   @BeforeClass
   public void setupContainer() {
@@ -54,14 +54,20 @@ private void configureComponents() {
     _mockProducer = mock(EventProducer.class);
     PreProcessHooks preProcessHooks = new PreProcessHooks();
     preProcessHooks.setUiEnabled(true);
-    _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true,
-        _mockUpdateIndicesService, preProcessHooks);
+    _entityServiceImpl =
+        new EntityServiceImpl(
+            _aspectDao,
+            _mockProducer,
+            _testEntityRegistry,
+            true,
+            _mockUpdateIndicesService,
+            preProcessHooks);
   }
 
   /**
    * Ideally, all tests would be in the base class, so they're reused between all implementations.
-   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test
-   * to make sure this class will always be discovered.
+   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy
+   * test to make sure this class will always be discovered.
    */
   @Test
   public void obligatoryTest() throws Exception {
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java
index 9e89328715510..4e47e596dddc2 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/EbeanTimelineServiceTest.java
@@ -1,7 +1,9 @@
 package com.linkedin.metadata.timeline;
 
-import com.linkedin.metadata.config.PreProcessHooks;
+import static org.mockito.Mockito.mock;
+
 import com.linkedin.metadata.EbeanTestUtils;
+import com.linkedin.metadata.config.PreProcessHooks;
 import com.linkedin.metadata.entity.EntityServiceImpl;
 import com.linkedin.metadata.entity.ebean.EbeanAspectDao;
 import com.linkedin.metadata.event.EventProducer;
@@ -11,37 +13,42 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static org.mockito.Mockito.mock;
-
 /**
- * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against a relational database.
+ * A class that knows how to configure {@link TimelineServiceTest} to run integration tests against
+ * a relational database.
  *
- * This class also contains all the test methods where realities of an underlying storage leak into the
- * {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that should never happen, and it'd be
- * great to address captured differences.
+ * <p>This class also contains all the test methods where realities of an underlying storage leak
+ * into the {@link TimelineServiceImpl} in the form of subtle behavior differences. Ideally that
+ * should never happen, and it'd be great to address captured differences.
  */
 public class EbeanTimelineServiceTest extends TimelineServiceTest<EbeanAspectDao> {
 
-  public EbeanTimelineServiceTest() throws EntityRegistryException {
-  }
+  public EbeanTimelineServiceTest() throws EntityRegistryException {}
 
   @BeforeMethod
   public void setupTest() {
-    Database server = EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName());
+    Database server =
+        EbeanTestUtils.createTestServer(EbeanTimelineServiceTest.class.getSimpleName());
     _aspectDao = new EbeanAspectDao(server);
     _aspectDao.setConnectionValidated(true);
     _entityTimelineService = new TimelineServiceImpl(_aspectDao, _testEntityRegistry);
     _mockProducer = mock(EventProducer.class);
     PreProcessHooks preProcessHooks = new PreProcessHooks();
     preProcessHooks.setUiEnabled(true);
-    _entityServiceImpl = new EntityServiceImpl(_aspectDao, _mockProducer, _testEntityRegistry, true,
-        _mockUpdateIndicesService, preProcessHooks);
+    _entityServiceImpl =
+        new EntityServiceImpl(
+            _aspectDao,
+            _mockProducer,
+            _testEntityRegistry,
+            true,
+            _mockUpdateIndicesService,
+            preProcessHooks);
   }
 
   /**
    * Ideally, all tests would be in the base class, so they're reused between all implementations.
-   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy test
-   * to make sure this class will always be discovered.
+   * When that's the case - test runner will ignore this class (and its base!) so we keep this dummy
+   * test to make sure this class will always be discovered.
    */
   @Test
   public void obligatoryTest() throws Exception {
diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java
index b3e4b84a4962d..6cea5a78201b7 100644
--- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java
+++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/TimelineServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.timeline;
 
+import static org.mockito.Mockito.*;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.FabricType;
 import com.linkedin.common.urn.DataPlatformUrn;
@@ -26,8 +28,6 @@
 import com.linkedin.schema.SchemaMetadata;
 import com.linkedin.schema.StringType;
 import com.linkedin.util.Pair;
-import org.testng.annotations.Test;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -35,29 +35,30 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
-import static org.mockito.Mockito.*;
-
+import org.testng.annotations.Test;
 
 /**
  * A class to test {@link TimelineServiceImpl}
  *
- * This class is generic to allow same integration tests to be reused to test all supported storage backends.
- * If you're adding another storage backend - you should create a new test class that extends this one providing
- * hard implementations of {@link AspectDao} and implements {@code @BeforeMethod} etc to set up and tear down state.
+ * <p>This class is generic to allow same integration tests to be reused to test all supported
+ * storage backends. If you're adding another storage backend - you should create a new test class
+ * that extends this one providing hard implementations of {@link AspectDao} and implements
+ * {@code @BeforeMethod} etc to set up and tear down state.
  *
- * If you realise that a feature you want to test, sadly, has divergent behaviours between different storage implementations,
- * that you can't rectify - you should make the test method abstract and implement it in all implementations of this class.
+ * <p>If you realise that a feature you want to test, sadly, has divergent behaviours between
+ * different storage implementations, that you can't rectify - you should make the test method
+ * abstract and implement it in all implementations of this class.
  *
  * @param <T_AD> {@link AspectDao} implementation.
  */
-abstract public class TimelineServiceTest<T_AD extends AspectDao> {
+public abstract class TimelineServiceTest<T_AD extends AspectDao> {
   protected T_AD _aspectDao;
   protected final EntityRegistry _snapshotEntityRegistry = new TestEntityRegistry();
   protected final EntityRegistry _configEntityRegistry =
-      new ConfigEntityRegistry(Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml"));
+      new ConfigEntityRegistry(
+          Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml"));
   protected final EntityRegistry _testEntityRegistry =
       new MergedEntityRegistry(_snapshotEntityRegistry).apply(_configEntityRegistry);
   protected TimelineServiceImpl _entityTimelineService;
@@ -65,14 +66,16 @@ abstract public class TimelineServiceTest<T_AD extends AspectDao> {
   protected EventProducer _mockProducer;
   protected UpdateIndicesService _mockUpdateIndicesService = mock(UpdateIndicesService.class);
 
-  protected TimelineServiceTest() throws EntityRegistryException {
-  }
+  protected TimelineServiceTest() throws EntityRegistryException {}
 
   @Test
   public void testGetTimeline() throws Exception {
-    Urn entityUrn = Urn.createFromString(
-        "urn:li:dataset:(urn:li:dataPlatform:hive,fooDb.fooTable" + System.currentTimeMillis() + ",PROD)");
+    Urn entityUrn =
+        Urn.createFromString(
+            "urn:li:dataset:(urn:li:dataPlatform:hive,fooDb.fooTable"
+                + System.currentTimeMillis()
+                + ",PROD)");
     String aspectName = "schemaMetadata";
 
     ArrayList timestamps = new ArrayList();
@@ -82,39 +85,49 @@ public void testGetTimeline() throws Exception {
       SchemaMetadata schemaMetadata = getSchemaMetadata("This is the new description for day " + i);
       AuditStamp daysAgo = createTestAuditStamp(i);
       timestamps.add(daysAgo);
-      _entityServiceImpl.ingestAspects(entityUrn, Collections.singletonList(new Pair<>(aspectName, schemaMetadata)),
-          daysAgo, getSystemMetadata(daysAgo, "run-" + i));
+      _entityServiceImpl.ingestAspects(
+          entityUrn,
+          Collections.singletonList(new Pair<>(aspectName, schemaMetadata)),
+          daysAgo,
+          getSystemMetadata(daysAgo, "run-" + i));
     }
 
     Map latestAspects =
-        _entityServiceImpl.getLatestAspectsForUrn(entityUrn, new HashSet<>(Arrays.asList(aspectName)));
+        _entityServiceImpl.getLatestAspectsForUrn(
+            entityUrn, new HashSet<>(Arrays.asList(aspectName)));
 
     Set elements = new HashSet<>();
     elements.add(ChangeCategory.TECHNICAL_SCHEMA);
     List changes =
-        _entityTimelineService.getTimeline(entityUrn, elements, createTestAuditStamp(10).getTime(), 0, null, null,
-            false);
-    //Assert.assertEquals(changes.size(), 7);
-    //Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), ChangeOperation.ADD);
-    //Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(0).getTime().longValue());
-    //Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY);
-    //Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(1).getTime().longValue());
+        _entityTimelineService.getTimeline(
+            entityUrn, elements, createTestAuditStamp(10).getTime(), 0, null, null, false);
+    // Assert.assertEquals(changes.size(), 7);
+    // Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(),
+    // ChangeOperation.ADD);
+    // Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(0).getTime().longValue());
+    //
Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.MODIFY); + // Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(1).getTime().longValue()); changes = - _entityTimelineService.getTimeline(entityUrn, elements, timestamps.get(4).getTime() - 3000L, 0, null, null, - false); - //Assert.assertEquals(changes.size(), 3); - //Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY); - //Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(4).getTime().longValue()); - //Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), ChangeOperation.MODIFY); - //Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(5).getTime().longValue()); + _entityTimelineService.getTimeline( + entityUrn, elements, timestamps.get(4).getTime() - 3000L, 0, null, null, false); + // Assert.assertEquals(changes.size(), 3); + // Assert.assertEquals(changes.get(0).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.MODIFY); + // Assert.assertEquals(changes.get(0).getTimestamp(), timestamps.get(4).getTime().longValue()); + // Assert.assertEquals(changes.get(1).getChangeEvents().get(0).getChangeType(), + // ChangeOperation.MODIFY); + // Assert.assertEquals(changes.get(1).getTimestamp(), timestamps.get(5).getTime().longValue()); } private static AuditStamp createTestAuditStamp(int daysAgo) { try { Long timestamp = System.currentTimeMillis() - (daysAgo * 24 * 60 * 60 * 1000L); Long timestampRounded = 1000 * (timestamp / 1000); - return new AuditStamp().setTime(timestampRounded).setActor(Urn.createFromString("urn:li:principal:tester")); + return new AuditStamp() + .setTime(timestampRounded) + .setActor(Urn.createFromString("urn:li:principal:tester")); } catch (Exception e) { throw new RuntimeException("Failed to create urn"); } @@ -128,17 +141,22 @@ private SystemMetadata getSystemMetadata(AuditStamp twoDaysAgo, String s) { } private SchemaMetadata getSchemaMetadata(String s) { - SchemaField field1 = new SchemaField() - .setFieldPath("column1") - .setDescription(s) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setNativeDataType("string"); + SchemaField field1 = + new SchemaField() + .setFieldPath("column1") + .setDescription(s) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNativeDataType("string"); SchemaFieldArray fieldArray = new SchemaFieldArray(); fieldArray.add(field1); - return new SchemaMetadata().setSchemaName("testSchema") - .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema("foo"))) + return new SchemaMetadata() + .setSchemaName("testSchema") + .setPlatformSchema( + SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema("foo"))) .setPlatform(new DataPlatformUrn("hive")) .setHash("") .setVersion(0L) diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java b/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java index 75508320abdce..3e9f1cd0fe092 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeline/eventgenerator/SchemaMetadataChangeEventGeneratorTest.java @@ -1,44 +1,50 @@ package com.linkedin.metadata.timeline.eventgenerator; +import 
static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.timeline.data.ChangeEvent; import com.linkedin.mxe.SystemMetadata; import com.linkedin.restli.internal.server.util.DataMapUtils; import com.linkedin.schema.SchemaMetadata; -import org.apache.commons.io.IOUtils; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.Test; - import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; - -import static org.testng.AssertJUnit.assertEquals; +import org.apache.commons.io.IOUtils; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; public class SchemaMetadataChangeEventGeneratorTest extends AbstractTestNGSpringContextTests { - @Test - public void testDelete() throws Exception { - SchemaMetadataChangeEventGenerator test = new SchemaMetadataChangeEventGenerator(); - - Urn urn = Urn.createFromString( - "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); - String entity = "dataset"; - String aspect = "schemaMetadata"; - AuditStamp auditStamp = new AuditStamp() - .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system")) - .setTime(1683829509553L); - Aspect from = new Aspect<>(DataMapUtils.read(IOUtils.toInputStream(TEST_OBJECT, StandardCharsets.UTF_8), - SchemaMetadata.class, Map.of()), new SystemMetadata()); - Aspect to = new Aspect<>(null, new SystemMetadata()); - - List actual = test.getChangeEvents(urn, entity, aspect, from, to, auditStamp); - - assertEquals(14, actual.size()); - } - - //CHECKSTYLE:OFF - private static final String TEST_OBJECT = "{\"platformSchema\":{\"com.linkedin.schema.KafkaSchema\":{\"documentSchema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SampleHdfsSchema\\\",\\\"namespace\\\":\\\"com.linkedin.dataset\\\",\\\"doc\\\":\\\"Sample HDFS dataset\\\",\\\"fields\\\":[{\\\"name\\\":\\\"field_foo\\\",\\\"type\\\":[\\\"string\\\"]},{\\\"name\\\":\\\"field_bar\\\",\\\"type\\\":[\\\"boolean\\\"]}]}\"}},\"created\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"lastModified\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"fields\":[{\"nullable\":false,\"fieldPath\":\"shipment_info\",\"description\":\"Shipment info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.date\",\"description\":\"Shipment info date description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.DateType\":{}}},\"recursive\":false,\"nativeDataType\":\"Date\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.target\",\"description\":\"Shipment info target description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"text\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.destination\",\"description\":\"Shipment info destination description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info\",\"description\":\"Shipment info geo_info 
description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lat\",\"description\":\"Shipment info geo_info lat\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lng\",\"description\":\"Shipment info geo_info lng\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"}],\"schemaName\":\"SampleHdfsSchema\",\"version\":0,\"hash\":\"\",\"platform\":\"urn:li:dataPlatform:hdfs\"}"; - //CHECKSTYLE:ON + @Test + public void testDelete() throws Exception { + SchemaMetadataChangeEventGenerator test = new SchemaMetadataChangeEventGenerator(); + + Urn urn = + Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + String entity = "dataset"; + String aspect = "schemaMetadata"; + AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString("urn:li:corpuser:__datahub_system")) + .setTime(1683829509553L); + Aspect from = + new Aspect<>( + DataMapUtils.read( + IOUtils.toInputStream(TEST_OBJECT, StandardCharsets.UTF_8), + SchemaMetadata.class, + Map.of()), + new SystemMetadata()); + Aspect to = new Aspect<>(null, new SystemMetadata()); + + List actual = test.getChangeEvents(urn, entity, aspect, from, to, auditStamp); + + assertEquals(14, actual.size()); + } + + // CHECKSTYLE:OFF + private static final String TEST_OBJECT = + "{\"platformSchema\":{\"com.linkedin.schema.KafkaSchema\":{\"documentSchema\":\"{\\\"type\\\":\\\"record\\\",\\\"name\\\":\\\"SampleHdfsSchema\\\",\\\"namespace\\\":\\\"com.linkedin.dataset\\\",\\\"doc\\\":\\\"Sample HDFS dataset\\\",\\\"fields\\\":[{\\\"name\\\":\\\"field_foo\\\",\\\"type\\\":[\\\"string\\\"]},{\\\"name\\\":\\\"field_bar\\\",\\\"type\\\":[\\\"boolean\\\"]}]}\"}},\"created\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"lastModified\":{\"actor\":\"urn:li:corpuser:jdoe\",\"time\":1674291843000},\"fields\":[{\"nullable\":false,\"fieldPath\":\"shipment_info\",\"description\":\"Shipment info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.date\",\"description\":\"Shipment info date description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.DateType\":{}}},\"recursive\":false,\"nativeDataType\":\"Date\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.target\",\"description\":\"Shipment info target description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"text\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.destination\",\"description\":\"Shipment info destination description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.StringType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info\",\"description\":\"Shipment info geo_info description\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.RecordType\":{}}},\"recursive\":false,\"nativeDataType\":\"varchar(100)\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lat\",\"description\":\"Shipment info geo_info 
lat\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"},{\"nullable\":false,\"fieldPath\":\"shipment_info.geo_info.lng\",\"description\":\"Shipment info geo_info lng\",\"isPartOfKey\":false,\"type\":{\"type\":{\"com.linkedin.schema.NumberType\":{}}},\"recursive\":false,\"nativeDataType\":\"float\"}],\"schemaName\":\"SampleHdfsSchema\",\"version\":0,\"hash\":\"\",\"platform\":\"urn:li:dataPlatform:hdfs\"}"; + // CHECKSTYLE:ON } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java index 1362a0f69eff2..13236e302c259 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/timeseries/search/TimeseriesAspectServiceTestBase.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.timeseries.search; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; +import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + import com.datahub.test.BatchType; import com.datahub.test.ComplexNestedRecord; import com.datahub.test.TestEntityComponentProfile; @@ -46,37 +54,35 @@ import com.linkedin.timeseries.GroupingBucketType; import com.linkedin.timeseries.TimeWindowSize; import com.linkedin.timeseries.TimeseriesIndexSizeResult; -import org.opensearch.client.RestHighLevelClient; -import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.Calendar; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; +import org.opensearch.client.RestHighLevelClient; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; -import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; -import static io.datahubproject.test.search.SearchTestUtils.syncAfterWrite; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.fail; - -abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpringContextTests { +public abstract class TimeseriesAspectServiceTestBase extends AbstractTestNGSpringContextTests { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + 
OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } private static final String ENTITY_NAME = "testEntity"; private static final String ASPECT_NAME = "testEntityProfile"; - private static final Urn TEST_URN = new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table1"); + private static final Urn TEST_URN = + new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table1"); private static final int NUM_PROFILES = 100; private static final long TIME_INCREMENT = 3600000; // hour in ms. private static final String CONTENT_TYPE = "application/json"; @@ -85,13 +91,13 @@ abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpri private static final String ES_FIELD_STAT = "stat"; @Nonnull - abstract protected RestHighLevelClient getSearchClient(); + protected abstract RestHighLevelClient getSearchClient(); @Nonnull - abstract protected ESBulkProcessor getBulkProcessor(); + protected abstract ESBulkProcessor getBulkProcessor(); @Nonnull - abstract protected ESIndexBuilder getIndexBuilder(); + protected abstract ESIndexBuilder getIndexBuilder(); private EntityRegistry _entityRegistry; private IndexConvention _indexConvention; @@ -107,8 +113,12 @@ abstract public class TimeseriesAspectServiceTestBase extends AbstractTestNGSpri @BeforeClass public void setup() { - _entityRegistry = new ConfigEntityRegistry(new DataSchemaFactory("com.datahub.test"), - TestEntityProfile.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + _entityRegistry = + new ConfigEntityRegistry( + new DataSchemaFactory("com.datahub.test"), + TestEntityProfile.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yml")); _indexConvention = new IndexConventionImpl("es_timeseries_aspect_service_test"); _elasticSearchTimeseriesAspectService = buildService(); _elasticSearchTimeseriesAspectService.configure(); @@ -118,9 +128,13 @@ public void setup() { @Nonnull private ElasticSearchTimeseriesAspectService buildService() { - return new ElasticSearchTimeseriesAspectService(getSearchClient(), _indexConvention, - new TimeseriesAspectIndexBuilders(getIndexBuilder(), _entityRegistry, - _indexConvention), _entityRegistry, getBulkProcessor(), 1); + return new ElasticSearchTimeseriesAspectService( + getSearchClient(), + _indexConvention, + new TimeseriesAspectIndexBuilders(getIndexBuilder(), _entityRegistry, _indexConvention), + _entityRegistry, + getBulkProcessor(), + 1); } /* @@ -128,10 +142,13 @@ private ElasticSearchTimeseriesAspectService buildService() { */ private void upsertDocument(TestEntityProfile dp, Urn urn) throws JsonProcessingException { - Map documents = TimeseriesAspectTransformer.transform(urn, dp, _aspectSpec, null); + Map documents = + TimeseriesAspectTransformer.transform(urn, dp, _aspectSpec, null); assertEquals(documents.size(), 3); documents.forEach( - (key, value) -> _elasticSearchTimeseriesAspectService.upsertDocument(ENTITY_NAME, ASPECT_NAME, key, value)); + (key, value) -> + _elasticSearchTimeseriesAspectService.upsertDocument( + ENTITY_NAME, ASPECT_NAME, key, value)); } private TestEntityProfile makeTestProfile(long eventTime, long stat, String messageId) { @@ -140,7 +157,8 @@ private TestEntityProfile makeTestProfile(long eventTime, long stat, String mess testEntityProfile.setStat(stat); testEntityProfile.setStrStat(String.valueOf(stat)); testEntityProfile.setStrArray(new StringArray("sa_" + stat, "sa_" + (stat + 1))); - 
testEntityProfile.setEventGranularity(new TimeWindowSize().setUnit(CalendarInterval.DAY).setMultiple(1)); + testEntityProfile.setEventGranularity( + new TimeWindowSize().setUnit(CalendarInterval.DAY).setMultiple(1)); if (messageId != null) { testEntityProfile.setMessageId(messageId); } @@ -152,14 +170,17 @@ private TestEntityProfile makeTestProfile(long eventTime, long stat, String mess TestEntityComponentProfile componentProfile2 = new TestEntityComponentProfile(); componentProfile2.setKey("col2"); componentProfile2.setStat(stat + 2); - testEntityProfile.setComponentProfiles(new TestEntityComponentProfileArray(componentProfile1, componentProfile2)); + testEntityProfile.setComponentProfiles( + new TestEntityComponentProfileArray(componentProfile1, componentProfile2)); StringMap stringMap1 = new StringMap(); stringMap1.put("p_key1", "p_val1"); StringMap stringMap2 = new StringMap(); stringMap2.put("p_key2", "p_val2"); - ComplexNestedRecord nestedRecord = new ComplexNestedRecord().setType(BatchType.PARTITION_BATCH) - .setPartitions(new StringMapArray(stringMap1, stringMap2)); + ComplexNestedRecord nestedRecord = + new ComplexNestedRecord() + .setType(BatchType.PARTITION_BATCH) + .setPartitions(new StringMapArray(stringMap1, stringMap2)); testEntityProfile.setAComplexNestedRecord(nestedRecord); return testEntityProfile; @@ -172,57 +193,74 @@ public void testUpsertProfiles() throws Exception { _startTime = _startTime - _startTime % 86400000; // Create the testEntity profiles that we would like to use for testing. TestEntityProfile firstProfile = makeTestProfile(_startTime, 20, null); - Stream testEntityProfileStream = Stream.iterate(firstProfile, - (TestEntityProfile prev) -> makeTestProfile(prev.getTimestampMillis() + TIME_INCREMENT, prev.getStat() + 10, - null)); - - _testEntityProfiles = testEntityProfileStream.limit(NUM_PROFILES) - .collect(Collectors.toMap(TestEntityProfile::getTimestampMillis, Function.identity())); + Stream testEntityProfileStream = + Stream.iterate( + firstProfile, + (TestEntityProfile prev) -> + makeTestProfile( + prev.getTimestampMillis() + TIME_INCREMENT, prev.getStat() + 10, null)); + + _testEntityProfiles = + testEntityProfileStream + .limit(NUM_PROFILES) + .collect(Collectors.toMap(TestEntityProfile::getTimestampMillis, Function.identity())); Long endTime = _startTime + (NUM_PROFILES - 1) * TIME_INCREMENT; assertNotNull(_testEntityProfiles.get(_startTime)); assertNotNull(_testEntityProfiles.get(endTime)); // Upsert the documents into the index. - _testEntityProfiles.values().forEach(x -> { - try { - upsertDocument(x, TEST_URN); - } catch (JsonProcessingException jsonProcessingException) { - jsonProcessingException.printStackTrace(); - } - }); + _testEntityProfiles + .values() + .forEach( + x -> { + try { + upsertDocument(x, TEST_URN); + } catch (JsonProcessingException jsonProcessingException) { + jsonProcessingException.printStackTrace(); + } + }); syncAfterWrite(getBulkProcessor()); } @Test(groups = "upsertUniqueMessageId") public void testUpsertProfilesWithUniqueMessageIds() throws Exception { - // Create the testEntity profiles that have the same value for timestampMillis, but use unique message ids. + // Create the testEntity profiles that have the same value for timestampMillis, but use unique + // message ids. // We should preserve all the documents we are going to upsert in the index. 
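+    // (All three profiles below share one timestampMillis; the distinct messageIds are what keep
+    // them from overwriting each other on upsert, which the final size assertion relies on.)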
final long curTimeMillis = Calendar.getInstance().getTimeInMillis(); final long startTime = curTimeMillis - curTimeMillis % 86400000; final TestEntityProfile firstProfile = makeTestProfile(startTime, 20, "20"); - Stream testEntityProfileStream = Stream.iterate(firstProfile, - (TestEntityProfile prev) -> makeTestProfile(prev.getTimestampMillis(), prev.getStat() + 10, - String.valueOf(prev.getStat() + 10))); - - final List testEntityProfiles = testEntityProfileStream.limit(3).collect(Collectors.toList()); + Stream testEntityProfileStream = + Stream.iterate( + firstProfile, + (TestEntityProfile prev) -> + makeTestProfile( + prev.getTimestampMillis(), + prev.getStat() + 10, + String.valueOf(prev.getStat() + 10))); + + final List testEntityProfiles = + testEntityProfileStream.limit(3).collect(Collectors.toList()); // Upsert the documents into the index. - final Urn urn = new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table2"); - testEntityProfiles.forEach(x -> { - try { - upsertDocument(x, urn); - } catch (JsonProcessingException jsonProcessingException) { - jsonProcessingException.printStackTrace(); - } - }); + final Urn urn = + new TestEntityUrn("acryl", "testElasticSearchTimeseriesAspectService", "table2"); + testEntityProfiles.forEach( + x -> { + try { + upsertDocument(x, urn); + } catch (JsonProcessingException jsonProcessingException) { + jsonProcessingException.printStackTrace(); + } + }); syncAfterWrite(getBulkProcessor()); List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(urn, ENTITY_NAME, ASPECT_NAME, null, null, - testEntityProfiles.size(), null); + _elasticSearchTimeseriesAspectService.getAspectValues( + urn, ENTITY_NAME, ASPECT_NAME, null, null, testEntityProfiles.size(), null); assertEquals(resultAspects.size(), testEntityProfiles.size()); } @@ -232,8 +270,9 @@ public void testUpsertProfilesWithUniqueMessageIds() throws Exception { private void validateAspectValue(EnvelopedAspect envelopedAspectResult) { TestEntityProfile actualProfile = - (TestEntityProfile) GenericRecordUtils.deserializeAspect(envelopedAspectResult.getAspect().getValue(), - CONTENT_TYPE, _aspectSpec); + (TestEntityProfile) + GenericRecordUtils.deserializeAspect( + envelopedAspectResult.getAspect().getValue(), CONTENT_TYPE, _aspectSpec); TestEntityProfile expectedProfile = _testEntityProfiles.get(actualProfile.getTimestampMillis()); assertNotNull(expectedProfile); assertEquals(actualProfile.getStat(), expectedProfile.getStat()); @@ -248,18 +287,23 @@ private void validateAspectValues(List aspects, long numResults @Test(groups = "getAspectValues", dependsOnGroups = "upsert") public void testGetAspectTimeseriesValuesAll() { List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, - NUM_PROFILES, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, null); validateAspectValues(resultAspects, NUM_PROFILES); TestEntityProfile firstProfile = - (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(0).getAspect().getValue(), - CONTENT_TYPE, _aspectSpec); + (TestEntityProfile) + GenericRecordUtils.deserializeAspect( + resultAspects.get(0).getAspect().getValue(), CONTENT_TYPE, _aspectSpec); TestEntityProfile lastProfile = - (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(resultAspects.size() - 1).getAspect().getValue(), - CONTENT_TYPE, _aspectSpec); - - // Now verify that the first 
index is the one with the highest stat value, and the last the one with the lower.
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
+                CONTENT_TYPE,
+                _aspectSpec);
+
+    // Now verify that the first index is the one with the highest stat value, and the last is the
+    // one with the lowest.
     assertEquals((long) firstProfile.getStat(), 20 + (NUM_PROFILES - 1) * 10);
     assertEquals((long) lastProfile.getStat(), 20);
   }
@@ -267,31 +311,43 @@ public void testGetAspectTimeseriesValuesAll() {
   @Test(groups = "getAspectValues", dependsOnGroups = "upsert")
   public void testGetAspectTimeseriesValuesAllSorted() {
     List resultAspects =
-        _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null,
-            NUM_PROFILES, null, new SortCriterion().setField("stat").setOrder(SortOrder.ASCENDING));
+        _elasticSearchTimeseriesAspectService.getAspectValues(
+            TEST_URN,
+            ENTITY_NAME,
+            ASPECT_NAME,
+            null,
+            null,
+            NUM_PROFILES,
+            null,
+            new SortCriterion().setField("stat").setOrder(SortOrder.ASCENDING));
     validateAspectValues(resultAspects, NUM_PROFILES);
     TestEntityProfile firstProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(0).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(0).getAspect().getValue(), CONTENT_TYPE, _aspectSpec);
     TestEntityProfile lastProfile =
-        (TestEntityProfile) GenericRecordUtils.deserializeAspect(resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
-            CONTENT_TYPE, _aspectSpec);
-
-    // Now verify that the first index is the one with the highest stat value, and the last the one with the lower.
+        (TestEntityProfile)
+            GenericRecordUtils.deserializeAspect(
+                resultAspects.get(resultAspects.size() - 1).getAspect().getValue(),
+                CONTENT_TYPE,
+                _aspectSpec);
+
+    // Now verify that the first index is the one with the lowest stat value, and the last is the
+    // one with the highest.
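+    // (Together with testGetAspectTimeseriesValuesAll above, this suggests the default, unsorted
+    // read returns documents newest-first, and an explicit SortCriterion overrides that order.)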
assertEquals((long) firstProfile.getStat(), 20); assertEquals((long) lastProfile.getStat(), 20 + (NUM_PROFILES - 1) * 10); - } @Test(groups = "getAspectValues", dependsOnGroups = "upsert") public void testGetAspectTimeseriesValuesWithFilter() { Filter filter = new Filter(); - Criterion hasStatEqualsTwenty = new Criterion().setField("stat").setCondition(Condition.EQUAL).setValue("20"); + Criterion hasStatEqualsTwenty = + new Criterion().setField("stat").setCondition(Condition.EQUAL).setValue("20"); filter.setCriteria(new CriterionArray(hasStatEqualsTwenty)); List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, - NUM_PROFILES, filter); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, filter); validateAspectValues(resultAspects, 1); } @@ -299,8 +355,14 @@ public void testGetAspectTimeseriesValuesWithFilter() { public void testGetAspectTimeseriesValuesSubRangeInclusiveOverlap() { int expectedNumRows = 10; List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, _startTime, - _startTime + TIME_INCREMENT * (expectedNumRows - 1), expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime, + _startTime + TIME_INCREMENT * (expectedNumRows - 1), + expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } @@ -308,9 +370,14 @@ public void testGetAspectTimeseriesValuesSubRangeInclusiveOverlap() { public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlap() { int expectedNumRows = 10; List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, - _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, - expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime + TIME_INCREMENT / 2, + _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, + expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } @@ -318,9 +385,14 @@ public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlap() { public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlapLatestValueOnly() { int expectedNumRows = 1; List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, - _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, - expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime + TIME_INCREMENT / 2, + _startTime + TIME_INCREMENT * expectedNumRows + TIME_INCREMENT / 2, + expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } @@ -328,17 +400,25 @@ public void testGetAspectTimeseriesValuesSubRangeExclusiveOverlapLatestValueOnly public void testGetAspectTimeseriesValuesExactlyOneResponse() { int expectedNumRows = 1; List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(TEST_URN, ENTITY_NAME, ASPECT_NAME, - _startTime + TIME_INCREMENT / 2, _startTime + TIME_INCREMENT * 3 / 2, expectedNumRows, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + TEST_URN, + ENTITY_NAME, + ASPECT_NAME, + _startTime + TIME_INCREMENT / 2, + _startTime + TIME_INCREMENT * 3 / 2, + 
expectedNumRows, + null); validateAspectValues(resultAspects, expectedNumRows); } - @Test(groups = {"getAspectValues"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAspectValues"}, + dependsOnGroups = {"upsert"}) public void testGetAspectTimeseriesValueMissingUrn() { Urn nonExistingUrn = new TestEntityUrn("missing", "missing", "missing"); List resultAspects = - _elasticSearchTimeseriesAspectService.getAspectValues(nonExistingUrn, ENTITY_NAME, ASPECT_NAME, null, null, - NUM_PROFILES, null); + _elasticSearchTimeseriesAspectService.getAspectValues( + nonExistingUrn, ENTITY_NAME, ASPECT_NAME, null, null, NUM_PROFILES, null); validateAspectValues(resultAspects, 0); } @@ -347,71 +427,109 @@ public void testGetAspectTimeseriesValueMissingUrn() { */ /* Latest Aggregation Tests */ - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat"); // Grouping bucket is only timestamp filed. 
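+    // (A one-day DATE_GROUPING_BUCKET folds all 24 hourly documents into a single row keyed by
+    // the day start, so the LATEST aggregation resolves to the hour-23 value asserted below.)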
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), - _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), + _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStat().toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestAComplexNestedRecordForDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("aComplexNestedRecord"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("aComplexNestedRecord"); // Grouping bucket is only timestamp filed. 
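+    // (LATEST is not limited to numeric fields: the record comes back serialized, and the test
+    // round-trips it through OBJECT_MAPPER below before comparing.)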
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_aComplexNestedRecord")); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "latest_aComplexNestedRecord")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "record")); // Validate rows @@ -421,86 +539,121 @@ public void testGetAggregatedStatsLatestAComplexNestedRecordForDay1() { try { ComplexNestedRecord latestAComplexNestedRecord = OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), ComplexNestedRecord.class); - assertEquals(latestAComplexNestedRecord, + assertEquals( + latestAComplexNestedRecord, _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getAComplexNestedRecord()); } catch (JsonProcessingException e) { fail("Unexpected exception thrown" + e); } } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStrArrayDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("strArray"); // Grouping bucket is only timestamp filed. 
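+    // (Array fields likewise come back serialized; the row value is validated via an
+    // OBJECT_MAPPER round-trip into StringArray rather than by raw string comparison.)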
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + "strArray")); + assertEquals( + resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + "strArray")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "array")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - StringArray expectedStrArray = _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStrArray(); - //assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), + StringArray expectedStrArray = + _testEntityProfiles.get(_startTime + 23 * TIME_INCREMENT).getStrArray(); + // assertEquals(resultTable.getRows(), new StringArrayArray(new + // StringArray(_startTime.toString(), // expectedStrArray.toString()))); // Test array construction using object mapper as well try { - StringArray actualStrArray = OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), StringArray.class); + StringArray actualStrArray = + OBJECT_MAPPER.readValue(resultTable.getRows().get(0).get(1), StringArray.class); assertEquals(actualStrArray, expectedStrArray); } catch (JsonProcessingException e) { e.printStackTrace(); } } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForTwoDays() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 47 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 47 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec 
latestStatAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows @@ -508,132 +661,223 @@ public void testGetAggregatedStatsLatestStatForTwoDays() { assertEquals(resultTable.getRows().size(), 2); Long latestDay1Ts = _startTime + 23 * TIME_INCREMENT; Long latestDay2Ts = _startTime + 47 * TIME_INCREMENT; - assertEquals(resultTable.getRows(), new StringArrayArray( - new StringArray(_startTime.toString(), _testEntityProfiles.get(latestDay1Ts).getStat().toString()), - new StringArray(String.valueOf(_startTime + 24 * TIME_INCREMENT), - _testEntityProfiles.get(latestDay2Ts).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), _testEntityProfiles.get(latestDay1Ts).getStat().toString()), + new StringArray( + String.valueOf(_startTime + 24 * TIME_INCREMENT), + _testEntityProfiles.get(latestDay2Ts).getStat().toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForFirst10HoursOfDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); 
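+    // Every aggregation test here rebuilds the same urn + [start, end] time-range filter, with
+    // only the end bound changing. A shared helper would remove the repetition; a minimal sketch,
+    // with urnTimeRangeFilter being a hypothetical name rather than part of this change:
+    //   private Filter urnTimeRangeFilter(Urn urn, long startMillis, long endMillis) {
+    //     Criterion hasUrn =
+    //         new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(urn.toString());
+    //     Criterion afterStart =
+    //         new Criterion()
+    //             .setField(ES_FIELD_TIMESTAMP)
+    //             .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+    //             .setValue(String.valueOf(startMillis));
+    //     Criterion beforeEnd =
+    //         new Criterion()
+    //             .setField(ES_FIELD_TIMESTAMP)
+    //             .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+    //             .setValue(String.valueOf(endMillis));
+    //     return QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrn, afterStart, beforeEnd));
+    //   }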
// Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), + new StringArray(ES_FIELD_TIMESTAMP, "latest_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), - _testEntityProfiles.get(_startTime + 9 * TIME_INCREMENT).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), + _testEntityProfiles.get(_startTime + 9 * TIME_INCREMENT).getStat().toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForCol1Day1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(lastEntryTimeStamp)); Criterion hasCol1 = - new Criterion().setField("componentProfiles.key").setCondition(Condition.EQUAL).setValue("col1"); + new Criterion() + .setField("componentProfiles.key") + .setCondition(Condition.EQUAL) + .setValue("col1"); - Filter filter = QueryUtils.getFilterFromCriteria( - ImmutableList.of(hasUrnCriterion, hasCol1, startTimeCriterion, endTimeCriterion)); + Filter filter = + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, hasCol1, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value 
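+    // (This test adds a second, string-valued grouping bucket on componentProfiles.key, giving
+    // one row per (day, key) pair; the hasCol1 criterion then narrows the result to col1 alone.)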
AggregationSpec latestStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp filed + componentProfiles.key. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "long")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 1); - assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "col1", - _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(0).getStat().toString()))); + assertEquals( + resultTable.getRows(), + new StringArrayArray( + new StringArray( + _startTime.toString(), + "col1", + _testEntityProfiles + .get(lastEntryTimeStamp) + .getComponentProfiles() + .get(0) + .getStat() + .toString()))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsLatestStatForAllColumnsDay1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) 
+ .setValue(String.valueOf(lastEntryTimeStamp)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec latestStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp filed + componentProfiles.key. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{latestStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {latestStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "latest_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "long")); // Validate rows - StringArray expectedRow1 = new StringArray(_startTime.toString(), "col1", - _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(0).getStat().toString()); - StringArray expectedRow2 = new StringArray(_startTime.toString(), "col2", - _testEntityProfiles.get(lastEntryTimeStamp).getComponentProfiles().get(1).getStat().toString()); + StringArray expectedRow1 = + new StringArray( + _startTime.toString(), + "col1", + _testEntityProfiles + .get(lastEntryTimeStamp) + .getComponentProfiles() + .get(0) + .getStat() + .toString()); + StringArray expectedRow2 = + new StringArray( + _startTime.toString(), + "col2", + _testEntityProfiles + .get(lastEntryTimeStamp) + .getComponentProfiles() + .get(1) + .getStat() + .toString()); assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 2); @@ -641,33 +885,48 @@ public void testGetAggregatedStatsLatestStatForAllColumnsDay1() { } /* Sum Aggregation Tests */ - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatForFirst10HoursOfDay1() { Criterion hasUrnCriterion = new 
Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 9 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate the sum of stat value AggregationSpec sumAggregationSpec = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("stat"); // Grouping bucket is only timestamp filed. - GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{sumAggregationSpec}, filter, new GroupingBucket[]{timestampBucket}); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {sumAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "sum_" + ES_FIELD_STAT)); + assertEquals( + resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "sum_" + ES_FIELD_STAT)); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "double")); // Validate rows @@ -675,45 +934,68 @@ public void testGetAggregatedStatsSumStatForFirst10HoursOfDay1() { assertEquals(resultTable.getRows().size(), 1); // value is 20+30+40+... up to 10 terms = 650 // TODO: Compute this caching the documents. 
- assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), String.valueOf(650)))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsSumStatForCol2Day1() { Long lastEntryTimeStamp = _startTime + 23 * TIME_INCREMENT; Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(lastEntryTimeStamp)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(lastEntryTimeStamp)); Criterion hasCol2 = - new Criterion().setField("componentProfiles.key").setCondition(Condition.EQUAL).setValue("col2"); + new Criterion() + .setField("componentProfiles.key") + .setCondition(Condition.EQUAL) + .setValue("col2"); - Filter filter = QueryUtils.getFilterFromCriteria( - ImmutableList.of(hasUrnCriterion, hasCol2, startTimeCriterion, endTimeCriterion)); + Filter filter = + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, hasCol2, startTimeCriterion, endTimeCriterion)); // Aggregate the sum of stat value AggregationSpec sumStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("componentProfiles.stat"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("componentProfiles.stat"); // Grouping bucket is timestamp filed + componentProfiles.key. 
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP) - .setType(GroupingBucketType.DATE_GROUPING_BUCKET) - .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); + GroupingBucket timestampBucket = + new GroupingBucket() + .setKey(ES_FIELD_TIMESTAMP) + .setType(GroupingBucketType.DATE_GROUPING_BUCKET) + .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY)); GroupingBucket componentProfilesBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{sumStatAggregationSpec}, filter, - new GroupingBucket[]{timestampBucket, componentProfilesBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {sumStatAggregationSpec}, + filter, + new GroupingBucket[] {timestampBucket, componentProfilesBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), - new StringArray(ES_FIELD_TIMESTAMP, "componentProfiles.key", "sum_" + "componentProfiles.stat")); + assertEquals( + resultTable.getColumnNames(), + new StringArray( + ES_FIELD_TIMESTAMP, "componentProfiles.key", "sum_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("long", "string", "double")); // Validate rows @@ -721,115 +1003,166 @@ public void testGetAggregatedStatsSumStatForCol2Day1() { assertEquals(resultTable.getRows().size(), 1); // value = 22+32+42+... 24 terms = 3288 // TODO: Compute this caching the documents. 
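+    // (Arithmetic check: col2 stores stat + 2, i.e. 22, 32, ..., 252 across 24 hours, so the sum
+    // is 24 * (22 + 252) / 2 = 3288.)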
- assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "col2", String.valueOf(3288)))); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetAggregatedStatsCardinalityAggStrStatDay1() { // Filter is only on the urn Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); // Aggregate on latest stat value AggregationSpec cardinalityStatAggregationSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("strStat"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("strStat"); // Grouping bucket is only timestamp filed. 
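+    // (strStat is the stringified hourly stat, so day 1 contributes 24 distinct values, "20"
+    // through "250", which matches the cardinality of 24 asserted below.)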
- GroupingBucket timestampBucket = new GroupingBucket().setKey(ES_FIELD_TIMESTAMP)
- .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
- .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
-
- GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME,
- new AggregationSpec[]{cardinalityStatAggregationSpec}, filter, new GroupingBucket[]{timestampBucket});
+ GroupingBucket timestampBucket =
+ new GroupingBucket()
+ .setKey(ES_FIELD_TIMESTAMP)
+ .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
+ .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(CalendarInterval.DAY));
+
+ GenericTable resultTable =
+ _elasticSearchTimeseriesAspectService.getAggregatedStats(
+ ENTITY_NAME,
+ ASPECT_NAME,
+ new AggregationSpec[] {cardinalityStatAggregationSpec},
+ filter,
+ new GroupingBucket[] {timestampBucket});
// Validate column names
- assertEquals(resultTable.getColumnNames(), new StringArray(ES_FIELD_TIMESTAMP, "cardinality_" + "strStat"));
+ assertEquals(
+ resultTable.getColumnNames(),
+ new StringArray(ES_FIELD_TIMESTAMP, "cardinality_" + "strStat"));
// Validate column types
assertEquals(resultTable.getColumnTypes(), new StringArray("long", "long"));
// Validate rows
assertNotNull(resultTable.getRows());
assertEquals(resultTable.getRows().size(), 1);
- assertEquals(resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "24")));
+ assertEquals(
+ resultTable.getRows(), new StringArrayArray(new StringArray(_startTime.toString(), "24")));
}

- @Test(groups = {"getAggregatedStats", "usageStats"}, dependsOnGroups = {"upsert"})
+ @Test(
+ groups = {"getAggregatedStats", "usageStats"},
+ dependsOnGroups = {"upsert"})
public void testGetAggregatedStatsSumStatsCollectionDay1() {
// Filter is only on the urn
Criterion hasUrnCriterion =
new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString());
- Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
- .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
- .setValue(_startTime.toString());
- Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
- .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
- .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
+ Criterion startTimeCriterion =
+ new Criterion()
+ .setField(ES_FIELD_TIMESTAMP)
+ .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+ .setValue(_startTime.toString());
+ Criterion endTimeCriterion =
+ new Criterion()
+ .setField(ES_FIELD_TIMESTAMP)
+ .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+ .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT));
Filter filter =
- QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
+ QueryUtils.getFilterFromCriteria(
+ ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion));
// Aggregate the sum of the componentProfiles stat values
AggregationSpec cardinalityStatAggregationSpec =
- new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("componentProfiles.stat");
+ new AggregationSpec()
+ .setAggregationType(AggregationType.SUM)
+ .setFieldPath("componentProfiles.stat");
// Grouping bucket is only the componentProfiles key.
GroupingBucket profileStatBucket = - new GroupingBucket().setKey("componentProfiles.key").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - - GenericTable resultTable = _elasticSearchTimeseriesAspectService.getAggregatedStats(ENTITY_NAME, ASPECT_NAME, - new AggregationSpec[]{cardinalityStatAggregationSpec}, filter, new GroupingBucket[]{profileStatBucket}); + new GroupingBucket() + .setKey("componentProfiles.key") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + + GenericTable resultTable = + _elasticSearchTimeseriesAspectService.getAggregatedStats( + ENTITY_NAME, + ASPECT_NAME, + new AggregationSpec[] {cardinalityStatAggregationSpec}, + filter, + new GroupingBucket[] {profileStatBucket}); // Validate column names - assertEquals(resultTable.getColumnNames(), + assertEquals( + resultTable.getColumnNames(), new StringArray("componentProfiles.key", "sum_" + "componentProfiles.stat")); // Validate column types assertEquals(resultTable.getColumnTypes(), new StringArray("string", "double")); // Validate rows assertNotNull(resultTable.getRows()); assertEquals(resultTable.getRows().size(), 2); - assertEquals(resultTable.getRows(), + assertEquals( + resultTable.getRows(), new StringArrayArray(new StringArray("col1", "3264"), new StringArray("col2", "3288"))); } - @Test(groups = {"deleteAspectValues1"}, dependsOnGroups = {"getAggregatedStats", "getAspectValues", "testCountBeforeDelete"}) + @Test( + groups = {"deleteAspectValues1"}, + dependsOnGroups = {"getAggregatedStats", "getAspectValues", "testCountBeforeDelete"}) public void testDeleteAspectValuesByUrnAndTimeRangeDay1() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter filter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); DeleteAspectValuesResult result = _elasticSearchTimeseriesAspectService.deleteAspectValues(ENTITY_NAME, ASPECT_NAME, filter); - // For day1, we expect 24 (number of hours) * 3 (each testEntityProfile aspect expands 3 elastic docs: + // For day1, we expect 24 (number of hours) * 3 (each testEntityProfile aspect expands 3 elastic + // docs: // 1 original + 2 for componentProfiles) = 72 total. 
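The delete and count assertions that follow all reduce to the same bookkeeping stated in the comments: three Elasticsearch documents per testEntityProfile aspect, one aspect per hour on day 1, and 300 documents upserted for TEST_URN in total. A sketch of those counts, using only figures quoted in this file's comments:

    // Sketch: document counts behind the delete/count assertions.
    int docsPerAspect = 3;                      // 1 original + 2 componentProfiles
    int hoursPerDay = 24;
    int day1Docs = hoursPerDay * docsPerAspect; // 72 deleted by deleteAspectValues1
    int totalUpserted = 300;                    // all docs written for TEST_URN
    int remaining = totalUpserted - day1Docs;   // 228 expected afterwards
    assert day1Docs == 72 && remaining == 228;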
assertEquals(result.getNumDocsDeleted(), Long.valueOf(72L)); } - @Test(groups = {"deleteAspectValues2"}, dependsOnGroups = {"deleteAspectValues1", "testCountAfterDelete"}) + @Test( + groups = {"deleteAspectValues2"}, + dependsOnGroups = {"deleteAspectValues1", "testCountAfterDelete"}) public void testDeleteAspectValuesByUrn() { Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(TEST_URN.toString()); Filter filter = QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion)); DeleteAspectValuesResult result = _elasticSearchTimeseriesAspectService.deleteAspectValues(ENTITY_NAME, ASPECT_NAME, filter); - // Of the 300 elastic docs upserted for TEST_URN, 72 got deleted by deleteAspectValues1 test group leaving 228. + // Of the 300 elastic docs upserted for TEST_URN, 72 got deleted by deleteAspectValues1 test + // group leaving 228. assertEquals(result.getNumDocsDeleted(), Long.valueOf(228L)); } - @Test(groups = {"testCountBeforeDelete"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"testCountBeforeDelete"}, + dependsOnGroups = {"upsert"}) public void testCountByFilter() { // Test with filter Criterion hasUrnCriterion = @@ -840,17 +1173,23 @@ public void testCountByFilter() { assertEquals(count, 300L); // Test with filter with multiple criteria - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter urnAndTimeFilter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); count = - _elasticSearchTimeseriesAspectService.countByFilter(ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); + _elasticSearchTimeseriesAspectService.countByFilter( + ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); assertEquals(count, 72L); // test without filter @@ -860,7 +1199,9 @@ public void testCountByFilter() { assertTrue(count >= 300L); } - @Test(groups = {"testCountAfterDelete"}, dependsOnGroups = {"deleteAspectValues1"}) + @Test( + groups = {"testCountAfterDelete"}, + dependsOnGroups = {"deleteAspectValues1"}) public void testCountByFilterAfterDelete() throws InterruptedException { syncAfterWrite(getBulkProcessor()); // Test with filter @@ -872,24 +1213,32 @@ public void testCountByFilterAfterDelete() throws InterruptedException { assertEquals(count, 228L); // Test with filter with multiple criteria - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(_startTime.toString()); - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + 
.setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(_startTime.toString()); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(String.valueOf(_startTime + 23 * TIME_INCREMENT)); Filter urnAndTimeFilter = - QueryUtils.getFilterFromCriteria(ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); + QueryUtils.getFilterFromCriteria( + ImmutableList.of(hasUrnCriterion, startTimeCriterion, endTimeCriterion)); count = - _elasticSearchTimeseriesAspectService.countByFilter(ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); + _elasticSearchTimeseriesAspectService.countByFilter( + ENTITY_NAME, ASPECT_NAME, urnAndTimeFilter); assertEquals(count, 0L); } - @Test(groups = {"getAggregatedStats"}, dependsOnGroups = {"upsert"}) + @Test( + groups = {"getAggregatedStats"}, + dependsOnGroups = {"upsert"}) public void testGetIndexSizes() { List result = _elasticSearchTimeseriesAspectService.getIndexSizes(); - //CHECKSTYLE:OFF + // CHECKSTYLE:OFF /* Example result: {aspectName=testentityprofile, sizeMb=52.234, @@ -897,11 +1246,17 @@ public void testGetIndexSizes() { {aspectName=testentityprofile, sizeMb=0.208, indexName=es_timeseries_aspect_service_test_testentitywithouttests_testentityprofileaspect_v1, entityName=testentitywithouttests} */ - // There may be other indices in there from other tests, so just make sure that index for entity + aspect is in there - //CHECKSTYLE:ON + // There may be other indices in there from other tests, so just make sure that index for entity + // + aspect is in there + // CHECKSTYLE:ON assertTrue(result.size() > 0); assertTrue( - result.stream().anyMatch(idxSizeResult -> idxSizeResult.getIndexName().equals( - "es_timeseries_aspect_service_test_testentity_testentityprofileaspect_v1"))); + result.stream() + .anyMatch( + idxSizeResult -> + idxSizeResult + .getIndexName() + .equals( + "es_timeseries_aspect_service_test_testentity_testentityprofileaspect_v1"))); } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java index 12a02f954e1bc..29c64abdc4d0d 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java +++ b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java @@ -1,5 +1,7 @@ package io.datahubproject.test; +import static org.mockito.Mockito.mock; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -24,13 +26,7 @@ import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.GenericRecordUtils; -import net.datafaker.Faker; import com.linkedin.mxe.MetadataChangeProposal; -import net.datafaker.providers.base.Animal; -import net.datafaker.providers.base.Cat; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.URISyntaxException; @@ -46,72 +42,92 @@ import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; - -import static org.mockito.Mockito.mock; +import javax.annotation.Nonnull; +import net.datafaker.Faker; +import net.datafaker.providers.base.Animal; +import net.datafaker.providers.base.Cat; +import org.apache.commons.lang3.NotImplementedException; public class 
DataGenerator { - private final static Faker FAKER = new Faker(); - private final EntityRegistry entityRegistry; - private final EntityService entityService; - - public DataGenerator(EntityService entityService) { - this.entityService = entityService; - this.entityRegistry = entityService.getEntityRegistry(); - } - - public static DataGenerator build(EntityRegistry entityRegistry) { - EntityServiceImpl mockEntityServiceImpl = new EntityServiceImpl(mock(AspectDao.class), - mock(EventProducer.class), entityRegistry, false, - mock(UpdateIndicesService.class), mock(PreProcessHooks.class)); - return new DataGenerator(mockEntityServiceImpl); - } - - public Stream> generateDatasets() { - return generateMCPs("dataset", 10, List.of()); - } - - public List generateTags(long count) { - return generateMCPs("tag", count, List.of()).findFirst().get(); - } - - public Stream> generateMCPs(String entityName, long count, List aspects) { - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - - // Prevent duplicate tags and terms generated as secondary entities - Set secondaryUrns = new HashSet<>(); - - return LongStream.range(0, count).mapToObj(idx -> { - RecordTemplate key = randomKeyAspect(entitySpec); - MetadataChangeProposal mcp = new MetadataChangeProposal(); - mcp.setEntityType(entitySpec.getName()); - mcp.setAspectName(entitySpec.getKeyAspectName()); - mcp.setAspect(GenericRecordUtils.serializeAspect(key)); - mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); - mcp.setChangeType(ChangeType.UPSERT); - return mcp; - }).flatMap(mcp -> { - // Expand with additional random aspects - List additionalMCPs = new LinkedList<>(); - - for (String aspectName : aspects) { + private static final Faker FAKER = new Faker(); + private final EntityRegistry entityRegistry; + private final EntityService entityService; + + public DataGenerator(EntityService entityService) { + this.entityService = entityService; + this.entityRegistry = entityService.getEntityRegistry(); + } + + public static DataGenerator build(EntityRegistry entityRegistry) { + EntityServiceImpl mockEntityServiceImpl = + new EntityServiceImpl( + mock(AspectDao.class), + mock(EventProducer.class), + entityRegistry, + false, + mock(UpdateIndicesService.class), + mock(PreProcessHooks.class)); + return new DataGenerator(mockEntityServiceImpl); + } + + public Stream> generateDatasets() { + return generateMCPs("dataset", 10, List.of()); + } + + public List generateTags(long count) { + return generateMCPs("tag", count, List.of()).findFirst().get(); + } + + public Stream> generateMCPs( + String entityName, long count, List aspects) { + EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); + + // Prevent duplicate tags and terms generated as secondary entities + Set secondaryUrns = new HashSet<>(); + + return LongStream.range(0, count) + .mapToObj( + idx -> { + RecordTemplate key = randomKeyAspect(entitySpec); + MetadataChangeProposal mcp = new MetadataChangeProposal(); + mcp.setEntityType(entitySpec.getName()); + mcp.setAspectName(entitySpec.getKeyAspectName()); + mcp.setAspect(GenericRecordUtils.serializeAspect(key)); + mcp.setEntityUrn(EntityKeyUtils.convertEntityKeyToUrn(key, entityName)); + mcp.setChangeType(ChangeType.UPSERT); + return mcp; + }) + .flatMap( + mcp -> { + // Expand with additional random aspects + List additionalMCPs = new LinkedList<>(); + + for (String aspectName : aspects) { AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); if (aspectSpec == null) { - throw new 
IllegalStateException("Aspect " + aspectName + " not found for entity " + entityName); + throw new IllegalStateException( + "Aspect " + aspectName + " not found for entity " + entityName); } - RecordTemplate aspect = randomAspectGenerators.getOrDefault(aspectName, - DataGenerator::defaultRandomAspect).apply(entitySpec, aspectSpec); + RecordTemplate aspect = + randomAspectGenerators + .getOrDefault(aspectName, DataGenerator::defaultRandomAspect) + .apply(entitySpec, aspectSpec); // Maybe generate nested entities at the same time, like globalTags/glossaryTerms - List secondaryEntities = nestedRandomAspectGenerators.getOrDefault(aspectSpec.getName(), - (a, c) -> List.of()).apply(aspect, 5).stream() - .filter(secondaryMCP -> { - if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { + List secondaryEntities = + nestedRandomAspectGenerators + .getOrDefault(aspectSpec.getName(), (a, c) -> List.of()) + .apply(aspect, 5) + .stream() + .filter( + secondaryMCP -> { + if (!secondaryUrns.contains(secondaryMCP.getEntityUrn())) { secondaryUrns.add(secondaryMCP.getEntityUrn()); return true; - } - return false; - }) + } + return false; + }) .collect(Collectors.toList()); additionalMCPs.addAll(secondaryEntities); @@ -123,254 +139,327 @@ public Stream> generateMCPs(String entityName, long additionalMCP.setChangeType(ChangeType.UPSERT); additionalMCPs.add(additionalMCP); - } - - return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); - }).map(mcp -> { - // Expand with default aspects per normal - return Stream.concat(Stream.of(mcp), - AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) - .collect(Collectors.toList()); - }); - } - - public static Map> randomAspectGenerators = Map.of( - "glossaryTermInfo", (e, a) -> { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); + } + + return Stream.concat(Stream.of(mcp), additionalMCPs.stream()); + }) + .map( + mcp -> { + // Expand with default aspects per normal + return Stream.concat( + Stream.of(mcp), + AspectUtils.getAdditionalChanges(mcp, entityService, true).stream()) + .collect(Collectors.toList()); + }); + } + + public static Map> + randomAspectGenerators = + Map.of( + "glossaryTermInfo", + (e, a) -> { + GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) defaultRandomAspect(e, a); glossaryTermInfo.setName(normalize(FAKER.company().buzzword())); return glossaryTermInfo; - } - ); - - public Map>> nestedRandomAspectGenerators = Map.of( - "globalTags", (aspect, count) -> { - try { - List tags = generateTags(count); - Method setTagsMethod = aspect.getClass().getMethod("setTags", TagAssociationArray.class); - TagAssociationArray tagAssociations = new TagAssociationArray(); - tagAssociations.addAll(tags.stream().map( - tagMCP -> { - try { - return new TagAssociation().setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList())); - setTagsMethod.invoke(aspect, tagAssociations); - return tags; - } catch (Exception e) { - throw new RuntimeException(e); - } - }, - "glossaryTerms", (aspect, count) -> { - try { - List terms = generateMCPs("glossaryTerm", count, - List.of("glossaryTermInfo")) - .map(mcps -> mcps.get(0)) - .collect(Collectors.toList()); - Method setTermsMethod = aspect.getClass().getMethod("setTerms", GlossaryTermAssociationArray.class); - GlossaryTermAssociationArray termAssociations = new GlossaryTermAssociationArray(); - termAssociations.addAll(terms.stream().map( - termMCP -> { - try { - 
return new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(termMCP.getEntityUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - ).collect(Collectors.toList())); - setTermsMethod.invoke(aspect, termAssociations); - return terms; - } catch (Exception e) { - throw new RuntimeException(e); - } - } - ); - - private static RecordTemplate defaultRandomAspect(@Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { - Class aspectClass = aspectSpec.getDataTemplateClass(); - try { - Object aspect = aspectClass.getDeclaredConstructor().newInstance(); - - List booleanMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == Boolean.class) - .collect(Collectors.toList()); - - for (Method boolMethod : booleanMethods) { - boolMethod.invoke(aspect, FAKER.random().nextBoolean()); - } - - List stringMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == String.class) - .collect(Collectors.toList()); - - for (Method stringMethod : stringMethods) { - String value; - switch (aspectSpec.getName() + "_" + stringMethod.getName()) { - default: - value = FAKER.lorem().characters(8, 16, false); - break; - } - - // global - if (stringMethod.getName().toLowerCase().contains("description") - || stringMethod.getName().toLowerCase().contains("definition")) { - value = FAKER.lorem().paragraph(); - } + }); + + public Map>> + nestedRandomAspectGenerators = + Map.of( + "globalTags", + (aspect, count) -> { + try { + List tags = generateTags(count); + Method setTagsMethod = + aspect.getClass().getMethod("setTags", TagAssociationArray.class); + TagAssociationArray tagAssociations = new TagAssociationArray(); + tagAssociations.addAll( + tags.stream() + .map( + tagMCP -> { + try { + return new TagAssociation() + .setTag(TagUrn.createFromUrn(tagMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList())); + setTagsMethod.invoke(aspect, tagAssociations); + return tags; + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + "glossaryTerms", + (aspect, count) -> { + try { + List terms = + generateMCPs("glossaryTerm", count, List.of("glossaryTermInfo")) + .map(mcps -> mcps.get(0)) + .collect(Collectors.toList()); + Method setTermsMethod = + aspect + .getClass() + .getMethod("setTerms", GlossaryTermAssociationArray.class); + GlossaryTermAssociationArray termAssociations = + new GlossaryTermAssociationArray(); + termAssociations.addAll( + terms.stream() + .map( + termMCP -> { + try { + return new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + termMCP.getEntityUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .collect(Collectors.toList())); + setTermsMethod.invoke(aspect, termAssociations); + return terms; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + + private static RecordTemplate defaultRandomAspect( + @Nonnull EntitySpec entitySpec, @Nonnull AspectSpec aspectSpec) { + Class aspectClass = aspectSpec.getDataTemplateClass(); + try { + Object aspect = aspectClass.getDeclaredConstructor().newInstance(); + + List booleanMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Boolean.class) + 
.collect(Collectors.toList()); + + for (Method boolMethod : booleanMethods) { + boolMethod.invoke(aspect, FAKER.random().nextBoolean()); + } + + List stringMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + for (Method stringMethod : stringMethods) { + String value; + switch (aspectSpec.getName() + "_" + stringMethod.getName()) { + default: + value = FAKER.lorem().characters(8, 16, false); + break; + } - stringMethod.invoke(aspect, value); - } - - List enumMethods = Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0].isEnum()) - .collect(Collectors.toList()); - - for (Method enumMethod : enumMethods) { - Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); - // Excluding $UNKNOWNs - enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); - } - - // auditStamp - Arrays.stream(aspectClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == AuditStamp.class) - .findFirst().ifPresent(auditStampMethod -> { - try { - AuditStamp auditStamp = new AuditStamp() - .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) - .setTime(System.currentTimeMillis()); - auditStampMethod.invoke(aspect, auditStamp); - } catch (URISyntaxException | IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } - }); - - return aspectClass.cast(aspect); - } catch (Exception e) { - throw new RuntimeException(e); + // global + if (stringMethod.getName().toLowerCase().contains("description") + || stringMethod.getName().toLowerCase().contains("definition")) { + value = FAKER.lorem().paragraph(); } - } - private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { - Class keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); - try { - Object key = keyClass.getDeclaredConstructor().newInstance(); - - List stringMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == String.class) - .collect(Collectors.toList()); - - switch (entitySpec.getName()) { - case "tag": - stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); - break; - case "glossaryTerm": - stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); - break; - case "container": - stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); - break; - default: - switch (stringMethods.size()) { - case 1: - stringMethods.get(0).invoke(key, String.join(".", multiName(3))); - break; - case 2: - Cat cat = FAKER.cat(); - stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); - stringMethods.get(1).invoke(key, cat.name().toLowerCase()); - break; - default: - Animal animal = FAKER.animal(); - stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); - stringMethods.get(1).invoke(key, animal.species().toLowerCase()); - stringMethods.get(2).invoke(key, animal.name().toLowerCase()); - break; - } - break; - } - - List urnMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0] == Urn.class) - .collect(Collectors.toList()); - - for (Method urnMethod : urnMethods) { - switch 
(entitySpec.getName()) { - case "dataset": - urnMethod.invoke(key, randomUrnLowerCase("dataPlatform", - List.of(randomDataPlatform()))); - break; - default: - throw new NotImplementedException(entitySpec.getName()); + stringMethod.invoke(aspect, value); + } + + List enumMethods = + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(aspect, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + // auditStamp + Arrays.stream(aspectClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == AuditStamp.class) + .findFirst() + .ifPresent( + auditStampMethod -> { + try { + AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString(Constants.DATAHUB_ACTOR)) + .setTime(System.currentTimeMillis()); + auditStampMethod.invoke(aspect, auditStamp); + } catch (URISyntaxException + | IllegalAccessException + | InvocationTargetException e) { + throw new RuntimeException(e); } - } - - List enumMethods = Arrays.stream(keyClass.getMethods()) - .filter(m -> m.getName().startsWith("set") - && m.getParameterCount() == 1 - && m.getParameterTypes()[0].isEnum()) - .collect(Collectors.toList()); - - for (Method enumMethod : enumMethods) { - Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); - // Excluding $UNKNOWNs - enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); - } - - return keyClass.cast(key); - } catch (Exception e) { - throw new RuntimeException(e); - } - } + }); - private static List multiName(int size) { - switch (size) { + return aspectClass.cast(aspect); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private static RecordTemplate randomKeyAspect(EntitySpec entitySpec) { + Class keyClass = entitySpec.getKeyAspectSpec().getDataTemplateClass(); + try { + Object key = keyClass.getDeclaredConstructor().newInstance(); + + List stringMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == String.class) + .collect(Collectors.toList()); + + switch (entitySpec.getName()) { + case "tag": + stringMethods.get(0).invoke(key, normalize(FAKER.marketing().buzzwords())); + break; + case "glossaryTerm": + stringMethods.get(0).invoke(key, normalize(UUID.randomUUID().toString())); + break; + case "container": + stringMethods.get(0).invoke(key, FAKER.examplify("b5e95fce839e7d78151ed7e0a7420d84")); + break; + default: + switch (stringMethods.size()) { case 1: - return Stream.of(FAKER.marketing().buzzwords()) - .map(String::toLowerCase).collect(Collectors.toList()); + stringMethods.get(0).invoke(key, String.join(".", multiName(3))); + break; case 2: - Cat cat = FAKER.cat(); - return Stream.of(cat.breed(), cat.name()) - .map(String::toLowerCase).collect(Collectors.toList()); - case 3: - Animal animal = FAKER.animal(); - return Stream.of(animal.genus(), animal.species(), animal.name()) - .map(String::toLowerCase).collect(Collectors.toList()); + Cat cat = FAKER.cat(); + stringMethods.get(0).invoke(key, cat.breed().toLowerCase()); + stringMethods.get(1).invoke(key, cat.name().toLowerCase()); + break; default: - return 
IntStream.range(0, size).mapToObj(i -> FAKER.expression("#{numerify 'test####'}")).collect(Collectors.toList()); + Animal animal = FAKER.animal(); + stringMethods.get(0).invoke(key, animal.genus().toLowerCase()); + stringMethods.get(1).invoke(key, animal.species().toLowerCase()); + stringMethods.get(2).invoke(key, animal.name().toLowerCase()); + break; + } + break; + } + + List urnMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0] == Urn.class) + .collect(Collectors.toList()); + + for (Method urnMethod : urnMethods) { + switch (entitySpec.getName()) { + case "dataset": + urnMethod.invoke( + key, randomUrnLowerCase("dataPlatform", List.of(randomDataPlatform()))); + break; + default: + throw new NotImplementedException(entitySpec.getName()); } + } + + List enumMethods = + Arrays.stream(keyClass.getMethods()) + .filter( + m -> + m.getName().startsWith("set") + && m.getParameterCount() == 1 + && m.getParameterTypes()[0].isEnum()) + .collect(Collectors.toList()); + + for (Method enumMethod : enumMethods) { + Object[] enumClass = enumMethod.getParameterTypes()[0].getEnumConstants(); + // Excluding $UNKNOWNs + enumMethod.invoke(key, enumClass[FAKER.random().nextInt(0, enumClass.length - 2)]); + } + + return keyClass.cast(key); + } catch (Exception e) { + throw new RuntimeException(e); } - - private static Urn randomUrnLowerCase(String entityType, List tuple) { - return Urn.createFromTuple(entityType, - tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); - } - - private static String normalize(String input) { - return input.toLowerCase().replaceAll("\\W+", "_"); - } - - private static String randomDataPlatform() { - String[] platforms = { - "ambry", "bigquery", "couchbase", "druid", "external", "feast", "glue", "hdfs", "hive", "kafka", "kusto", - "looker", "mongodb", "mssql", "mysql", "oracle", "pinot", "postgres", "presto", "redshift", "s3", - "sagemaker", "snowflake", "teradata", "voldemort" - }; - - return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } + + private static List multiName(int size) { + switch (size) { + case 1: + return Stream.of(FAKER.marketing().buzzwords()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + case 2: + Cat cat = FAKER.cat(); + return Stream.of(cat.breed(), cat.name()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + case 3: + Animal animal = FAKER.animal(); + return Stream.of(animal.genus(), animal.species(), animal.name()) + .map(String::toLowerCase) + .collect(Collectors.toList()); + default: + return IntStream.range(0, size) + .mapToObj(i -> FAKER.expression("#{numerify 'test####'}")) + .collect(Collectors.toList()); } + } + + private static Urn randomUrnLowerCase(String entityType, List tuple) { + return Urn.createFromTuple( + entityType, tuple.stream().map(DataGenerator::normalize).collect(Collectors.toList())); + } + + private static String normalize(String input) { + return input.toLowerCase().replaceAll("\\W+", "_"); + } + + private static String randomDataPlatform() { + String[] platforms = { + "ambry", + "bigquery", + "couchbase", + "druid", + "external", + "feast", + "glue", + "hdfs", + "hive", + "kafka", + "kusto", + "looker", + "mongodb", + "mssql", + "mysql", + "oracle", + "pinot", + "postgres", + "presto", + "redshift", + "s3", + "sagemaker", + "snowflake", + "teradata", + "voldemort" + }; + + return platforms[FAKER.random().nextInt(0, platforms.length - 1)]; + } } diff 
--git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java index 18fbf86f8668d..ff14b91a72c7f 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/EntityExporter.java @@ -1,5 +1,10 @@ package io.datahubproject.test.fixtures.search; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + +import java.io.IOException; +import java.util.Set; +import java.util.stream.Collectors; import lombok.Builder; import lombok.NonNull; import org.opensearch.action.search.SearchRequest; @@ -11,56 +16,50 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; -import java.io.IOException; -import java.util.Set; -import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - @Builder public class EntityExporter { - @NonNull - private RestHighLevelClient client; - @Builder.Default - private int fetchSize = 3000; - @NonNull - private FixtureWriter writer; - @NonNull - private String fixtureName; - @Builder.Default - private String sourceIndexPrefix = ""; - @Builder.Default - private String sourceIndexSuffix = "index_v2"; - @Builder.Default - private Set indexEntities = SEARCHABLE_ENTITY_TYPES.stream() - .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) - .collect(Collectors.toSet()); - + @NonNull private RestHighLevelClient client; + @Builder.Default private int fetchSize = 3000; + @NonNull private FixtureWriter writer; + @NonNull private String fixtureName; + @Builder.Default private String sourceIndexPrefix = ""; + @Builder.Default private String sourceIndexSuffix = "index_v2"; - public void export() throws IOException { - Set searchIndexSuffixes = indexEntities.stream() - .map(entityName -> entityName + sourceIndexSuffix) - .collect(Collectors.toSet()); + @Builder.Default + private Set indexEntities = + SEARCHABLE_ENTITY_TYPES.stream() + .map(entityType -> entityType.toString().toLowerCase().replaceAll("_", "")) + .collect(Collectors.toSet()); - // Fetch indices - GetMappingsResponse response = client.indices().getMapping(new GetMappingsRequest().indices("*"), - RequestOptions.DEFAULT); + public void export() throws IOException { + Set searchIndexSuffixes = + indexEntities.stream() + .map(entityName -> entityName + sourceIndexSuffix) + .collect(Collectors.toSet()); - response.mappings().keySet().stream() - .filter(index -> searchIndexSuffixes.stream().anyMatch(index::contains) - && index.startsWith(sourceIndexPrefix)) - .map(index -> index.split(sourceIndexSuffix, 2)[0] + sourceIndexSuffix) - .forEach(indexName -> { + // Fetch indices + GetMappingsResponse response = + client.indices().getMapping(new GetMappingsRequest().indices("*"), RequestOptions.DEFAULT); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + response.mappings().keySet().stream() + .filter( + index -> + searchIndexSuffixes.stream().anyMatch(index::contains) + && index.startsWith(sourceIndexPrefix)) + .map(index -> index.split(sourceIndexSuffix, 2)[0] + sourceIndexSuffix) + .forEach( + indexName -> { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + 
searchSourceBuilder.size(fetchSize); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - SearchRequest searchRequest = new SearchRequest(indexName); - searchRequest.source(searchSourceBuilder); + SearchRequest searchRequest = new SearchRequest(indexName); + searchRequest.source(searchSourceBuilder); - String outputPath = String.format("%s/%s.json", fixtureName, indexName.replaceFirst(sourceIndexPrefix, "")); - writer.write(searchRequest, outputPath, false); - }); - } + String outputPath = + String.format( + "%s/%s.json", fixtureName, indexName.replaceFirst(sourceIndexPrefix, "")); + writer.write(searchRequest, outputPath, false); + }); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java index 1b804a2346883..dff6b7ab5a898 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureReader.java @@ -1,13 +1,9 @@ package io.datahubproject.test.fixtures.search; +import static io.datahubproject.test.fixtures.search.SearchFixtureUtils.OBJECT_MAPPER; + import com.fasterxml.jackson.core.JsonProcessingException; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; -import lombok.Builder; -import lombok.NonNull; -import org.apache.commons.io.FilenameUtils; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.common.xcontent.XContentType; - import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.Closeable; @@ -22,105 +18,113 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; - -import static io.datahubproject.test.fixtures.search.SearchFixtureUtils.OBJECT_MAPPER; +import lombok.Builder; +import lombok.NonNull; +import org.apache.commons.io.FilenameUtils; +import org.opensearch.action.index.IndexRequest; +import org.opensearch.common.xcontent.XContentType; @Builder public class FixtureReader { - @Builder.Default - private String inputBase = SearchFixtureUtils.FIXTURE_BASE; - @NonNull - private ESBulkProcessor bulkProcessor; - @NonNull - private String fixtureName; - @Builder.Default - private String targetIndexPrefix = ""; + @Builder.Default private String inputBase = SearchFixtureUtils.FIXTURE_BASE; + @NonNull private ESBulkProcessor bulkProcessor; + @NonNull private String fixtureName; + @Builder.Default private String targetIndexPrefix = ""; - private long refreshIntervalSeconds; + private long refreshIntervalSeconds; - public Set read() throws IOException { - try (Stream files = Files.list(Paths.get(String.format("%s/%s", inputBase, fixtureName)))) { - return files.map(file -> { + public Set read() throws IOException { + try (Stream files = + Files.list(Paths.get(String.format("%s/%s", inputBase, fixtureName)))) { + return files + .map( + file -> { String absolutePath = file.toAbsolutePath().toString(); - String indexName = String.format("%s_%s", targetIndexPrefix, FilenameUtils.getBaseName(absolutePath).split("[.]", 2)[0]); + String indexName = + String.format( + "%s_%s", + targetIndexPrefix, + FilenameUtils.getBaseName(absolutePath).split("[.]", 2)[0]); try (Stream lines = getLines(absolutePath)) { - lines.forEach(line -> { + lines.forEach( + line -> { try { - UrnDocument doc = OBJECT_MAPPER.readValue(line, UrnDocument.class); - IndexRequest request = new IndexRequest(indexName) - .id(doc.urn) - 
.source(line.getBytes(), XContentType.JSON); + UrnDocument doc = OBJECT_MAPPER.readValue(line, UrnDocument.class); + IndexRequest request = + new IndexRequest(indexName) + .id(doc.urn) + .source(line.getBytes(), XContentType.JSON); - bulkProcessor.add(request); + bulkProcessor.add(request); } catch (JsonProcessingException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }); + }); } catch (IOException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } return indexName; - }).collect(Collectors.toSet()); - } finally { - bulkProcessor.flush(); - try { - Thread.sleep(1000 * refreshIntervalSeconds); - } catch (InterruptedException ignored) { - } - } + }) + .collect(Collectors.toSet()); + } finally { + bulkProcessor.flush(); + try { + Thread.sleep(1000 * refreshIntervalSeconds); + } catch (InterruptedException ignored) { + } } + } - private Stream getLines(String path) throws IOException { - if (FilenameUtils.getExtension(path).equals("gz")) { - return GZIPFiles.lines(Paths.get(path)); - } else { - return Files.lines(Paths.get(path)); - } + private Stream getLines(String path) throws IOException { + if (FilenameUtils.getExtension(path).equals("gz")) { + return GZIPFiles.lines(Paths.get(path)); + } else { + return Files.lines(Paths.get(path)); } + } - public static class GZIPFiles { - /** - * Get a lazily loaded stream of lines from a gzipped file, similar to - * {@link Files#lines(java.nio.file.Path)}. - * - * @param path - * The path to the gzipped file. - * @return stream with lines. - */ - public static Stream lines(Path path) { - InputStream fileIs = null; - BufferedInputStream bufferedIs = null; - GZIPInputStream gzipIs = null; - try { - fileIs = Files.newInputStream(path); - // Even though GZIPInputStream has a buffer it reads individual bytes - // when processing the header, better add a buffer in-between - bufferedIs = new BufferedInputStream(fileIs, 65535); - gzipIs = new GZIPInputStream(bufferedIs); - } catch (IOException e) { - closeSafely(gzipIs); - closeSafely(bufferedIs); - closeSafely(fileIs); - throw new UncheckedIOException(e); - } - BufferedReader reader = new BufferedReader(new InputStreamReader(gzipIs)); - return reader.lines().onClose(() -> closeSafely(reader)); - } + public static class GZIPFiles { + /** + * Get a lazily loaded stream of lines from a gzipped file, similar to {@link + * Files#lines(java.nio.file.Path)}. + * + * @param path The path to the gzipped file. + * @return stream with lines. 
+ */ + public static Stream lines(Path path) { + InputStream fileIs = null; + BufferedInputStream bufferedIs = null; + GZIPInputStream gzipIs = null; + try { + fileIs = Files.newInputStream(path); + // Even though GZIPInputStream has a buffer it reads individual bytes + // when processing the header, better add a buffer in-between + bufferedIs = new BufferedInputStream(fileIs, 65535); + gzipIs = new GZIPInputStream(bufferedIs); + } catch (IOException e) { + closeSafely(gzipIs); + closeSafely(bufferedIs); + closeSafely(fileIs); + throw new UncheckedIOException(e); + } + BufferedReader reader = new BufferedReader(new InputStreamReader(gzipIs)); + return reader.lines().onClose(() -> closeSafely(reader)); + } - private static void closeSafely(Closeable closeable) { - if (closeable != null) { - try { - closeable.close(); - } catch (IOException e) { - // Ignore - } - } + private static void closeSafely(Closeable closeable) { + if (closeable != null) { + try { + closeable.close(); + } catch (IOException e) { + // Ignore } + } } + } - public static class UrnDocument { - public String urn; - } + public static class UrnDocument { + public String urn; + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java index 0aefa006421fc..8a11de6c513a3 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/FixtureWriter.java @@ -1,6 +1,11 @@ package io.datahubproject.test.fixtures.search; import com.fasterxml.jackson.core.JsonProcessingException; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.function.BiConsumer; +import javax.annotation.Nullable; import lombok.Builder; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; @@ -9,70 +14,72 @@ import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; -import javax.annotation.Nullable; -import java.io.BufferedWriter; -import java.io.FileWriter; -import java.io.IOException; -import java.util.function.BiConsumer; - -/** - * - */ +/** */ @Builder public class FixtureWriter { - private RestHighLevelClient client; + private RestHighLevelClient client; - @Builder.Default - private String outputBase = SearchFixtureUtils.FIXTURE_BASE; + @Builder.Default private String outputBase = SearchFixtureUtils.FIXTURE_BASE; - public void write(SearchRequest searchRequest, String relativeOutput, boolean append) { - write(searchRequest, relativeOutput, append, null, null, null); - } + public void write(SearchRequest searchRequest, String relativeOutput, boolean append) { + write(searchRequest, relativeOutput, append, null, null, null); + } - public void write(SearchRequest searchRequest, String relativeOutput, boolean append, - @Nullable Class outputType, Class callbackType, BiConsumer callback) { - try { - SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); - SearchHits hits = searchResponse.getHits(); - long remainingHits = hits.getTotalHits().value; + public void write( + SearchRequest searchRequest, + String relativeOutput, + boolean append, + @Nullable Class outputType, + Class callbackType, + BiConsumer callback) { + try { + SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT); + SearchHits hits = searchResponse.getHits(); + long remainingHits 
= hits.getTotalHits().value; - if (remainingHits > 0) { - try (FileWriter writer = new FileWriter(String.format("%s/%s", outputBase, relativeOutput), append); - BufferedWriter bw = new BufferedWriter(writer)) { + if (remainingHits > 0) { + try (FileWriter writer = + new FileWriter(String.format("%s/%s", outputBase, relativeOutput), append); + BufferedWriter bw = new BufferedWriter(writer)) { - while (remainingHits > 0) { - SearchHit lastHit = null; - for (SearchHit hit : hits.getHits()) { - lastHit = hit; - remainingHits -= 1; + while (remainingHits > 0) { + SearchHit lastHit = null; + for (SearchHit hit : hits.getHits()) { + lastHit = hit; + remainingHits -= 1; - try { - if (outputType == null) { - bw.write(hit.getSourceAsString()); - } else { - O doc = SearchFixtureUtils.OBJECT_MAPPER.readValue(hit.getSourceAsString(), outputType); - bw.write(SearchFixtureUtils.OBJECT_MAPPER.writeValueAsString(doc)); - } - bw.newLine(); + try { + if (outputType == null) { + bw.write(hit.getSourceAsString()); + } else { + O doc = + SearchFixtureUtils.OBJECT_MAPPER.readValue( + hit.getSourceAsString(), outputType); + bw.write(SearchFixtureUtils.OBJECT_MAPPER.writeValueAsString(doc)); + } + bw.newLine(); - // Fire callback - if (callback != null) { - callback.accept(hit, SearchFixtureUtils.OBJECT_MAPPER.readValue(hit.getSourceAsString(), callbackType)); - } - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - if (lastHit != null) { - searchRequest.source().searchAfter(lastHit.getSortValues()); - hits = client.search(searchRequest, RequestOptions.DEFAULT).getHits(); - } - } + // Fire callback + if (callback != null) { + callback.accept( + hit, + SearchFixtureUtils.OBJECT_MAPPER.readValue( + hit.getSourceAsString(), callbackType)); } + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + if (lastHit != null) { + searchRequest.source().searchAfter(lastHit.getSortValues()); + hits = client.search(searchRequest, RequestOptions.DEFAULT).getHits(); } - } catch (IOException e) { - throw new RuntimeException(e); + } } + } + } catch (IOException e) { + throw new RuntimeException(e); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java index 5db07ee6fb8bc..4b7d81aa04416 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/LineageExporter.java @@ -1,15 +1,6 @@ package io.datahubproject.test.fixtures.search; import com.google.common.collect.Lists; -import lombok.Builder; -import lombok.NonNull; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.search.builder.SearchSourceBuilder; -import org.opensearch.search.sort.SortBuilders; -import org.opensearch.search.sort.SortOrder; - import java.net.URLDecoder; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; @@ -20,174 +11,210 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +import lombok.Builder; +import lombok.NonNull; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.builder.SearchSourceBuilder; +import 
org.opensearch.search.sort.SortBuilders; +import org.opensearch.search.sort.SortOrder; @Builder public class LineageExporter { - @Builder.Default - private int fetchSize = 3000; - @Builder.Default - private int queryStatementSize = 32000; - @NonNull - private FixtureWriter writer; - private String entityIndexName; - - private String graphIndexName; - - private String entityOutputPath; - private String graphOutputPath; - - private Class anonymizerClazz; - - - private static String idToUrn(String id) { - return URLDecoder.decode(id, StandardCharsets.UTF_8); + @Builder.Default private int fetchSize = 3000; + @Builder.Default private int queryStatementSize = 32000; + @NonNull private FixtureWriter writer; + private String entityIndexName; + + private String graphIndexName; + + private String entityOutputPath; + private String graphOutputPath; + + private Class anonymizerClazz; + + private static String idToUrn(String id) { + return URLDecoder.decode(id, StandardCharsets.UTF_8); + } + + public void export(Set ids) { + if (entityIndexName != null) { + assert (entityOutputPath != null); + exportEntityIndex( + ids.stream() + .map(id -> URLEncoder.encode(id, StandardCharsets.UTF_8)) + .collect(Collectors.toSet()), + new HashSet<>(), + 0); } - - public void export(Set ids) { - if (entityIndexName != null) { - assert (entityOutputPath != null); - exportEntityIndex(ids.stream().map(id -> URLEncoder.encode(id, StandardCharsets.UTF_8)).collect(Collectors.toSet()), - new HashSet<>(), 0); - } - if (graphIndexName != null) { - assert (graphOutputPath != null); - exportGraphIndex(ids, new HashSet<>(), new HashSet<>(), 0); - } + if (graphIndexName != null) { + assert (graphOutputPath != null); + exportGraphIndex(ids, new HashSet<>(), new HashSet<>(), 0); } - - public void exportGraphIndex(Set urns, Set visitedUrns, Set visitedIds, int hops) { - Set nextIds = new HashSet<>(); - if (!urns.isEmpty()) { - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - - boolQueryBuilder.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf")); - - Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize).forEach(batch -> { - boolQueryBuilder.should(QueryBuilders.termsQuery("source.urn", batch.toArray(String[]::new))); - boolQueryBuilder.should(QueryBuilders.termsQuery("destination.urn", batch.toArray(String[]::new))); - }); - boolQueryBuilder.minimumShouldMatch(1); - - // Exclude visited - Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.mustNot(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.query(boolQueryBuilder); - searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - - SearchRequest searchRequest = new SearchRequest(graphIndexName); - searchRequest.source(searchSourceBuilder); - - Set docIds = new HashSet<>(); - Set docs = new HashSet<>(); - - long startTime = System.currentTimeMillis(); - System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, urns.size(), visitedUrns.size()); - - writer.write(searchRequest, graphOutputPath, hops != 0, anonymizerClazz, - GraphDocument.class, (hit, doc) -> { - docIds.add(hit.getId()); - docs.add(doc); - }); - - long endTime = System.currentTimeMillis(); - System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); - - visitedIds.addAll(docIds); - visitedUrns.addAll(urns); - - Set 
discoveredUrns = docs.stream().flatMap(d -> Stream.of(d.destination.urn, d.source.urn)) - .filter(Objects::nonNull) - .filter(urn -> !visitedUrns.contains(urn)) - .collect(Collectors.toSet()); - - nextIds.addAll(discoveredUrns); - } - - if (!nextIds.isEmpty()) { - exportGraphIndex(nextIds, visitedUrns, visitedIds, hops + 1); - } + } + + public void exportGraphIndex( + Set urns, Set visitedUrns, Set visitedIds, int hops) { + Set nextIds = new HashSet<>(); + if (!urns.isEmpty()) { + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + boolQueryBuilder.must(QueryBuilders.termQuery("relationshipType", "DownstreamOf")); + + Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> { + boolQueryBuilder.should( + QueryBuilders.termsQuery("source.urn", batch.toArray(String[]::new))); + boolQueryBuilder.should( + QueryBuilders.termsQuery("destination.urn", batch.toArray(String[]::new))); + }); + boolQueryBuilder.minimumShouldMatch(1); + + // Exclude visited + Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.mustNot( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.query(boolQueryBuilder); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + + SearchRequest searchRequest = new SearchRequest(graphIndexName); + searchRequest.source(searchSourceBuilder); + + Set docIds = new HashSet<>(); + Set docs = new HashSet<>(); + + long startTime = System.currentTimeMillis(); + System.out.printf( + "Hops: %s (Ids: %s) [VisitedIds: %s]", hops, urns.size(), visitedUrns.size()); + + writer.write( + searchRequest, + graphOutputPath, + hops != 0, + anonymizerClazz, + GraphDocument.class, + (hit, doc) -> { + docIds.add(hit.getId()); + docs.add(doc); + }); + + long endTime = System.currentTimeMillis(); + System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); + + visitedIds.addAll(docIds); + visitedUrns.addAll(urns); + + Set discoveredUrns = + docs.stream() + .flatMap(d -> Stream.of(d.destination.urn, d.source.urn)) + .filter(Objects::nonNull) + .filter(urn -> !visitedUrns.contains(urn)) + .collect(Collectors.toSet()); + + nextIds.addAll(discoveredUrns); } - public void exportEntityIndex(Set ids, Set visitedIds, int hops) { - Set nextIds = new HashSet<>(); - - if (!ids.isEmpty()) { - Set urns = ids.stream().map(LineageExporter::idToUrn).collect(Collectors.toSet()); - - BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); - - Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.should(QueryBuilders.termsQuery("upstreams.keyword", batch.toArray(String[]::new))) - ); - Lists.partition(Arrays.asList(ids.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.should(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - boolQueryBuilder.minimumShouldMatch(1); - - // Exclude visited - Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize).forEach(batch -> - boolQueryBuilder.mustNot(QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new))) - ); - - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); - searchSourceBuilder.size(fetchSize); - searchSourceBuilder.query(boolQueryBuilder); - 
searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); - - SearchRequest searchRequest = new SearchRequest(entityIndexName); - searchRequest.source(searchSourceBuilder); - - Set docIds = new HashSet<>(); - Set docs = new HashSet<>(); - - long startTime = System.currentTimeMillis(); - System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, ids.size(), visitedIds.size()); - - writer.write(searchRequest, entityOutputPath, hops != 0, anonymizerClazz, - UrnDocument.class, (hit, doc) -> { - docIds.add(hit.getId()); - docs.add(doc); - }); - - long endTime = System.currentTimeMillis(); - System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); - - visitedIds.addAll(docIds); - - nextIds.addAll(docIds.stream() - .filter(Objects::nonNull) - .filter(docId -> !visitedIds.contains(docId)) - .collect(Collectors.toSet())); - nextIds.addAll(docs.stream() - .filter(doc -> doc.upstreams != null && !doc.upstreams.isEmpty()) - .flatMap(doc -> doc.upstreams.stream()) - .map(urn -> URLEncoder.encode(urn, StandardCharsets.UTF_8)) - .filter(docId -> !visitedIds.contains(docId)) - .collect(Collectors.toSet())); - } - - if (!nextIds.isEmpty()) { - exportEntityIndex(nextIds, visitedIds, hops + 1); - } + if (!nextIds.isEmpty()) { + exportGraphIndex(nextIds, visitedUrns, visitedIds, hops + 1); + } + } + + public void exportEntityIndex(Set ids, Set visitedIds, int hops) { + Set nextIds = new HashSet<>(); + + if (!ids.isEmpty()) { + Set urns = ids.stream().map(LineageExporter::idToUrn).collect(Collectors.toSet()); + + BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + Lists.partition(Arrays.asList(urns.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.should( + QueryBuilders.termsQuery("upstreams.keyword", batch.toArray(String[]::new)))); + Lists.partition(Arrays.asList(ids.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.should( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + boolQueryBuilder.minimumShouldMatch(1); + + // Exclude visited + Lists.partition(Arrays.asList(visitedIds.toArray(String[]::new)), queryStatementSize) + .forEach( + batch -> + boolQueryBuilder.mustNot( + QueryBuilders.idsQuery().addIds(batch.toArray(String[]::new)))); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.size(fetchSize); + searchSourceBuilder.query(boolQueryBuilder); + searchSourceBuilder.sort(SortBuilders.fieldSort("_id").order(SortOrder.ASC)); + + SearchRequest searchRequest = new SearchRequest(entityIndexName); + searchRequest.source(searchSourceBuilder); + + Set docIds = new HashSet<>(); + Set docs = new HashSet<>(); + + long startTime = System.currentTimeMillis(); + System.out.printf("Hops: %s (Ids: %s) [VisitedIds: %s]", hops, ids.size(), visitedIds.size()); + + writer.write( + searchRequest, + entityOutputPath, + hops != 0, + anonymizerClazz, + UrnDocument.class, + (hit, doc) -> { + docIds.add(hit.getId()); + docs.add(doc); + }); + + long endTime = System.currentTimeMillis(); + System.out.printf(" Time: %ss%n", (endTime - startTime) / 1000); + + visitedIds.addAll(docIds); + + nextIds.addAll( + docIds.stream() + .filter(Objects::nonNull) + .filter(docId -> !visitedIds.contains(docId)) + .collect(Collectors.toSet())); + nextIds.addAll( + docs.stream() + .filter(doc -> doc.upstreams != null && !doc.upstreams.isEmpty()) + .flatMap(doc -> doc.upstreams.stream()) + .map(urn -> URLEncoder.encode(urn, StandardCharsets.UTF_8)) + .filter(docId -> 
!visitedIds.contains(docId)) + .collect(Collectors.toSet())); } - public static class UrnDocument { - public String urn; - public List upstreams; + if (!nextIds.isEmpty()) { + exportEntityIndex(nextIds, visitedIds, hops + 1); } + } + + public static class UrnDocument { + public String urn; + public List upstreams; + } - public static class GraphDocument { - public String relationshipType; - public GraphNode source; - public GraphNode destination; + public static class GraphDocument { + public String relationshipType; + public GraphNode source; + public GraphNode destination; - public static class GraphNode { - public String urn; - public String entityType; - } + public static class GraphNode { + public String urn; + public String entityType; } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java index 45bbd912bc794..14e5259f90097 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SampleDataFixtureConfiguration.java @@ -1,15 +1,20 @@ package io.datahubproject.test.fixtures.search; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import static com.linkedin.metadata.Constants.*; +import static io.datahubproject.test.search.config.SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.config.search.CustomConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.entity.EntityAspectIdentifier; @@ -31,8 +36,11 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; - +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import java.io.IOException; +import java.util.Map; import java.util.Optional; +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -42,244 +50,245 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static io.datahubproject.test.search.config.SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS; -import static 
org.mockito.ArgumentMatchers.anySet; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration @Import(SearchCommonTestConfiguration.class) public class SampleDataFixtureConfiguration { - /** - * Interested in adding more fixtures? Here's what you will need to update? - * 1. Create a new indexPrefix and FixtureName. Both are needed or else all fixtures will load on top of each other, - * overwriting each other - * 2. Create a new IndexConvention, IndexBuilder, and EntityClient. These are needed - * to index a different set of entities. - */ - - @Autowired - private ESBulkProcessor _bulkProcessor; - - @Autowired - private RestHighLevelClient _searchClient; - - @Autowired - private RestHighLevelClient _longTailSearchClient; - - @Autowired - private SearchConfiguration _searchConfiguration; - - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; - - @Bean(name = "sampleDataPrefix") - protected String sampleDataPrefix() { - return "smpldat"; - } - - @Bean(name = "longTailPrefix") - protected String longTailIndexPrefix() { - return "lngtl"; - } - - @Bean(name = "sampleDataIndexConvention") - protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "longTailIndexConvention") - protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "sampleDataFixtureName") - protected String sampleDataFixtureName() { - return "sample_data"; - } - - @Bean(name = "longTailFixtureName") - protected String longTailFixtureName() { - return "long_tail"; - } - - @Bean(name = "sampleDataEntityIndexBuilders") - protected EntityIndexBuilders entityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention - ) { - return entityIndexBuildersHelper(entityRegistry, indexConvention); - } - - @Bean(name = "longTailEntityIndexBuilders") - protected EntityIndexBuilders longTailEntityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailIndexConvention") IndexConvention indexConvention - ) { - return entityIndexBuildersHelper(longTailEntityRegistry, indexConvention); - } - - protected EntityIndexBuilders entityIndexBuildersHelper( - EntityRegistry entityRegistry, - IndexConvention indexConvention - ) { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder indexBuilder = new ESIndexBuilder(_searchClient, 1, 0, 1, - 1, Map.of(), true, false, - new ElasticSearchConfiguration(), gitVersion); - SettingsBuilder settingsBuilder = new SettingsBuilder(null); - return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); - } - - @Bean(name = "sampleDataEntitySearchService") - protected ElasticSearchService entitySearchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention - ) throws IOException { - return entitySearchServiceHelper(entityRegistry, indexBuilders, indexConvention); - } - - @Bean(name = "longTailEntitySearchService") - protected ElasticSearchService longTailEntitySearchService( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - 
@Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailEndexBuilders, - @Qualifier("longTailIndexConvention") IndexConvention longTailIndexConvention - ) throws IOException { - return entitySearchServiceHelper(longTailEntityRegistry, longTailEndexBuilders, longTailIndexConvention); - } - - protected ElasticSearchService entitySearchServiceHelper( - EntityRegistry entityRegistry, - EntityIndexBuilders indexBuilders, - IndexConvention indexConvention - ) throws IOException { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_fixture_test.yml"); - CustomSearchConfiguration customSearchConfiguration = customConfiguration.resolve(new YAMLMapper()); - - ESSearchDAO searchDAO = new ESSearchDAO(entityRegistry, _searchClient, indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, _searchConfiguration, customSearchConfiguration); - ESBrowseDAO browseDAO = new ESBrowseDAO(entityRegistry, _searchClient, indexConvention, _searchConfiguration, _customSearchConfiguration); - ESWriteDAO writeDAO = new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); - return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); - } - - @Bean(name = "sampleDataSearchService") - @Nonnull - protected SearchService searchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("sampleDataPrefix") String prefix, - @Qualifier("sampleDataFixtureName") String sampleDataFixtureName - ) throws IOException { - return searchServiceHelper(entityRegistry, entitySearchService, indexBuilders, prefix, sampleDataFixtureName); - } - - @Bean(name = "longTailSearchService") - @Nonnull - protected SearchService longTailSearchService( - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, - @Qualifier("longTailEntitySearchService") ElasticSearchService longTailEntitySearchService, - @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, - @Qualifier("longTailPrefix") String longTailPrefix, - @Qualifier("longTailFixtureName") String longTailFixtureName - ) throws IOException { - return searchServiceHelper(longTailEntityRegistry, longTailEntitySearchService, longTailIndexBuilders, longTailPrefix, longTailFixtureName); - } - - public SearchService searchServiceHelper( - EntityRegistry entityRegistry, - ElasticSearchService entitySearchService, - EntityIndexBuilders indexBuilders, - String prefix, - String fixtureName - ) throws IOException { - int batchSize = 100; - SearchRanker ranker = new SimpleRanker(); - CacheManager cacheManager = new ConcurrentMapCacheManager(); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); - entityDocCountCacheConfiguration.setTtlSeconds(600L); - - SearchService service = new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, entityDocCountCacheConfiguration), - new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - false - ), - ranker - ); - - // Build indices & write fixture data - indexBuilders.reindexAll(); - - FixtureReader.builder() - .bulkProcessor(_bulkProcessor) - .fixtureName(fixtureName) - .targetIndexPrefix(prefix) - .refreshIntervalSeconds(REFRESH_INTERVAL_SECONDS) - 
.build() - .read(); - - return service; - } - - @Bean(name = "sampleDataEntityClient") - @Nonnull - protected EntityClient entityClient( - @Qualifier("sampleDataSearchService") SearchService searchService, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry entityRegistry - ) { - return entityClientHelper(searchService, entitySearchService, entityRegistry); - } - - @Bean(name = "longTailEntityClient") - @Nonnull - protected EntityClient longTailEntityClient( - @Qualifier("sampleDataSearchService") SearchService searchService, - @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry - ) { - return entityClientHelper(searchService, entitySearchService, longTailEntityRegistry); - } - - private EntityClient entityClientHelper( - SearchService searchService, - ElasticSearchService entitySearchService, - EntityRegistry entityRegistry - ) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - new ConcurrentMapCacheManager(), - entitySearchService, - 1, - false); - - AspectDao mockAspectDao = mock(AspectDao.class); - when(mockAspectDao.batchGet(anySet())).thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); - - PreProcessHooks preProcessHooks = new PreProcessHooks(); - preProcessHooks.setUiEnabled(true); - return new JavaEntityClient( - new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, - preProcessHooks), - null, - entitySearchService, - cachingEntitySearchService, - searchService, - null, - null, - null, - null); - } + /** + * Interested in adding more fixtures? Here's what you will need to update: 1. Create a new + * indexPrefix and FixtureName. Both are needed or else all fixtures will load on top of each + * other, overwriting each other. 2. Create a new IndexConvention, IndexBuilder, and EntityClient. + * These are needed to index a different set of entities.
+ */ + @Autowired private ESBulkProcessor _bulkProcessor; + + @Autowired private RestHighLevelClient _searchClient; + + @Autowired private RestHighLevelClient _longTailSearchClient; + + @Autowired private SearchConfiguration _searchConfiguration; + + @Autowired private CustomSearchConfiguration _customSearchConfiguration; + + @Bean(name = "sampleDataPrefix") + protected String sampleDataPrefix() { + return "smpldat"; + } + + @Bean(name = "longTailPrefix") + protected String longTailIndexPrefix() { + return "lngtl"; + } + + @Bean(name = "sampleDataIndexConvention") + protected IndexConvention indexConvention(@Qualifier("sampleDataPrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "longTailIndexConvention") + protected IndexConvention longTailIndexConvention(@Qualifier("longTailPrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "sampleDataFixtureName") + protected String sampleDataFixtureName() { + return "sample_data"; + } + + @Bean(name = "longTailFixtureName") + protected String longTailFixtureName() { + return "long_tail"; + } + + @Bean(name = "sampleDataEntityIndexBuilders") + protected EntityIndexBuilders entityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention) { + return entityIndexBuildersHelper(entityRegistry, indexConvention); + } + + @Bean(name = "longTailEntityIndexBuilders") + protected EntityIndexBuilders longTailEntityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailIndexConvention") IndexConvention indexConvention) { + return entityIndexBuildersHelper(longTailEntityRegistry, indexConvention); + } + + protected EntityIndexBuilders entityIndexBuildersHelper( + EntityRegistry entityRegistry, IndexConvention indexConvention) { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder indexBuilder = + new ESIndexBuilder( + _searchClient, + 1, + 0, + 1, + 1, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + SettingsBuilder settingsBuilder = new SettingsBuilder(null); + return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); + } + + @Bean(name = "sampleDataEntitySearchService") + protected ElasticSearchService entitySearchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("sampleDataIndexConvention") IndexConvention indexConvention) + throws IOException { + return entitySearchServiceHelper(entityRegistry, indexBuilders, indexConvention); + } + + @Bean(name = "longTailEntitySearchService") + protected ElasticSearchService longTailEntitySearchService( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, + @Qualifier("longTailIndexConvention") IndexConvention longTailIndexConvention) + throws IOException { + return entitySearchServiceHelper( + longTailEntityRegistry, longTailIndexBuilders, longTailIndexConvention); + } + + protected ElasticSearchService entitySearchServiceHelper( + EntityRegistry entityRegistry, + EntityIndexBuilders indexBuilders, + IndexConvention indexConvention) + throws IOException { + CustomConfiguration customConfiguration = new CustomConfiguration(); + customConfiguration.setEnabled(true); +
customConfiguration.setFile("search_config_fixture_test.yml"); + CustomSearchConfiguration customSearchConfiguration = + customConfiguration.resolve(new YAMLMapper()); + + ESSearchDAO searchDAO = + new ESSearchDAO( + entityRegistry, + _searchClient, + indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + _searchConfiguration, + customSearchConfiguration); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + entityRegistry, + _searchClient, + indexConvention, + _searchConfiguration, + _customSearchConfiguration); + ESWriteDAO writeDAO = + new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); + } + + @Bean(name = "sampleDataSearchService") + @Nonnull + protected SearchService searchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("sampleDataEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("sampleDataPrefix") String prefix, + @Qualifier("sampleDataFixtureName") String sampleDataFixtureName) + throws IOException { + return searchServiceHelper( + entityRegistry, entitySearchService, indexBuilders, prefix, sampleDataFixtureName); + } + + @Bean(name = "longTailSearchService") + @Nonnull + protected SearchService longTailSearchService( + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry, + @Qualifier("longTailEntitySearchService") ElasticSearchService longTailEntitySearchService, + @Qualifier("longTailEntityIndexBuilders") EntityIndexBuilders longTailIndexBuilders, + @Qualifier("longTailPrefix") String longTailPrefix, + @Qualifier("longTailFixtureName") String longTailFixtureName) + throws IOException { + return searchServiceHelper( + longTailEntityRegistry, + longTailEntitySearchService, + longTailIndexBuilders, + longTailPrefix, + longTailFixtureName); + } + + public SearchService searchServiceHelper( + EntityRegistry entityRegistry, + ElasticSearchService entitySearchService, + EntityIndexBuilders indexBuilders, + String prefix, + String fixtureName) + throws IOException { + int batchSize = 100; + SearchRanker ranker = new SimpleRanker(); + CacheManager cacheManager = new ConcurrentMapCacheManager(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); + entityDocCountCacheConfiguration.setTtlSeconds(600L); + + SearchService service = + new SearchService( + new EntityDocCountCache( + entityRegistry, entitySearchService, entityDocCountCacheConfiguration), + new CachingEntitySearchService(cacheManager, entitySearchService, batchSize, false), + ranker); + + // Build indices & write fixture data + indexBuilders.reindexAll(); + + FixtureReader.builder() + .bulkProcessor(_bulkProcessor) + .fixtureName(fixtureName) + .targetIndexPrefix(prefix) + .refreshIntervalSeconds(REFRESH_INTERVAL_SECONDS) + .build() + .read(); + + return service; + } + + @Bean(name = "sampleDataEntityClient") + @Nonnull + protected EntityClient entityClient( + @Qualifier("sampleDataSearchService") SearchService searchService, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry entityRegistry) { + return entityClientHelper(searchService, entitySearchService, entityRegistry); + } + + @Bean(name = "longTailEntityClient") + @Nonnull + protected EntityClient longTailEntityClient( + 
@Qualifier("sampleDataSearchService") SearchService searchService, + @Qualifier("sampleDataEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry longTailEntityRegistry) { + return entityClientHelper(searchService, entitySearchService, longTailEntityRegistry); + } + + private EntityClient entityClientHelper( + SearchService searchService, + ElasticSearchService entitySearchService, + EntityRegistry entityRegistry) { + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService( + new ConcurrentMapCacheManager(), entitySearchService, 1, false); + + AspectDao mockAspectDao = mock(AspectDao.class); + when(mockAspectDao.batchGet(anySet())) + .thenReturn(Map.of(mock(EntityAspectIdentifier.class), mock(EntityAspect.class))); + + PreProcessHooks preProcessHooks = new PreProcessHooks(); + preProcessHooks.setUiEnabled(true); + return new JavaEntityClient( + new EntityServiceImpl(mockAspectDao, null, entityRegistry, true, null, preProcessHooks), + null, + entitySearchService, + cachingEntitySearchService, + searchService, + null, + null, + null, + null); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java index d74dd041f082e..d3b16b2beed3d 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchFixtureUtils.java @@ -1,13 +1,18 @@ package io.datahubproject.test.fixtures.search; +import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; +import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; -import io.datahubproject.test.search.ElasticsearchTestContainer; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import io.datahubproject.test.models.DatasetAnonymized; +import io.datahubproject.test.search.ElasticsearchTestContainer; import io.datahubproject.test.search.SearchTestUtils; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import java.io.IOException; +import java.util.Set; import org.opensearch.client.RestHighLevelClient; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; @@ -15,125 +20,127 @@ import org.testng.annotations.Ignore; import org.testng.annotations.Test; -import java.io.IOException; -import java.util.Set; - -import static com.linkedin.metadata.Constants.INGESTION_MAX_SERIALIZED_STRING_LENGTH; -import static com.linkedin.metadata.Constants.MAX_JACKSON_STRING_SIZE; - -/** - * This class is used for extracting and moving search fixture data. - */ +/** This class is used for extracting and moving search fixture data. 
*/ @TestConfiguration public class SearchFixtureUtils { - final public static String FIXTURE_BASE = "src/test/resources/elasticsearch"; - - final public static ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } - - @Bean(name = "testSearchContainer") - public GenericContainer testSearchContainer() { - return new ElasticsearchTestContainer().startContainer(); + public static final String FIXTURE_BASE = "src/test/resources/elasticsearch"; + + public static final ObjectMapper OBJECT_MAPPER = + new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + @Bean(name = "testSearchContainer") + public GenericContainer testSearchContainer() { + return new ElasticsearchTestContainer().startContainer(); + } + + @Test + @Ignore("Fixture capture lineage") + /* + * Run this to capture test fixtures, repeat for graph & dataset + * 1. Configure anonymizer class (use builder or set to null). Do not commit non-anonymous data + * 2. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) + * 3. Update fixture name + * 4. Comment @Ignore + * 5. Create output directory + * 6. Run extraction + **/ + private void extractSearchLineageTestFixture() throws IOException { + String rootUrn = + "urn:li:dataset:(urn:li:dataPlatform:teradata,teradata.simba.pp_bi_tables.tmis_daily_metrics_final_agg,PROD)"; + + // Set.of("system_metadata_service_v1", "datasetindex_v2", "graph_service_v1") + try (RestHighLevelClient client = + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { + FixtureWriter fixtureWriter = FixtureWriter.builder().client(client).build(); + + /* + LineageExporter exporter = LineageExporter.builder() + .writer(fixtureWriter) + .anonymizerClazz(GraphAnonymized.class) + .graphIndexName("_graph_service_v1-5shards") + .graphOutputPath(String.format("%s/%s.json", "search_lineage2", "graph_service_v1")) + .build(); + */ + + LineageExporter exporter = + LineageExporter.builder() + .writer(fixtureWriter) + .anonymizerClazz(DatasetAnonymized.class) + .entityIndexName("_datasetindex_v2-5shards") + .entityOutputPath(String.format("%s/%s.json", "search_lineage2", "datasetindex_v2")) + .build(); + + exporter.export(Set.of(rootUrn)); } - - @Test - @Ignore("Fixture capture lineage") - /* - * Run this to capture test fixtures, repeat for graph & dataset - * 1. Configure anonymizer class (use builder or set to null) Do not commit non-anonymous data - * 2. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) - * 2. Update fixture name - * 3. Comment @Ignore - * 4. Create output directory - * 5.
Run extraction - **/ - private void extractSearchLineageTestFixture() throws IOException { - String rootUrn = "urn:li:dataset:(urn:li:dataPlatform:teradata,teradata.simba.pp_bi_tables.tmis_daily_metrics_final_agg,PROD)"; - - // Set.of("system_metadata_service_v1", "datasetindex_v2", "graph_service_v1") - try (RestHighLevelClient client = new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { - FixtureWriter fixtureWriter = FixtureWriter.builder() - .client(client) - .build(); - - /* - LineageExporter exporter = LineageExporter.builder() - .writer(fixtureWriter) - .anonymizerClazz(GraphAnonymized.class) - .graphIndexName("_graph_service_v1-5shards") - .graphOutputPath(String.format("%s/%s.json", "search_lineage2", "graph_service_v1")) - .build(); - */ - - LineageExporter exporter = LineageExporter.builder() - .writer(fixtureWriter) - .anonymizerClazz(DatasetAnonymized.class) - .entityIndexName("_datasetindex_v2-5shards") - .entityOutputPath(String.format("%s/%s.json", "search_lineage2", "datasetindex_v2")) - .build(); - - exporter.export(Set.of(rootUrn)); - } - } - - @Test - @Ignore("Fixture capture logic") - /* - * Run this to capture test fixtures - * 1. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) - * 2. Update fixture name - * 3. Comment @Ignore - * 4. Run extraction - **/ - private void extractEntityTestFixture() throws IOException { - String fixtureName = "temp"; - String prefix = ""; - String commonSuffix = "index_v2"; - - try (RestHighLevelClient client = new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { - FixtureWriter fixtureWriter = FixtureWriter.builder() - .client(client) - .build(); - - EntityExporter exporter = EntityExporter.builder() - .client(client) - .writer(fixtureWriter) - .fixtureName(fixtureName) - .sourceIndexSuffix(commonSuffix) - .sourceIndexPrefix(prefix) - .build(); - - exporter.export(); - } - } - - @Test - @Ignore("Write capture logic to some external ES cluster for testing") - /* - * Can be used to write fixture data to external ES cluster - * 1. Set environment variables - * 2. Update fixture name and prefix - * 3. Uncomment and run test - */ - private void reindexTestFixtureData() throws IOException { - ESBulkProcessor bulkProcessor = ESBulkProcessor.builder(new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) - .async(true) - .bulkRequestsLimit(1000) - .retryInterval(1L) - .numRetries(2) - .build(); - - FixtureReader reader = FixtureReader.builder() - .bulkProcessor(bulkProcessor) - .fixtureName("long_tail") - .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) - .build(); - - reader.read(); + } + + @Test + @Ignore("Fixture capture logic") + /* + * Run this to capture test fixtures + * 1. Update environment variables for ELASTICSEARCH_* (see buildEnvironmentClient) + * 2. Update fixture name + * 3. Comment @Ignore + * 4. 
Run extraction + **/ + private void extractEntityTestFixture() throws IOException { + String fixtureName = "temp"; + String prefix = ""; + String commonSuffix = "index_v2"; + + try (RestHighLevelClient client = + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) { + FixtureWriter fixtureWriter = FixtureWriter.builder().client(client).build(); + + EntityExporter exporter = + EntityExporter.builder() + .client(client) + .writer(fixtureWriter) + .fixtureName(fixtureName) + .sourceIndexSuffix(commonSuffix) + .sourceIndexPrefix(prefix) + .build(); + + exporter.export(); } + } + + @Test + @Ignore("Write capture logic to some external ES cluster for testing") + /* + * Can be used to write fixture data to external ES cluster + * 1. Set environment variables + * 2. Update fixture name and prefix + * 3. Uncomment and run test + */ + private void reindexTestFixtureData() throws IOException { + ESBulkProcessor bulkProcessor = + ESBulkProcessor.builder( + new RestHighLevelClient(SearchTestUtils.environmentRestClientBuilder())) + .async(true) + .bulkRequestsLimit(1000) + .retryInterval(1L) + .numRetries(2) + .build(); + + FixtureReader reader = + FixtureReader.builder() + .bulkProcessor(bulkProcessor) + .fixtureName("long_tail") + .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) + .build(); + + reader.read(); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java index 93d3f108d9e47..978471b53faad 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/fixtures/search/SearchLineageFixtureConfiguration.java @@ -1,7 +1,9 @@ package io.datahubproject.test.fixtures.search; -import io.datahubproject.test.search.config.SearchCommonTestConfiguration; -import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.config.cache.EntityDocCountCacheConfiguration; import com.linkedin.metadata.config.cache.SearchLineageCacheConfiguration; @@ -9,8 +11,6 @@ import com.linkedin.metadata.config.search.GraphQueryConfiguration; import com.linkedin.metadata.config.search.SearchConfiguration; import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.graph.elastic.ESGraphQueryDAO; import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO; @@ -34,9 +34,12 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.metadata.version.GitVersion; - +import io.datahubproject.test.search.config.SearchCommonTestConfiguration; +import io.datahubproject.test.search.config.SearchTestContainerConfiguration; +import java.io.IOException; +import java.util.Map; import java.util.Optional; - +import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import 
org.springframework.beans.factory.annotation.Qualifier; @@ -46,180 +49,199 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; -import javax.annotation.Nonnull; -import java.io.IOException; -import java.util.Map; - -import static com.linkedin.metadata.Constants.*; - - @TestConfiguration @Import(SearchCommonTestConfiguration.class) public class SearchLineageFixtureConfiguration { - @Autowired - private ESBulkProcessor _bulkProcessor; - - @Autowired - private RestHighLevelClient _searchClient; - - @Autowired - private SearchConfiguration _searchConfiguration; - - @Autowired - private CustomSearchConfiguration _customSearchConfiguration; - - @Bean(name = "searchLineagePrefix") - protected String indexPrefix() { - return "srchlin"; - } - - @Bean(name = "searchLineageIndexConvention") - protected IndexConvention indexConvention(@Qualifier("searchLineagePrefix") String prefix) { - return new IndexConventionImpl(prefix); - } - - @Bean(name = "searchLineageFixtureName") - protected String fixtureName() { - return "search_lineage"; - } - - @Bean(name = "lineageCacheConfiguration") - protected SearchLineageCacheConfiguration searchLineageCacheConfiguration() { - SearchLineageCacheConfiguration conf = new SearchLineageCacheConfiguration(); - conf.setLightningThreshold(300); - conf.setTtlSeconds(30); - return conf; - } - - @Bean(name = "searchLineageEntityIndexBuilders") - protected EntityIndexBuilders entityIndexBuilders( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - ESIndexBuilder indexBuilder = new ESIndexBuilder(_searchClient, 1, 0, 1, - 1, Map.of(), true, false, - new ElasticSearchConfiguration(), gitVersion); - SettingsBuilder settingsBuilder = new SettingsBuilder(null); - return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); - } - - @Bean(name = "searchLineageEntitySearchService") - protected ElasticSearchService entitySearchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - ESSearchDAO searchDAO = new ESSearchDAO(entityRegistry, _searchClient, indexConvention, false, - ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, _searchConfiguration, null); - ESBrowseDAO browseDAO = new ESBrowseDAO(entityRegistry, _searchClient, indexConvention, _searchConfiguration, _customSearchConfiguration); - ESWriteDAO writeDAO = new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); - return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); - } - - @Bean(name = "searchLineageESIndexBuilder") - @Nonnull - protected ESIndexBuilder esIndexBuilder() { - GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); - return new ESIndexBuilder(_searchClient, 1, 1, 1, 1, Map.of(), - true, true, - new ElasticSearchConfiguration(), gitVersion); - } - - @Bean(name = "searchLineageGraphService") - @Nonnull - protected ElasticSearchGraphService graphService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageESIndexBuilder") ESIndexBuilder indexBuilder, - @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention - ) { - LineageRegistry 
lineageRegistry = new LineageRegistry(entityRegistry); - ElasticSearchGraphService graphService = new ElasticSearchGraphService(lineageRegistry, _bulkProcessor, indexConvention, - new ESGraphWriteDAO(indexConvention, _bulkProcessor, 1), - new ESGraphQueryDAO(_searchClient, lineageRegistry, indexConvention, GraphQueryConfiguration.testDefaults), indexBuilder); - graphService.configure(); - return graphService; - } - - @Bean(name = "searchLineageLineageSearchService") - @Nonnull - protected LineageSearchService lineageSearchService( - @Qualifier("searchLineageSearchService") SearchService searchService, - @Qualifier("searchLineageGraphService") ElasticSearchGraphService graphService, - @Qualifier("searchLineagePrefix") String prefix, - @Qualifier("searchLineageFixtureName") String fixtureName, - @Qualifier("lineageCacheConfiguration") SearchLineageCacheConfiguration cacheConfiguration - ) throws IOException { - - // Load fixture data (after graphService mappings applied) - FixtureReader.builder() - .bulkProcessor(_bulkProcessor) - .fixtureName(fixtureName) - .targetIndexPrefix(prefix) - .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) - .build() - .read(); - - return new LineageSearchService(searchService, graphService, null, false, cacheConfiguration); - } - - @Bean(name = "searchLineageSearchService") - @Nonnull - protected SearchService searchService( - @Qualifier("entityRegistry") EntityRegistry entityRegistry, - @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders - ) throws IOException { - - int batchSize = 100; - SearchRanker ranker = new SimpleRanker(); - CacheManager cacheManager = new ConcurrentMapCacheManager(); - EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = new EntityDocCountCacheConfiguration(); - entityDocCountCacheConfiguration.setTtlSeconds(600L); - - SearchService service = new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, entityDocCountCacheConfiguration), - new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - false - ), - ranker - ); - - // Build indices - indexBuilders.reindexAll(); - - return service; - } - - @Bean(name = "searchLineageEntityClient") - @Nonnull - protected EntityClient entityClient( - @Qualifier("searchLineageSearchService") SearchService searchService, - @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, - @Qualifier("entityRegistry") EntityRegistry entityRegistry - ) { - CachingEntitySearchService cachingEntitySearchService = new CachingEntitySearchService( - new ConcurrentMapCacheManager(), - entitySearchService, - 1, - false); - - PreProcessHooks preProcessHooks = new PreProcessHooks(); - preProcessHooks.setUiEnabled(true); - return new JavaEntityClient( - new EntityServiceImpl(null, null, entityRegistry, true, null, - preProcessHooks), - null, - entitySearchService, - cachingEntitySearchService, - searchService, - null, - null, - null, - null); - } + @Autowired private ESBulkProcessor _bulkProcessor; + + @Autowired private RestHighLevelClient _searchClient; + + @Autowired private SearchConfiguration _searchConfiguration; + + @Autowired private CustomSearchConfiguration _customSearchConfiguration; + + @Bean(name = "searchLineagePrefix") + protected String indexPrefix() { + return "srchlin"; + } + + @Bean(name = "searchLineageIndexConvention") + protected IndexConvention 
indexConvention(@Qualifier("searchLineagePrefix") String prefix) { + return new IndexConventionImpl(prefix); + } + + @Bean(name = "searchLineageFixtureName") + protected String fixtureName() { + return "search_lineage"; + } + + @Bean(name = "lineageCacheConfiguration") + protected SearchLineageCacheConfiguration searchLineageCacheConfiguration() { + SearchLineageCacheConfiguration conf = new SearchLineageCacheConfiguration(); + conf.setLightningThreshold(300); + conf.setTtlSeconds(30); + return conf; + } + + @Bean(name = "searchLineageEntityIndexBuilders") + protected EntityIndexBuilders entityIndexBuilders( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + ESIndexBuilder indexBuilder = + new ESIndexBuilder( + _searchClient, + 1, + 0, + 1, + 1, + Map.of(), + true, + false, + new ElasticSearchConfiguration(), + gitVersion); + SettingsBuilder settingsBuilder = new SettingsBuilder(null); + return new EntityIndexBuilders(indexBuilder, entityRegistry, indexConvention, settingsBuilder); + } + + @Bean(name = "searchLineageEntitySearchService") + protected ElasticSearchService entitySearchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + ESSearchDAO searchDAO = + new ESSearchDAO( + entityRegistry, + _searchClient, + indexConvention, + false, + ELASTICSEARCH_IMPLEMENTATION_ELASTICSEARCH, + _searchConfiguration, + null); + ESBrowseDAO browseDAO = + new ESBrowseDAO( + entityRegistry, + _searchClient, + indexConvention, + _searchConfiguration, + _customSearchConfiguration); + ESWriteDAO writeDAO = + new ESWriteDAO(entityRegistry, _searchClient, indexConvention, _bulkProcessor, 1); + return new ElasticSearchService(indexBuilders, searchDAO, browseDAO, writeDAO); + } + + @Bean(name = "searchLineageESIndexBuilder") + @Nonnull + protected ESIndexBuilder esIndexBuilder() { + GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty()); + return new ESIndexBuilder( + _searchClient, + 1, + 1, + 1, + 1, + Map.of(), + true, + true, + new ElasticSearchConfiguration(), + gitVersion); + } + + @Bean(name = "searchLineageGraphService") + @Nonnull + protected ElasticSearchGraphService graphService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageESIndexBuilder") ESIndexBuilder indexBuilder, + @Qualifier("searchLineageIndexConvention") IndexConvention indexConvention) { + LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); + ElasticSearchGraphService graphService = + new ElasticSearchGraphService( + lineageRegistry, + _bulkProcessor, + indexConvention, + new ESGraphWriteDAO(indexConvention, _bulkProcessor, 1), + new ESGraphQueryDAO( + _searchClient, + lineageRegistry, + indexConvention, + GraphQueryConfiguration.testDefaults), + indexBuilder); + graphService.configure(); + return graphService; + } + + @Bean(name = "searchLineageLineageSearchService") + @Nonnull + protected LineageSearchService lineageSearchService( + @Qualifier("searchLineageSearchService") SearchService searchService, + @Qualifier("searchLineageGraphService") ElasticSearchGraphService graphService, + @Qualifier("searchLineagePrefix") String prefix, + @Qualifier("searchLineageFixtureName") String fixtureName, + 
@Qualifier("lineageCacheConfiguration") SearchLineageCacheConfiguration cacheConfiguration) + throws IOException { + + // Load fixture data (after graphService mappings applied) + FixtureReader.builder() + .bulkProcessor(_bulkProcessor) + .fixtureName(fixtureName) + .targetIndexPrefix(prefix) + .refreshIntervalSeconds(SearchTestContainerConfiguration.REFRESH_INTERVAL_SECONDS) + .build() + .read(); + + return new LineageSearchService(searchService, graphService, null, false, cacheConfiguration); + } + + @Bean(name = "searchLineageSearchService") + @Nonnull + protected SearchService searchService( + @Qualifier("entityRegistry") EntityRegistry entityRegistry, + @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("searchLineageEntityIndexBuilders") EntityIndexBuilders indexBuilders) + throws IOException { + + int batchSize = 100; + SearchRanker ranker = new SimpleRanker(); + CacheManager cacheManager = new ConcurrentMapCacheManager(); + EntityDocCountCacheConfiguration entityDocCountCacheConfiguration = + new EntityDocCountCacheConfiguration(); + entityDocCountCacheConfiguration.setTtlSeconds(600L); + + SearchService service = + new SearchService( + new EntityDocCountCache( + entityRegistry, entitySearchService, entityDocCountCacheConfiguration), + new CachingEntitySearchService(cacheManager, entitySearchService, batchSize, false), + ranker); + + // Build indices + indexBuilders.reindexAll(); + + return service; + } + + @Bean(name = "searchLineageEntityClient") + @Nonnull + protected EntityClient entityClient( + @Qualifier("searchLineageSearchService") SearchService searchService, + @Qualifier("searchLineageEntitySearchService") ElasticSearchService entitySearchService, + @Qualifier("entityRegistry") EntityRegistry entityRegistry) { + CachingEntitySearchService cachingEntitySearchService = + new CachingEntitySearchService( + new ConcurrentMapCacheManager(), entitySearchService, 1, false); + + PreProcessHooks preProcessHooks = new PreProcessHooks(); + preProcessHooks.setUiEnabled(true); + return new JavaEntityClient( + new EntityServiceImpl(null, null, entityRegistry, true, null, preProcessHooks), + null, + entitySearchService, + cachingEntitySearchService, + searchService, + null, + null, + null, + null); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java index 6036473063059..3b68ef50be18f 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/Anonymized.java @@ -1,53 +1,48 @@ package io.datahubproject.test.models; import com.fasterxml.jackson.annotation.JsonSetter; - import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.regex.Matcher; import java.util.regex.Pattern; - - import org.apache.commons.codec.binary.Hex; public abstract class Anonymized { - public String urn; - - @JsonSetter - public void setUrn(String urn) { - this.urn = anonymizeUrn(urn); - } - - private static final Pattern URN_REGEX = Pattern.compile("^(.+)[(](.+),(.+),([A-Z]+)[)]$"); - - public static String anonymizeUrn(String urn) { - if (urn != null) { - Matcher m = URN_REGEX.matcher(urn); - if (m.find()) { - return String.format("%s(%s,%s,%s)", - m.group(1), - anonymizeLast(m.group(2), ":"), - hashFunction(m.group(3)), - m.group(4)); - } - } - return urn; + public String urn; + + @JsonSetter + public void setUrn(String urn) { + 
this.urn = anonymizeUrn(urn); + } + + private static final Pattern URN_REGEX = Pattern.compile("^(.+)[(](.+),(.+),([A-Z]+)[)]$"); + + public static String anonymizeUrn(String urn) { + if (urn != null) { + Matcher m = URN_REGEX.matcher(urn); + if (m.find()) { + return String.format( + "%s(%s,%s,%s)", + m.group(1), anonymizeLast(m.group(2), ":"), hashFunction(m.group(3)), m.group(4)); + } } - - protected static String anonymizeLast(String s, String sep) { - String[] splits = s.split(sep); - splits[splits.length - 1] = hashFunction(splits[splits.length - 1]); - return String.join(sep, splits); - } - - protected static String hashFunction(String s) { - try { - MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); - messageDigest.update(s.getBytes()); - char[] hex = Hex.encodeHex(messageDigest.digest()); - return new String(hex).substring(0, Math.min(s.length() - 1, hex.length - 1)); - } catch (NoSuchAlgorithmException e) { - throw new RuntimeException(e); - } + return urn; + } + + protected static String anonymizeLast(String s, String sep) { + String[] splits = s.split(sep); + splits[splits.length - 1] = hashFunction(splits[splits.length - 1]); + return String.join(sep, splits); + } + + protected static String hashFunction(String s) { + try { + MessageDigest messageDigest = MessageDigest.getInstance("SHA-256"); + messageDigest.update(s.getBytes()); + char[] hex = Hex.encodeHex(messageDigest.digest()); + return new String(hex).substring(0, Math.min(s.length() - 1, hex.length - 1)); + } catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java index 35813d22067a6..c870b4682a6b8 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/DatasetAnonymized.java @@ -1,7 +1,6 @@ package io.datahubproject.test.models; import com.fasterxml.jackson.annotation.JsonGetter; - import java.util.Arrays; import java.util.Optional; import java.util.Set; @@ -9,35 +8,38 @@ public class DatasetAnonymized extends Anonymized { - public Set upstreams; - public String id; - public String origin; - public String platform; - public boolean removed; - public Set browsePaths; - - @JsonGetter("id") - public String getId() { - return Optional.ofNullable(id).map(Anonymized::hashFunction).orElse(null); - } - - @JsonGetter("platform") - public String getPlatform() { - return Optional.ofNullable(platform).map(p -> Anonymized.anonymizeLast(p, ":")).orElse(null); - } - - @JsonGetter("upstreams") - public Set getUpstreams() { - return Optional.ofNullable(upstreams).orElse(Set.of()).stream() - .map(Anonymized::anonymizeUrn).collect(Collectors.toSet()); - } - - @JsonGetter("browsePaths") - public Set getBrowsePaths() { - return Optional.ofNullable(browsePaths).orElse(Set.of()).stream() - .map(p -> Arrays.stream(p.split("/")) - .map(Anonymized::hashFunction) - .collect(Collectors.joining("/")) - ).collect(Collectors.toSet()); - } + public Set upstreams; + public String id; + public String origin; + public String platform; + public boolean removed; + public Set browsePaths; + + @JsonGetter("id") + public String getId() { + return Optional.ofNullable(id).map(Anonymized::hashFunction).orElse(null); + } + + @JsonGetter("platform") + public String getPlatform() { + return Optional.ofNullable(platform).map(p -> Anonymized.anonymizeLast(p, 
":")).orElse(null); + } + + @JsonGetter("upstreams") + public Set getUpstreams() { + return Optional.ofNullable(upstreams).orElse(Set.of()).stream() + .map(Anonymized::anonymizeUrn) + .collect(Collectors.toSet()); + } + + @JsonGetter("browsePaths") + public Set getBrowsePaths() { + return Optional.ofNullable(browsePaths).orElse(Set.of()).stream() + .map( + p -> + Arrays.stream(p.split("/")) + .map(Anonymized::hashFunction) + .collect(Collectors.joining("/"))) + .collect(Collectors.toSet()); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java b/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java index 3d2360ae04228..bbd95671ee95a 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java +++ b/metadata-io/src/test/java/io/datahubproject/test/models/GraphAnonymized.java @@ -3,17 +3,17 @@ import com.fasterxml.jackson.annotation.JsonSetter; public class GraphAnonymized { - public GraphNode source; - public GraphNode destination; - public String relationshipType; + public GraphNode source; + public GraphNode destination; + public String relationshipType; - public static class GraphNode extends Anonymized { - public String urn; - public String entityType; + public static class GraphNode extends Anonymized { + public String urn; + public String entityType; - @JsonSetter("urn") - public void setUrn(String urn) { - this.urn = anonymizeUrn(urn); - } + @JsonSetter("urn") + public void setUrn(String urn) { + this.urn = anonymizeUrn(urn); } + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java index 233a667d078dd..4129a2f997dc8 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/ElasticsearchTestContainer.java @@ -1,42 +1,46 @@ package io.datahubproject.test.search; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; + import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; - public class ElasticsearchTestContainer implements SearchTestContainer { - private static final String ELASTIC_VERSION = "7.10.1"; - private static final String ELASTIC_IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch"; - private static final String ENV_ELASTIC_IMAGE_FULL_NAME = System.getenv("ELASTIC_IMAGE_FULL_NAME"); - private static final String ELASTIC_IMAGE_FULL_NAME = ENV_ELASTIC_IMAGE_FULL_NAME != null - ? ENV_ELASTIC_IMAGE_FULL_NAME : ELASTIC_IMAGE_NAME + ":" + ELASTIC_VERSION; - private static final DockerImageName DOCKER_IMAGE_NAME = DockerImageName.parse(ELASTIC_IMAGE_FULL_NAME) - .asCompatibleSubstituteFor(ELASTIC_IMAGE_NAME); - - protected static final GenericContainer ES_CONTAINER; - private boolean isStarted = false; - - // A helper method to create an ElasticsearchContainer defaulting to the current image and version, with the ability - // within firewalled environments to override with an environment variable to point to the offline repository. 
- static { - ES_CONTAINER = new org.testcontainers.elasticsearch.ElasticsearchContainer(DOCKER_IMAGE_NAME); - checkContainerEngine(ES_CONTAINER.getDockerClient()); - ES_CONTAINER.withEnv("ES_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT); + private static final String ELASTIC_VERSION = "7.10.1"; + private static final String ELASTIC_IMAGE_NAME = "docker.elastic.co/elasticsearch/elasticsearch"; + private static final String ENV_ELASTIC_IMAGE_FULL_NAME = + System.getenv("ELASTIC_IMAGE_FULL_NAME"); + private static final String ELASTIC_IMAGE_FULL_NAME = + ENV_ELASTIC_IMAGE_FULL_NAME != null + ? ENV_ELASTIC_IMAGE_FULL_NAME + : ELASTIC_IMAGE_NAME + ":" + ELASTIC_VERSION; + private static final DockerImageName DOCKER_IMAGE_NAME = + DockerImageName.parse(ELASTIC_IMAGE_FULL_NAME).asCompatibleSubstituteFor(ELASTIC_IMAGE_NAME); + + protected static final GenericContainer ES_CONTAINER; + private boolean isStarted = false; + + // A helper method to create an ElasticsearchContainer defaulting to the current image and + // version, with the ability + // within firewalled environments to override with an environment variable to point to the offline + // repository. + static { + ES_CONTAINER = new org.testcontainers.elasticsearch.ElasticsearchContainer(DOCKER_IMAGE_NAME); + checkContainerEngine(ES_CONTAINER.getDockerClient()); + ES_CONTAINER.withEnv("ES_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT); + } + + @Override + public GenericContainer startContainer() { + if (!isStarted) { + ElasticsearchTestContainer.ES_CONTAINER.start(); + isStarted = true; } + return ES_CONTAINER; + } - @Override - public GenericContainer startContainer() { - if (!isStarted) { - ElasticsearchTestContainer.ES_CONTAINER.start(); - isStarted = true; - } - return ES_CONTAINER; - } - - @Override - public void stopContainer() { - ES_CONTAINER.stop(); - } + @Override + public void stopContainer() { + ES_CONTAINER.stop(); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java index d94b88b466f89..739169b834a57 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/OpenSearchTestContainer.java @@ -1,43 +1,50 @@ package io.datahubproject.test.search; +import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; + import org.opensearch.testcontainers.OpensearchContainer; import org.testcontainers.containers.GenericContainer; import org.testcontainers.utility.DockerImageName; - -import static com.linkedin.metadata.DockerTestUtils.checkContainerEngine; - public class OpenSearchTestContainer implements SearchTestContainer { - private static final String OPENSEARCH_VERSION = "2.9.0"; - private static final String OPENSEARCH_IMAGE_NAME = "opensearchproject/opensearch"; - private static final String ENV_OPENSEARCH_IMAGE_FULL_NAME = System.getenv("OPENSEARCH_IMAGE_FULL_NAME"); - private static final String OPENSEARCH_IMAGE_FULL_NAME = ENV_OPENSEARCH_IMAGE_FULL_NAME != null - ? 
ENV_OPENSEARCH_IMAGE_FULL_NAME : OPENSEARCH_IMAGE_NAME + ":" + OPENSEARCH_VERSION;
-    private static final DockerImageName DOCKER_IMAGE_NAME = DockerImageName.parse(OPENSEARCH_IMAGE_FULL_NAME)
-            .asCompatibleSubstituteFor(OPENSEARCH_IMAGE_NAME);
-
-    protected static final GenericContainer OS_CONTAINER;
-    private boolean isStarted = false;
-
-    // A helper method to create an ElasticseachContainer defaulting to the current image and version, with the ability
-    // within firewalled environments to override with an environment variable to point to the offline repository.
-    static {
-        OS_CONTAINER = new OpensearchContainer(DOCKER_IMAGE_NAME);
-        checkContainerEngine(OS_CONTAINER.getDockerClient());
-        OS_CONTAINER.withEnv("OPENSEARCH_JAVA_OPTS", SEARCH_JAVA_OPTS).withStartupTimeout(STARTUP_TIMEOUT);
+  private static final String OPENSEARCH_VERSION = "2.9.0";
+  private static final String OPENSEARCH_IMAGE_NAME = "opensearchproject/opensearch";
+  private static final String ENV_OPENSEARCH_IMAGE_FULL_NAME =
+      System.getenv("OPENSEARCH_IMAGE_FULL_NAME");
+  private static final String OPENSEARCH_IMAGE_FULL_NAME =
+      ENV_OPENSEARCH_IMAGE_FULL_NAME != null
+          ? ENV_OPENSEARCH_IMAGE_FULL_NAME
+          : OPENSEARCH_IMAGE_NAME + ":" + OPENSEARCH_VERSION;
+  private static final DockerImageName DOCKER_IMAGE_NAME =
+      DockerImageName.parse(OPENSEARCH_IMAGE_FULL_NAME)
+          .asCompatibleSubstituteFor(OPENSEARCH_IMAGE_NAME);
+
+  protected static final GenericContainer OS_CONTAINER;
+  private boolean isStarted = false;
+
+  // A helper method to create an ElasticsearchContainer defaulting to the current image and version,
+  // with the ability
+  // within firewalled environments to override with an environment variable to point to the offline
+  // repository.
+  static {
+    OS_CONTAINER = new OpensearchContainer(DOCKER_IMAGE_NAME);
+    checkContainerEngine(OS_CONTAINER.getDockerClient());
+    OS_CONTAINER
+        .withEnv("OPENSEARCH_JAVA_OPTS", SEARCH_JAVA_OPTS)
+        .withStartupTimeout(STARTUP_TIMEOUT);
+  }
+
+  @Override
+  public GenericContainer startContainer() {
+    if (!isStarted) {
+      OS_CONTAINER.start();
+      isStarted = true;
     }
+    return OS_CONTAINER;
+  }
 
-    @Override
-    public GenericContainer startContainer() {
-        if (!isStarted) {
-            OS_CONTAINER.start();
-            isStarted = true;
-        }
-        return OS_CONTAINER;
-    }
-
-    @Override
-    public void stopContainer() {
-        OS_CONTAINER.stop();
-    }
+  @Override
+  public void stopContainer() {
+    OS_CONTAINER.stop();
+  }
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
index 34aa6978f742f..cda6a4c179f48 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestContainer.java
@@ -1,16 +1,15 @@
 package io.datahubproject.test.search;
 
-import org.testcontainers.containers.GenericContainer;
-
 import java.time.Duration;
+import org.testcontainers.containers.GenericContainer;
 
 public interface SearchTestContainer {
 
-    String SEARCH_JAVA_OPTS = "-Xms446m -Xmx446m -XX:MaxDirectMemorySize=368435456";
+  String SEARCH_JAVA_OPTS = "-Xms446m -Xmx446m -XX:MaxDirectMemorySize=368435456";
 
-    Duration STARTUP_TIMEOUT = Duration.ofMinutes(5); // usually < 1min
+  Duration STARTUP_TIMEOUT = Duration.ofMinutes(5); // usually < 1min
 
-    GenericContainer startContainer();
+  GenericContainer startContainer();
 
-    void stopContainer();
+  void stopContainer();
 }
diff --git 
a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java index 414b9f927fada..58ea020e42565 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java @@ -1,5 +1,8 @@ package io.datahubproject.test.search; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; @@ -18,6 +21,11 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nullable; import org.apache.http.HttpHost; import org.apache.http.auth.AuthScope; import org.apache.http.auth.UsernamePasswordCredentials; @@ -27,121 +35,174 @@ import org.opensearch.client.RestClient; import org.opensearch.client.RestClientBuilder; -import javax.annotation.Nullable; -import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.AUTO_COMPLETE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; - public class SearchTestUtils { - private SearchTestUtils() { - } - - public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws InterruptedException { - bulkProcessor.flush(); - Thread.sleep(1000); - } - - public final static List SEARCHABLE_ENTITIES; - static { - SEARCHABLE_ENTITIES = Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) - .map(EntityTypeMapper::getName) - .distinct() - .collect(Collectors.toList()); - } - - public static SearchResult searchAcrossEntities(SearchService searchService, String query) { - return searchAcrossEntities(searchService, query, null); - } - - public static SearchResult searchAcrossEntities(SearchService searchService, String query, @Nullable List facets) { - return searchService.searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true), facets); - } - - public static SearchResult searchAcrossCustomEntities(SearchService searchService, String query, List searchableEntities) { - return searchService.searchAcrossEntities(searchableEntities, query, null, null, 0, - 100, new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static SearchResult search(SearchService searchService, String query) { - return search(searchService, SEARCHABLE_ENTITIES, query); - } - - public static SearchResult search(SearchService searchService, List entities, String query) { - return searchService.search(entities, query, null, null, 0, 100, - new SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static ScrollResult scroll(SearchService searchService, String query, int batchSize, @Nullable String scrollId) { - return searchService.scrollAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, - scrollId, "3m", batchSize, new 
SearchFlags().setFulltext(true).setSkipCache(true)); - } - - public static SearchResult searchStructured(SearchService searchService, String query) { - return searchService.searchAcrossEntities(SEARCHABLE_ENTITIES, query, null, null, 0, - 100, new SearchFlags().setFulltext(false).setSkipCache(true)); - } - - public static LineageSearchResult lineage(LineageSearchService lineageSearchService, Urn root, int hops) { - String degree = hops >= 3 ? "3+" : String.valueOf(hops); - List filters = List.of(FacetFilterInput.builder() + private SearchTestUtils() {} + + public static void syncAfterWrite(ESBulkProcessor bulkProcessor) throws InterruptedException { + bulkProcessor.flush(); + Thread.sleep(1000); + } + + public static final List SEARCHABLE_ENTITIES; + + static { + SEARCHABLE_ENTITIES = + Stream.concat(SEARCHABLE_ENTITY_TYPES.stream(), AUTO_COMPLETE_ENTITY_TYPES.stream()) + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + } + + public static SearchResult searchAcrossEntities(SearchService searchService, String query) { + return searchAcrossEntities(searchService, query, null); + } + + public static SearchResult searchAcrossEntities( + SearchService searchService, String query, @Nullable List facets) { + return searchService.searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true), + facets); + } + + public static SearchResult searchAcrossCustomEntities( + SearchService searchService, String query, List searchableEntities) { + return searchService.searchAcrossEntities( + searchableEntities, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static SearchResult search(SearchService searchService, String query) { + return search(searchService, SEARCHABLE_ENTITIES, query); + } + + public static SearchResult search( + SearchService searchService, List entities, String query) { + return searchService.search( + entities, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static ScrollResult scroll( + SearchService searchService, String query, int batchSize, @Nullable String scrollId) { + return searchService.scrollAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + scrollId, + "3m", + batchSize, + new SearchFlags().setFulltext(true).setSkipCache(true)); + } + + public static SearchResult searchStructured(SearchService searchService, String query) { + return searchService.searchAcrossEntities( + SEARCHABLE_ENTITIES, + query, + null, + null, + 0, + 100, + new SearchFlags().setFulltext(false).setSkipCache(true)); + } + + public static LineageSearchResult lineage( + LineageSearchService lineageSearchService, Urn root, int hops) { + String degree = hops >= 3 ? 
"3+" : String.valueOf(hops); + List filters = + List.of( + FacetFilterInput.builder() .setField("degree") .setCondition(FilterOperator.EQUAL) .setValues(List.of(degree)) .setNegated(false) .build()); - return lineageSearchService.searchAcrossLineage(root, LineageDirection.DOWNSTREAM, - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", hops, ResolverUtils.buildFilter(filters, List.of()), null, 0, 100, null, - null, new SearchFlags().setSkipCache(true)); - } - - public static AutoCompleteResults autocomplete(SearchableEntityType searchableEntityType, String query) throws Exception { - return searchableEntityType.autoComplete(query, null, null, 100, new QueryContext() { - @Override - public boolean isAuthenticated() { - return true; - } - - @Override - public Authentication getAuthentication() { - return null; - } - - @Override - public Authorizer getAuthorizer() { - return null; - } + return lineageSearchService.searchAcrossLineage( + root, + LineageDirection.DOWNSTREAM, + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + hops, + ResolverUtils.buildFilter(filters, List.of()), + null, + 0, + 100, + null, + null, + new SearchFlags().setSkipCache(true)); + } + + public static AutoCompleteResults autocomplete( + SearchableEntityType searchableEntityType, String query) throws Exception { + return searchableEntityType.autoComplete( + query, + null, + null, + 100, + new QueryContext() { + @Override + public boolean isAuthenticated() { + return true; + } + + @Override + public Authentication getAuthentication() { + return null; + } + + @Override + public Authorizer getAuthorizer() { + return null; + } }); - } - - public static RestClientBuilder environmentRestClientBuilder() { - Integer port = Integer.parseInt(Optional.ofNullable(System.getenv("ELASTICSEARCH_PORT")).orElse("9200")); - return RestClient.builder( - new HttpHost(Optional.ofNullable(System.getenv("ELASTICSEARCH_HOST")).orElse("localhost"), - port, port.equals(443) ? "https" : "http")) - .setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { - @Override - public HttpAsyncClientBuilder customizeHttpClient( - HttpAsyncClientBuilder httpClientBuilder) { - httpClientBuilder.disableAuthCaching(); - - if (System.getenv("ELASTICSEARCH_USERNAME") != null) { - final CredentialsProvider credentialsProvider = - new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials(System.getenv("ELASTICSEARCH_USERNAME"), - System.getenv("ELASTICSEARCH_PASSWORD"))); - httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); - } - - return httpClientBuilder; - } - }); - } + } + + public static RestClientBuilder environmentRestClientBuilder() { + Integer port = + Integer.parseInt(Optional.ofNullable(System.getenv("ELASTICSEARCH_PORT")).orElse("9200")); + return RestClient.builder( + new HttpHost( + Optional.ofNullable(System.getenv("ELASTICSEARCH_HOST")).orElse("localhost"), + port, + port.equals(443) ? 
"https" : "http")) + .setHttpClientConfigCallback( + new RestClientBuilder.HttpClientConfigCallback() { + @Override + public HttpAsyncClientBuilder customizeHttpClient( + HttpAsyncClientBuilder httpClientBuilder) { + httpClientBuilder.disableAuthCaching(); + + if (System.getenv("ELASTICSEARCH_USERNAME") != null) { + final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + AuthScope.ANY, + new UsernamePasswordCredentials( + System.getenv("ELASTICSEARCH_USERNAME"), + System.getenv("ELASTICSEARCH_PASSWORD"))); + httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); + } + + return httpClientBuilder; + } + }); + } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java index 530d3f4d53625..17747d9ba1cc9 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchCommonTestConfiguration.java @@ -13,51 +13,50 @@ import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; -/** - * This is common configuration for search regardless of which - * test container implementation. - */ +/** This is common configuration for search regardless of which test container implementation. */ @TestConfiguration public class SearchCommonTestConfiguration { - @Bean - public SearchConfiguration searchConfiguration() { - SearchConfiguration searchConfiguration = new SearchConfiguration(); - searchConfiguration.setMaxTermBucketSize(20); - - ExactMatchConfiguration exactMatchConfiguration = new ExactMatchConfiguration(); - exactMatchConfiguration.setExclusive(false); - exactMatchConfiguration.setExactFactor(10.0f); - exactMatchConfiguration.setWithPrefix(true); - exactMatchConfiguration.setPrefixFactor(6.0f); - exactMatchConfiguration.setCaseSensitivityFactor(0.7f); - exactMatchConfiguration.setEnableStructured(true); - - WordGramConfiguration wordGramConfiguration = new WordGramConfiguration(); - wordGramConfiguration.setTwoGramFactor(1.2f); - wordGramConfiguration.setThreeGramFactor(1.5f); - wordGramConfiguration.setFourGramFactor(1.8f); - - PartialConfiguration partialConfiguration = new PartialConfiguration(); - partialConfiguration.setFactor(0.4f); - partialConfiguration.setUrnFactor(0.5f); - - searchConfiguration.setExactMatch(exactMatchConfiguration); - searchConfiguration.setWordGram(wordGramConfiguration); - searchConfiguration.setPartial(partialConfiguration); - return searchConfiguration; - } - - @Bean - public CustomSearchConfiguration customSearchConfiguration() throws Exception { - CustomConfiguration customConfiguration = new CustomConfiguration(); - customConfiguration.setEnabled(true); - customConfiguration.setFile("search_config_builder_test.yml"); - return customConfiguration.resolve(new YAMLMapper()); - } - - @Bean(name = "entityRegistry") - public EntityRegistry entityRegistry() throws EntityRegistryException { - return new ConfigEntityRegistry( - SearchCommonTestConfiguration.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - } + @Bean + public SearchConfiguration searchConfiguration() { + SearchConfiguration searchConfiguration = new SearchConfiguration(); + searchConfiguration.setMaxTermBucketSize(20); + + ExactMatchConfiguration exactMatchConfiguration = new 
ExactMatchConfiguration();
+    exactMatchConfiguration.setExclusive(false);
+    exactMatchConfiguration.setExactFactor(10.0f);
+    exactMatchConfiguration.setWithPrefix(true);
+    exactMatchConfiguration.setPrefixFactor(6.0f);
+    exactMatchConfiguration.setCaseSensitivityFactor(0.7f);
+    exactMatchConfiguration.setEnableStructured(true);
+
+    WordGramConfiguration wordGramConfiguration = new WordGramConfiguration();
+    wordGramConfiguration.setTwoGramFactor(1.2f);
+    wordGramConfiguration.setThreeGramFactor(1.5f);
+    wordGramConfiguration.setFourGramFactor(1.8f);
+
+    PartialConfiguration partialConfiguration = new PartialConfiguration();
+    partialConfiguration.setFactor(0.4f);
+    partialConfiguration.setUrnFactor(0.5f);
+
+    searchConfiguration.setExactMatch(exactMatchConfiguration);
+    searchConfiguration.setWordGram(wordGramConfiguration);
+    searchConfiguration.setPartial(partialConfiguration);
+    return searchConfiguration;
+  }
+
+  @Bean
+  public CustomSearchConfiguration customSearchConfiguration() throws Exception {
+    CustomConfiguration customConfiguration = new CustomConfiguration();
+    customConfiguration.setEnabled(true);
+    customConfiguration.setFile("search_config_builder_test.yml");
+    return customConfiguration.resolve(new YAMLMapper());
+  }
+
+  @Bean(name = "entityRegistry")
+  public EntityRegistry entityRegistry() throws EntityRegistryException {
+    return new ConfigEntityRegistry(
+        SearchCommonTestConfiguration.class
+            .getClassLoader()
+            .getResourceAsStream("entity-registry.yml"));
+  }
 }
diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
index 2cfa9f9187825..0ddfd77399325 100644
--- a/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
+++ b/metadata-io/src/test/java/io/datahubproject/test/search/config/SearchTestContainerConfiguration.java
@@ -4,8 +4,9 @@
 import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
 import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor;
 import com.linkedin.metadata.version.GitVersion;
+import java.util.Map;
 import java.util.Optional;
-
+import javax.annotation.Nonnull;
 import org.apache.http.HttpHost;
 import org.apache.http.impl.nio.reactor.IOReactorConfig;
 import org.opensearch.action.support.WriteRequest;
@@ -18,71 +19,81 @@
 import org.springframework.context.annotation.Primary;
 import org.testcontainers.containers.GenericContainer;
 
-import javax.annotation.Nonnull;
-
-import java.util.Map;
-
-
 /**
- * This configuration is for `test containers` it builds these objects tied to
- * the test container instantiated for tests. Could be ES or OpenSearch, etc.
+ * This configuration is for `test containers`; it builds these objects tied to the test container
+ * instantiated for tests. Could be ES or OpenSearch, etc.
  *
- * Does your test required a running instance? If no, {@link io.datahubproject.test.search.config.SearchCommonTestConfiguration} instead.
+ * <p>Does your test require a running instance? If not, use {@link
+ * io.datahubproject.test.search.config.SearchCommonTestConfiguration} instead.
 */
 @TestConfiguration
 public class SearchTestContainerConfiguration {
-    // This port is overridden by the specific test container instance
-    private static final int HTTP_PORT = 9200;
-    public static final int REFRESH_INTERVAL_SECONDS = 5;
+  // This port is overridden by the specific test container instance
+  private static final int HTTP_PORT = 9200;
+  public static final int REFRESH_INTERVAL_SECONDS = 5;
 
-    @Primary
-    @Bean(name = "searchRestHighLevelClient")
-    @Nonnull
-    public RestHighLevelClient getElasticsearchClient(@Qualifier("testSearchContainer") GenericContainer searchContainer) {
-        // A helper method to create a search test container defaulting to the current image and version, with the ability
-        // within firewalled environments to override with an environment variable to point to the offline repository.
-        // A helper method to construct a standard rest client for search.
-        final RestClientBuilder builder =
-                RestClient.builder(new HttpHost(
-                        "localhost",
-                        searchContainer.getMappedPort(HTTP_PORT), "http")
-                ).setHttpClientConfigCallback(httpAsyncClientBuilder ->
-                        httpAsyncClientBuilder.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()));
+  @Primary
+  @Bean(name = "searchRestHighLevelClient")
+  @Nonnull
+  public RestHighLevelClient getElasticsearchClient(
+      @Qualifier("testSearchContainer") GenericContainer searchContainer) {
+    // A helper method to create a search test container defaulting to the current image and
+    // version, with the ability
+    // within firewalled environments to override with an environment variable to point to the
+    // offline repository.
+    // A helper method to construct a standard rest client for search.
+    final RestClientBuilder builder =
+        RestClient.builder(
+                new HttpHost("localhost", searchContainer.getMappedPort(HTTP_PORT), "http"))
+            .setHttpClientConfigCallback(
+                httpAsyncClientBuilder ->
+                    httpAsyncClientBuilder.setDefaultIOReactorConfig(
+                        IOReactorConfig.custom().setIoThreadCount(1).build()));
 
-        builder.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.
-                setConnectionRequestTimeout(30000));
+    builder.setRequestConfigCallback(
+        requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(30000));
 
-        return new RestHighLevelClient(builder);
-    }
+    return new RestHighLevelClient(builder);
+  }
 
-    /*
-    Cannot use the factory class without circular dependencies
-     */
-    @Primary
-    @Bean(name = "searchBulkProcessor")
-    @Nonnull
-    public ESBulkProcessor getBulkProcessor(@Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
-        return ESBulkProcessor.builder(searchClient)
-                .async(true)
-                /*
-                 * Force a refresh as part of this request. This refresh policy does not scale for high indexing or search throughput but is useful
-                 * to present a consistent view to for indices with very low traffic. And it is wonderful for tests! 
- */
-                .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
-                .bulkRequestsLimit(10000)
-                .bulkFlushPeriod(REFRESH_INTERVAL_SECONDS - 1)
-                .retryInterval(1L)
-                .numRetries(1)
-                .build();
-    }
+  /*
+  Cannot use the factory class without circular dependencies
+  */
+  @Primary
+  @Bean(name = "searchBulkProcessor")
+  @Nonnull
+  public ESBulkProcessor getBulkProcessor(
+      @Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
+    return ESBulkProcessor.builder(searchClient)
+        .async(true)
+        /*
+         * Force a refresh as part of this request. This refresh policy does not scale for high indexing or search throughput but is useful
+         * to present a consistent view for indices with very low traffic. And it is wonderful for tests!
+         */
+        .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
+        .bulkRequestsLimit(10000)
+        .bulkFlushPeriod(REFRESH_INTERVAL_SECONDS - 1)
+        .retryInterval(1L)
+        .numRetries(1)
+        .build();
+  }
 
-    @Primary
-    @Bean(name = "searchIndexBuilder")
-    @Nonnull
-    protected ESIndexBuilder getIndexBuilder(@Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
-        GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
-        return new ESIndexBuilder(searchClient, 1, 1, 3, 1, Map.of(),
-            false, false,
-            new ElasticSearchConfiguration(), gitVersion);
-    }
+  @Primary
+  @Bean(name = "searchIndexBuilder")
+  @Nonnull
+  protected ESIndexBuilder getIndexBuilder(
+      @Qualifier("searchRestHighLevelClient") RestHighLevelClient searchClient) {
+    GitVersion gitVersion = new GitVersion("0.0.0-test", "123456", Optional.empty());
+    return new ESIndexBuilder(
+        searchClient,
+        1,
+        1,
+        3,
+        1,
+        Map.of(),
+        false,
+        false,
+        new ElasticSearchConfiguration(),
+        gitVersion);
+  }
 }
diff --git a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
index 0e47202a9d237..f9684871f39e2 100644
--- a/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
+++ b/metadata-jobs/mae-consumer-job/src/main/java/com/linkedin/metadata/kafka/MaeConsumerApplication.java
@@ -10,26 +10,35 @@
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.FilterType;
 
-
 @SuppressWarnings("checkstyle:HideUtilityClassConstructor")
-@SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class, CassandraAutoConfiguration.class,
-    SolrHealthContributorAutoConfiguration.class})
-@ComponentScan(basePackages = {
-    //"com.linkedin.gms.factory.config",
-    //"com.linkedin.gms.factory.common",
-    "com.linkedin.gms.factory.kafka",
-    "com.linkedin.metadata.boot.kafka",
-    "com.linkedin.metadata.kafka",
-    "com.linkedin.metadata.dao.producer",
-    "com.linkedin.gms.factory.config",
-    "com.linkedin.gms.factory.entity.update.indices"
-},
-    excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class),
-        @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = SiblingGraphServiceFactory.class)}
-    )
+@SpringBootApplication(
+    exclude = {
+      ElasticsearchRestClientAutoConfiguration.class,
+      CassandraAutoConfiguration.class,
+      SolrHealthContributorAutoConfiguration.class
+    })
+@ComponentScan(
+    basePackages = {
+      // "com.linkedin.gms.factory.config",
+      // "com.linkedin.gms.factory.common",
+      "com.linkedin.gms.factory.kafka",
"com.linkedin.metadata.boot.kafka", + "com.linkedin.metadata.kafka", + "com.linkedin.metadata.dao.producer", + "com.linkedin.gms.factory.config", + "com.linkedin.gms.factory.entity.update.indices" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = ScheduledAnalyticsFactory.class), + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = SiblingGraphServiceFactory.class) + }) public class MaeConsumerApplication { public static void main(String[] args) { Class[] primarySources = {MaeConsumerApplication.class, MclConsumerConfig.class}; SpringApplication.run(primarySources, args); } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java index d0190279930fe..69288cec8220a 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static org.testng.AssertJUnit.*; + import com.linkedin.metadata.entity.EntityService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -7,15 +9,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - @ActiveProfiles("test") -@SpringBootTest(classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {MaeConsumerApplication.class, MaeConsumerApplicationTestConfiguration.class}) public class MaeConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; @Test public void testMaeConsumerAutoWiring() { diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index aa097a52c8fc6..7135e4e44d459 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -18,30 +18,21 @@ @Import(value = {SystemAuthenticationFactory.class}) public class MaeConsumerApplicationTestConfiguration { - @MockBean - private KafkaHealthChecker kafkaHealthChecker; + @MockBean private KafkaHealthChecker kafkaHealthChecker; - @MockBean - private EntityServiceImpl _entityServiceImpl; + @MockBean private EntityServiceImpl _entityServiceImpl; - @MockBean - private SystemRestliEntityClient restliEntityClient; + @MockBean private SystemRestliEntityClient restliEntityClient; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - private GraphService _graphService; + @MockBean private GraphService _graphService; - @MockBean - private ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService; + @MockBean private 
ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService;
 
-  @MockBean
-  private ConfigEntityRegistry _configEntityRegistry;
+  @MockBean private ConfigEntityRegistry _configEntityRegistry;
 
-  @MockBean
-  public EntityIndexBuilders entityIndexBuilders;
+  @MockBean public EntityIndexBuilders entityIndexBuilders;
 }
diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java
index fd15d36b109dd..1c7aa4fa22dd5 100644
--- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java
+++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/DataHubUsageEventsProcessor.java
@@ -22,7 +22,6 @@
 import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.stereotype.Component;
 
-
 @Slf4j
 @Component
 @EnableKafka
@@ -34,17 +33,22 @@ public class DataHubUsageEventsProcessor {
   private final DataHubUsageEventTransformer dataHubUsageEventTransformer;
   private final String indexName;
 
-  private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag"));
+  private final Histogram kafkaLagStats =
+      MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag"));
 
-  public DataHubUsageEventsProcessor(ElasticsearchConnector elasticSearchConnector,
-      DataHubUsageEventTransformer dataHubUsageEventTransformer, IndexConvention indexConvention) {
+  public DataHubUsageEventsProcessor(
+      ElasticsearchConnector elasticSearchConnector,
+      DataHubUsageEventTransformer dataHubUsageEventTransformer,
+      IndexConvention indexConvention) {
     this.elasticSearchConnector = elasticSearchConnector;
     this.dataHubUsageEventTransformer = dataHubUsageEventTransformer;
     this.indexName = indexConvention.getIndexName("datahub_usage_event");
   }
 
-  @KafkaListener(id = "${DATAHUB_USAGE_EVENT_KAFKA_CONSUMER_GROUP_ID:datahub-usage-event-consumer-job-client}", topics =
-      "${DATAHUB_USAGE_EVENT_NAME:" + Topics.DATAHUB_USAGE_EVENT + "}", containerFactory = "simpleKafkaConsumer")
+  @KafkaListener(
+      id = "${DATAHUB_USAGE_EVENT_KAFKA_CONSUMER_GROUP_ID:datahub-usage-event-consumer-job-client}",
+      topics = "${DATAHUB_USAGE_EVENT_NAME:" + Topics.DATAHUB_USAGE_EVENT + "}",
+      containerFactory = "simpleKafkaConsumer")
   public void consume(final ConsumerRecord consumerRecord) {
     kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp());
     final String record = consumerRecord.value();
@@ -64,16 +68,20 @@ public void consume(final ConsumerRecord consumerRecord) {
   }
 
   /**
-   * DataHub Usage Event is written to an append-only index called a data stream. Due to circumstances
-   * it is possible that the event's id, even though it contains an epoch millisecond, results in duplicate ids
-   * in the index. The collisions will stall processing of the topic. To prevent the collisions we append
-   * the last 5 digits, padded with zeros, of the kafka offset to prevent the collision.
+   * DataHub Usage Event is written to an append-only index called a data stream. Due to
+   * circumstances it is possible that the event's id, even though it contains an epoch millisecond,
+   * results in duplicate ids in the index. The collisions will stall processing of the topic. To
+   * prevent the collisions, we append the last 5 digits of the kafka offset, padded with zeros,
+   * to the event id. 
+ * * @param eventId the event's id * @param kafkaOffset the kafka offset for the message * @return unique identifier for event */ private static String generateDocumentId(String eventId, long kafkaOffset) { - return URLEncoder.encode(String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)), StandardCharsets.UTF_8); + return URLEncoder.encode( + String.format("%s_%05d", eventId, leastSignificant(kafkaOffset, 5)), + StandardCharsets.UTF_8); } private static int leastSignificant(long kafkaOffset, int digits) { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java index d8cd49a736511..686e2a816ffb5 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MclConsumerConfig.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -13,9 +15,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static com.linkedin.metadata.Constants.*; - - @Controller @Import(GitVersionFactory.class) public class MclConsumerConfig { @@ -24,10 +23,15 @@ public class MclConsumerConfig { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public MclConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java index 796f570a1732e..479617f0b6a82 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeLogProcessor.java @@ -14,7 +14,6 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.Topics; - import java.util.Comparator; import java.util.List; import java.util.stream.Collectors; @@ -29,47 +28,56 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(MetadataChangeLogProcessorCondition.class) @Import({ - UpdateIndicesHook.class, - IngestionSchedulerHook.class, - EntityChangeEventGeneratorHook.class, - KafkaEventConsumerFactory.class, - SiblingAssociationHook.class + UpdateIndicesHook.class, + IngestionSchedulerHook.class, + EntityChangeEventGeneratorHook.class, + KafkaEventConsumerFactory.class, + SiblingAssociationHook.class }) 
@EnableKafka public class MetadataChangeLogProcessor { - @Getter - private final List hooks; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + @Getter private final List hooks; + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); @Autowired public MetadataChangeLogProcessor(List metadataChangeLogHooks) { - this.hooks = metadataChangeLogHooks.stream() + this.hooks = + metadataChangeLogHooks.stream() .filter(MetadataChangeLogHook::isEnabled) .sorted(Comparator.comparing(MetadataChangeLogHook::executionOrder)) .collect(Collectors.toList()); this.hooks.forEach(MetadataChangeLogHook::init); } - @KafkaListener(id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}", topics = { - "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}", - "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES - + "}"}, containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = "${METADATA_CHANGE_LOG_KAFKA_CONSUMER_GROUP_ID:generic-mae-consumer-job-client}", + topics = { + "${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}", + "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}" + }, + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); - log.debug("Got Generic MCL on topic: {}, partition: {}, offset: {}", consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset()); + log.debug( + "Got Generic MCL on topic: {}, partition: {}, offset: {}", + consumerRecord.topic(), + consumerRecord.partition(), + consumerRecord.offset()); MetricUtils.counter(this.getClass(), "received_mcl_count").inc(); MetadataChangeLog event; try { event = EventUtils.avroToPegasusMCL(record); - log.debug("Successfully converted Avro MCL to Pegasus MCL. urn: {}, key: {}", event.getEntityUrn(), + log.debug( + "Successfully converted Avro MCL to Pegasus MCL. urn: {}, key: {}", + event.getEntityUrn(), event.getEntityKeyAspect()); } catch (Exception e) { MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc(); @@ -78,15 +86,18 @@ public void consume(final ConsumerRecord consumerRecord) return; } - log.debug("Invoking MCL hooks for urn: {}, key: {}", event.getEntityUrn(), event.getEntityKeyAspect()); + log.debug( + "Invoking MCL hooks for urn: {}, key: {}", + event.getEntityUrn(), + event.getEntityKeyAspect()); // Here - plug in additional "custom processor hooks" for (MetadataChangeLogHook hook : this.hooks) { if (!hook.isEnabled()) { continue; } - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") - .time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency").time()) { hook.invoke(event); } catch (Exception e) { // Just skip this hook and continue. - Note that this represents "at most once" processing. @@ -96,7 +107,9 @@ public void consume(final ConsumerRecord consumerRecord) } // TODO: Manually commit kafka offsets after full processing. 
MetricUtils.counter(this.getClass(), "consumed_mcl_count").inc(); - log.debug("Successfully completed MCL hooks for urn: {}, key: {}", event.getEntityUrn(), + log.debug( + "Successfully completed MCL hooks for urn: {}, key: {}", + event.getEntityUrn(), event.getEntityKeyAspect()); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java index b8334cd7fac27..f70eaf6084a00 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java @@ -4,7 +4,6 @@ import com.linkedin.metadata.boot.BootstrapManager; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.kafka.config.MetadataChangeLogProcessorCondition; - import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; @@ -14,23 +13,22 @@ import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; - -/** - * Responsible for coordinating starting steps that happen before the application starts up. - */ +/** Responsible for coordinating starting steps that happen before the application starts up. */ @Slf4j @Component @Conditional(MetadataChangeLogProcessorCondition.class) public class ApplicationStartupListener implements ApplicationListener { - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final DataHubUpgradeKafkaListener _dataHubUpgradeKafkaListener; private final ConfigurationProvider _configurationProvider; private final BootstrapManager _mclBootstrapManager; public ApplicationStartupListener( - @Qualifier("dataHubUpgradeKafkaListener") DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, + @Qualifier("dataHubUpgradeKafkaListener") + DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, ConfigurationProvider configurationProvider, @Qualifier("mclBootstrapManager") BootstrapManager bootstrapManager) { _dataHubUpgradeKafkaListener = dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java index 9235a1d98014c..8ad1638115dae 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCLBootstrapManagerFactory.java @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; - @Configuration @Conditional(MetadataChangeLogProcessorCondition.class) public class MCLBootstrapManagerFactory { @@ -26,8 +25,7 @@ public class MCLBootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") 
private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -36,8 +34,8 @@ public class MCLBootstrapManagerFactory { @Scope("singleton") @Nonnull protected BootstrapManager createInstance() { - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final List finalSteps = ImmutableList.of(waitForSystemUpdateStep); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java index 0413cd09c36b7..90069f5a56c39 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/DataHubUsageEventsProcessorCondition.java @@ -5,15 +5,12 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class DataHubUsageEventsProcessorCondition implements Condition { @Override - public boolean matches( - ConditionContext context, - AnnotatedTypeMetadata metadata) { + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) && ( - env.getProperty("DATAHUB_ANALYTICS_ENABLED") == null + return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) + && (env.getProperty("DATAHUB_ANALYTICS_ENABLED") == null || "true".equals(env.getProperty("DATAHUB_ANALYTICS_ENABLED"))); } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java index a9e54e5354b42..036968f9f6759 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/EntityHydratorConfig.java @@ -11,7 +11,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({RestliEntityClientFactory.class}) public class EntityHydratorConfig { @@ -20,13 +19,16 @@ public class EntityHydratorConfig { @Qualifier("systemRestliEntityClient") private SystemRestliEntityClient _entityClient; - @Autowired - private EntityRegistry _entityRegistry; + @Autowired private EntityRegistry _entityRegistry; - public final static ImmutableSet EXCLUDED_ASPECTS = ImmutableSet.builder() + public static final ImmutableSet EXCLUDED_ASPECTS = + ImmutableSet.builder() .add("datasetUpstreamLineage", "upstreamLineage") .add("dataJobInputOutput") - .add("dataProcessInstanceRelationships", "dataProcessInstanceInput", "dataProcessInstanceOutput") + .add( + "dataProcessInstanceRelationships", + "dataProcessInstanceInput", + "dataProcessInstanceOutput") .add("inputFields") .build(); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java index 
4d7e60b74c858..db1c0b1a87541 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeLogProcessorCondition.java @@ -5,12 +5,11 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class MetadataChangeLogProcessorCondition implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) || "true".equals( - env.getProperty("MCL_CONSUMER_ENABLED")); + return "true".equals(env.getProperty("MAE_CONSUMER_ENABLED")) + || "true".equals(env.getProperty("MCL_CONSUMER_ENABLED")); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java index b0fade24e26ad..d757feef5aa95 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticEvent.java @@ -15,4 +15,4 @@ public abstract class ElasticEvent { public XContentBuilder buildJson() { return null; } -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java index bea75f7b282ee..5b5a4ab072109 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java @@ -1,9 +1,8 @@ package com.linkedin.metadata.kafka.elasticsearch; import com.linkedin.events.metadata.ChangeType; -import javax.annotation.Nonnull; - import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.delete.DeleteRequest; @@ -11,7 +10,6 @@ import org.opensearch.action.update.UpdateRequest; import org.opensearch.common.xcontent.XContentType; - @Slf4j public class ElasticsearchConnector { @@ -38,7 +36,8 @@ public void feedElasticEvent(@Nonnull ElasticEvent event) { @Nonnull private static IndexRequest createIndexRequest(@Nonnull ElasticEvent event) { - return new IndexRequest(event.getIndex()).id(event.getId()) + return new IndexRequest(event.getIndex()) + .id(event.getId()) .source(event.buildJson()) .opType(DocWriteRequest.OpType.CREATE); } @@ -50,12 +49,10 @@ private static DeleteRequest createDeleteRequest(@Nonnull ElasticEvent event) { @Nonnull private UpdateRequest createUpsertRequest(@Nonnull ElasticEvent event) { - return new UpdateRequest( - event.getIndex(), event.getId()) - .detectNoop(false) - .docAsUpsert(true) - .doc(event.buildJson(), XContentType.JSON) - .retryOnConflict(_numRetries); + return new UpdateRequest(event.getIndex(), event.getId()) + .detectNoop(false) + .docAsUpsert(true) + .doc(event.buildJson(), XContentType.JSON) + .retryOnConflict(_numRetries); } } - diff --git 
a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java index a3672975e42e6..884d74d3cd647 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnectorFactory.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.kafka.elasticsearch; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -8,9 +9,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - -import javax.annotation.Nonnull; - // TODO: Move this factory. @Slf4j @Configuration @@ -27,5 +25,4 @@ public class ElasticsearchConnectorFactory { public ElasticsearchConnector createInstance() { return new ElasticsearchConnector(bulkProcessor, numRetries); } - -} \ No newline at end of file +} diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java index 230cd8433e6ff..d97290975ae26 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/JsonElasticEvent.java @@ -1,14 +1,13 @@ package com.linkedin.metadata.kafka.elasticsearch; +import java.io.IOException; +import javax.annotation.Nullable; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; - -import java.io.IOException; -import javax.annotation.Nullable; public class JsonElasticEvent extends ElasticEvent { private final String _document; @@ -23,8 +22,12 @@ public XContentBuilder buildJson() { XContentBuilder builder = null; try { builder = XContentFactory.jsonBuilder().prettyPrint(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, _document); + XContentParser parser = + XContentFactory.xContent(XContentType.JSON) + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + _document); builder.copyCurrentStructure(parser); } catch (IOException e) { e.printStackTrace(); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java index a3d6dca75068b..83d44cf609a41 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java +++ 
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/MCEElasticEvent.java @@ -1,17 +1,15 @@ package com.linkedin.metadata.kafka.elasticsearch; -import com.linkedin.data.template.RecordTemplate; import com.datahub.util.RecordUtils; +import com.linkedin.data.template.RecordTemplate; +import java.io.IOException; +import javax.annotation.Nullable; +import org.opensearch.common.xcontent.XContentFactory; +import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.common.xcontent.XContentType; - -import java.io.IOException; -import javax.annotation.Nullable; - public class MCEElasticEvent extends ElasticEvent { @@ -28,8 +26,12 @@ public XContentBuilder buildJson() { try { String jsonString = RecordUtils.toJsonString(this._doc); builder = XContentFactory.jsonBuilder().prettyPrint(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, jsonString); + XContentParser parser = + XContentFactory.xContent(XContentType.JSON) + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + jsonString); builder.copyCurrentStructure(parser); } catch (IOException e) { e.printStackTrace(); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java index 39b47768a6dcf..f7e110f53a019 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/MetadataChangeLogHook.java @@ -3,35 +3,32 @@ import com.linkedin.mxe.MetadataChangeLog; import javax.annotation.Nonnull; - /** * Custom hook which is invoked on receiving a new {@link MetadataChangeLog} event. * - * The semantics of this hook are currently "at most once". That is, the hook will not be called + *
<p>
The semantics of this hook are currently "at most once". That is, the hook will not be called * with the same message. In the future, we intend to migrate to "at least once" semantics, meaning * that the hook will be responsible for implementing idempotency. */ public interface MetadataChangeLogHook { - /** - * Initialize the hook - */ - default void init() { } + /** Initialize the hook */ + default void init() {} /** - * Return whether the hook is enabled or not. If not enabled, the below invoke method is not triggered + * Return whether the hook is enabled or not. If not enabled, the below invoke method is not + * triggered */ default boolean isEnabled() { return true; } - /** - * Invoke the hook when a MetadataChangeLog is received - */ + /** Invoke the hook when a MetadataChangeLog is received */ void invoke(@Nonnull MetadataChangeLog log) throws Exception; /** * Controls hook execution ordering + * * @return order to execute */ default int executionOrder() { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java index 78c87ec8f4b3b..019d6b898ae6b 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHook.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.gms.factory.common.GraphServiceFactory; import com.linkedin.gms.factory.common.SystemMetadataServiceFactory; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; @@ -14,14 +16,17 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; -import static com.linkedin.metadata.Constants.*; - - // TODO: Backfill tests for this class in UpdateIndicesHookTest.java @Slf4j @Component -@Import({GraphServiceFactory.class, EntitySearchServiceFactory.class, TimeseriesAspectServiceFactory.class, - EntityRegistryFactory.class, SystemMetadataServiceFactory.class, SearchDocumentTransformerFactory.class}) +@Import({ + GraphServiceFactory.class, + EntitySearchServiceFactory.class, + TimeseriesAspectServiceFactory.class, + EntityRegistryFactory.class, + SystemMetadataServiceFactory.class, + SearchDocumentTransformerFactory.class +}) public class UpdateIndicesHook implements MetadataChangeLogHook { protected final UpdateIndicesService _updateIndicesService; @@ -44,7 +49,8 @@ public void invoke(@Nonnull final MetadataChangeLog event) { if (event.getSystemMetadata() != null) { if (event.getSystemMetadata().getProperties() != null) { if (UI_SOURCE.equals(event.getSystemMetadata().getProperties().get(APP_SOURCE))) { - // If coming from the UI, we pre-process the Update Indices hook as a fast path to avoid Kafka lag + // If coming from the UI, we pre-process the Update Indices hook as a fast path to avoid + // Kafka lag return; } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java index 3b65ecccad336..08790b1be3319 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java +++ 
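The MetadataChangeLogHook interface reformatted above defines the full contract for custom MCL consumers: init() for one-time setup, isEnabled() to gate execution, invoke() for each event, and executionOrder() for relative ordering. A minimal implementation sketch with a hypothetical class name and filtering logic; the ordering direction of executionOrder is an assumption:

    import com.linkedin.mxe.MetadataChangeLog;
    import javax.annotation.Nonnull;

    // Hypothetical hook that logs dataset changes; shown only to illustrate
    // the contract above, not part of this patch.
    public class LoggingHook implements MetadataChangeLogHook {

      @Override
      public void init() {
        // One-time setup; called before the first invoke().
      }

      @Override
      public boolean isEnabled() {
        // When false, invoke() is never triggered for this hook.
        return true;
      }

      @Override
      public void invoke(@Nonnull MetadataChangeLog log) {
        // "At most once" semantics: do not rely on redelivery for missed
        // events, and be ready to make this idempotent once "at least once"
        // semantics land.
        if ("dataset".equals(log.getEntityType())) {
          System.out.println("dataset aspect changed: " + log.getAspectName());
        }
      }

      @Override
      public int executionOrder() {
        // Assumption: lower values run earlier relative to other hooks.
        return 100;
      }
    }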
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHook.java @@ -36,19 +36,20 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; - /** - * A {@link MetadataChangeLogHook} responsible for generating Entity Change Events - * to the Platform Events topic. + * A {@link MetadataChangeLogHook} responsible for generating Entity Change Events to the Platform + * Events topic. */ @Slf4j @Component -@Import({EntityChangeEventGeneratorRegistry.class, EntityRegistryFactory.class, RestliEntityClientFactory.class}) +@Import({ + EntityChangeEventGeneratorRegistry.class, + EntityRegistryFactory.class, + RestliEntityClientFactory.class +}) public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { - /** - * The list of aspects that are supported for generating semantic change events. - */ + /** The list of aspects that are supported for generating semantic change events. */ private static final Set SUPPORTED_ASPECT_NAMES = ImmutableSet.of( Constants.GLOBAL_TAGS_ASPECT_NAME, @@ -74,10 +75,11 @@ public class EntityChangeEventGeneratorHook implements MetadataChangeLogHook { Constants.DOMAIN_KEY_ASPECT_NAME, Constants.TAG_KEY_ASPECT_NAME, Constants.STATUS_ASPECT_NAME); - /** - * The list of change types that are supported for generating semantic change events. - */ - private static final Set SUPPORTED_OPERATIONS = ImmutableSet.of("CREATE", "UPSERT", "DELETE"); + + /** The list of change types that are supported for generating semantic change events. */ + private static final Set SUPPORTED_OPERATIONS = + ImmutableSet.of("CREATE", "UPSERT", "DELETE"); + private final EntityChangeEventGeneratorRegistry _entityChangeEventGeneratorRegistry; private final SystemRestliEntityClient _entityClient; private final EntityRegistry _entityRegistry; @@ -89,7 +91,8 @@ public EntityChangeEventGeneratorHook( @Nonnull final SystemRestliEntityClient entityClient, @Nonnull final EntityRegistry entityRegistry, @Nonnull @Value("${entityChangeEvents.enabled:true}") Boolean isEnabled) { - _entityChangeEventGeneratorRegistry = Objects.requireNonNull(entityChangeEventGeneratorRegistry); + _entityChangeEventGeneratorRegistry = + Objects.requireNonNull(entityChangeEventGeneratorRegistry); _entityClient = Objects.requireNonNull(entityClient); _entityRegistry = Objects.requireNonNull(entityRegistry); _isEnabled = isEnabled; @@ -108,41 +111,46 @@ public void invoke(@Nonnull final MetadataChangeLog logEvent) throws Exception { // 2. Find and invoke a EntityChangeEventGenerator. // 3. Sink the output of the EntityChangeEventGenerator to a specific PDL change event. final AspectSpec aspectSpec = - _entityRegistry.getEntitySpec(logEvent.getEntityType()).getAspectSpec(logEvent.getAspectName()); + _entityRegistry + .getEntitySpec(logEvent.getEntityType()) + .getAspectSpec(logEvent.getAspectName()); assert aspectSpec != null; - final RecordTemplate fromAspect = logEvent.getPreviousAspectValue() != null - ? GenericRecordUtils.deserializeAspect( - logEvent.getPreviousAspectValue().getValue(), - logEvent.getPreviousAspectValue().getContentType(), - aspectSpec) - : null; - - final RecordTemplate toAspect = logEvent.getAspect() != null - ? 
GenericRecordUtils.deserializeAspect( - logEvent.getAspect().getValue(), - logEvent.getAspect().getContentType(), - aspectSpec) - : null; - - final List changeEvents = generateChangeEvents( - logEvent.getEntityUrn(), - logEvent.getEntityType(), - logEvent.getAspectName(), - createAspect(fromAspect, logEvent.getPreviousSystemMetadata()), - createAspect(toAspect, logEvent.getSystemMetadata()), - logEvent.getCreated() - ); + final RecordTemplate fromAspect = + logEvent.getPreviousAspectValue() != null + ? GenericRecordUtils.deserializeAspect( + logEvent.getPreviousAspectValue().getValue(), + logEvent.getPreviousAspectValue().getContentType(), + aspectSpec) + : null; + + final RecordTemplate toAspect = + logEvent.getAspect() != null + ? GenericRecordUtils.deserializeAspect( + logEvent.getAspect().getValue(), + logEvent.getAspect().getContentType(), + aspectSpec) + : null; + + final List changeEvents = + generateChangeEvents( + logEvent.getEntityUrn(), + logEvent.getEntityType(), + logEvent.getAspectName(), + createAspect(fromAspect, logEvent.getPreviousSystemMetadata()), + createAspect(toAspect, logEvent.getSystemMetadata()), + logEvent.getCreated()); // Iterate through each transaction, emit change events as platform events. for (final ChangeEvent event : changeEvents) { PlatformEvent platformEvent = buildPlatformEvent(event); emitPlatformEvent( platformEvent, - String.format("%s-%s", Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, event.getEntityUrn()) - ); - log.debug("Successfully emitted change event. category: {}, operation: {}, entity urn: {}", + String.format( + "%s-%s", Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, event.getEntityUrn())); + log.debug( + "Successfully emitted change event. category: {}, operation: {}, entity urn: {}", event.getCategory(), event.getOperation(), event.getEntityUrn()); @@ -156,35 +164,30 @@ private List generateChangeEvents( @Nonnull final String aspectName, @Nonnull final Aspect from, @Nonnull final Aspect to, - @Nonnull AuditStamp auditStamp - ) { + @Nonnull AuditStamp auditStamp) { final List> entityChangeEventGenerators = - _entityChangeEventGeneratorRegistry - .getEntityChangeEventGenerators(aspectName) - .stream() + _entityChangeEventGeneratorRegistry.getEntityChangeEventGenerators(aspectName).stream() // Note: Assumes that correct types have been registered for the aspect. 
.map(changeEventGenerator -> (EntityChangeEventGenerator) changeEventGenerator) .collect(Collectors.toList()); final List allChangeEvents = new ArrayList<>(); for (EntityChangeEventGenerator entityChangeEventGenerator : entityChangeEventGenerators) { allChangeEvents.addAll( - entityChangeEventGenerator.getChangeEvents(urn, entityName, aspectName, from, to, auditStamp)); + entityChangeEventGenerator.getChangeEvents( + urn, entityName, aspectName, from, to, auditStamp)); } return allChangeEvents; } private boolean isEligibleForProcessing(final MetadataChangeLog log) { - return SUPPORTED_OPERATIONS.contains(log.getChangeType().toString()) && SUPPORTED_ASPECT_NAMES.contains( - log.getAspectName()); + return SUPPORTED_OPERATIONS.contains(log.getChangeType().toString()) + && SUPPORTED_ASPECT_NAMES.contains(log.getAspectName()); } - private void emitPlatformEvent(@Nonnull final PlatformEvent event, @Nonnull final String partitioningKey) - throws Exception { + private void emitPlatformEvent( + @Nonnull final PlatformEvent event, @Nonnull final String partitioningKey) throws Exception { _entityClient.producePlatformEvent( - Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, - partitioningKey, - event - ); + Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME, partitioningKey, event); } private PlatformEvent buildPlatformEvent(final ChangeEvent rawChangeEvent) { @@ -193,14 +196,15 @@ private PlatformEvent buildPlatformEvent(final ChangeEvent rawChangeEvent) { // 2. Build platform event PlatformEvent platformEvent = new PlatformEvent(); platformEvent.setName(Constants.CHANGE_EVENT_PLATFORM_EVENT_NAME); - platformEvent.setHeader(new PlatformEventHeader().setTimestampMillis(rawChangeEvent.getAuditStamp().getTime())); + platformEvent.setHeader( + new PlatformEventHeader().setTimestampMillis(rawChangeEvent.getAuditStamp().getTime())); platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent)); return platformEvent; } /** - * Thin mapping from internal Timeline API {@link ChangeEvent} to Kafka Platform Event {@link ChangeEvent}, which serves as a public - * API for outbound consumption. + * Thin mapping from internal Timeline API {@link ChangeEvent} to Kafka Platform Event {@link + * ChangeEvent}, which serves as a public API for outbound consumption. */ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeEvent) { com.linkedin.platform.event.v1.EntityChangeEvent changeEvent = @@ -216,7 +220,8 @@ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeE changeEvent.setAuditStamp(rawChangeEvent.getAuditStamp()); changeEvent.setVersion(0); if (rawChangeEvent.getParameters() != null) { - // This map should ideally contain only primitives at the leaves - integers, floats, booleans, strings. + // This map should ideally contain only primitives at the leaves - integers, floats, + // booleans, strings. 
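EntityChangeEventGeneratorHook, shown above, emits each generated change event as a Platform Event whose partitioning key is CHANGE_EVENT_PLATFORM_EVENT_NAME plus the entity urn, so all events for one entity land on the same Kafka partition and arrive in order. A rough consumption-side sketch with the plain Kafka client; the PlatformEvent_v1 topic name and the deserializer choices are assumptions, not taken from this patch:

    import java.time.Duration;
    import java.util.List;
    import java.util.Properties;
    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    public final class ChangeEventTailer {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "change-event-tailer");
        props.put("key.deserializer",
            "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer",
            "org.apache.kafka.common.serialization.ByteArrayDeserializer");

        try (KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(props)) {
          consumer.subscribe(List.of("PlatformEvent_v1")); // assumed topic name
          while (true) {
            ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, byte[]> record : records) {
              // Key format from the hook: "<CHANGE_EVENT_PLATFORM_EVENT_NAME>-<entity urn>".
              System.out.println("change event for key: " + record.key());
            }
          }
        }
      }
    }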
changeEvent.setParameters(new Parameters(new DataMap(rawChangeEvent.getParameters()))); } return changeEvent; @@ -225,7 +230,8 @@ private RecordTemplate convertRawEventToChangeEvent(final ChangeEvent rawChangeE } } - private Aspect createAspect(@Nullable final RecordTemplate value, @Nullable final SystemMetadata systemMetadata) { + private Aspect createAspect( + @Nullable final RecordTemplate value, @Nullable final SystemMetadata systemMetadata) { return new Aspect(value, systemMetadata); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java index 1a3febb623314..82f1de0a889bf 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java @@ -22,10 +22,9 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; - /** - * This hook updates a stateful {@link IngestionScheduler} of Ingestion Runs for Ingestion Sources defined - * within DataHub. + * This hook updates a stateful {@link IngestionScheduler} of Ingestion Runs for Ingestion Sources + * defined within DataHub. */ @Slf4j @Component @@ -41,8 +40,7 @@ public class IngestionSchedulerHook implements MetadataChangeLogHook { public IngestionSchedulerHook( @Nonnull final EntityRegistry entityRegistry, @Nonnull final IngestionScheduler scheduler, - @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled - ) { + @Nonnull @Value("${ingestionScheduler.enabled:true}") Boolean isEnabled) { _entityRegistry = entityRegistry; _scheduler = scheduler; _isEnabled = isEnabled; @@ -62,7 +60,8 @@ public void init() { public void invoke(@Nonnull MetadataChangeLog event) { if (isEligibleForProcessing(event)) { - log.info("Received {} to Ingestion Source. Rescheduling the source (if applicable). urn: {}, key: {}.", + log.info( + "Received {} to Ingestion Source. Rescheduling the source (if applicable). urn: {}, key: {}.", event.getChangeType(), event.getEntityUrn(), event.getEntityKeyAspect()); @@ -80,8 +79,9 @@ public void invoke(@Nonnull MetadataChangeLog event) { } /** - * Returns true if the event should be processed, which is only true if the event represents a create, update, or delete - * of an Ingestion Source Info aspect, which in turn contains the schedule associated with the source. + * Returns true if the event should be processed, which is only true if the event represents a + * create, update, or delete of an Ingestion Source Info aspect, which in turn contains the + * schedule associated with the source. 
*/ private boolean isEligibleForProcessing(final MetadataChangeLog event) { return isIngestionSourceUpdate(event) || isIngestionSourceDeleted(event); @@ -90,8 +90,8 @@ private boolean isEligibleForProcessing(final MetadataChangeLog event) { private boolean isIngestionSourceUpdate(final MetadataChangeLog event) { return Constants.INGESTION_INFO_ASPECT_NAME.equals(event.getAspectName()) && (ChangeType.UPSERT.equals(event.getChangeType()) - || ChangeType.CREATE.equals(event.getChangeType()) - || ChangeType.DELETE.equals(event.getChangeType())); + || ChangeType.CREATE.equals(event.getChangeType()) + || ChangeType.DELETE.equals(event.getChangeType())); } private boolean isIngestionSourceDeleted(final MetadataChangeLog event) { @@ -100,8 +100,8 @@ private boolean isIngestionSourceDeleted(final MetadataChangeLog event) { } /** - * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an entityUrn - * or entityKey field, depending on which is present. + * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an + * entityUrn or entityKey field, depending on which is present. */ private Urn getUrnFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -109,15 +109,17 @@ private Urn getUrnFromEvent(final MetadataChangeLog event) { entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get urn from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get urn from MetadataChangeLog event. Skipping processing.", e); } // Extract an URN from the Log Event. return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec()); } /** - * Deserializes and returns an instance of {@link DataHubIngestionSourceInfo} extracted from a {@link MetadataChangeLog} event. - * The incoming event is expected to have a populated "aspect" field. + * Deserializes and returns an instance of {@link DataHubIngestionSourceInfo} extracted from a + * {@link MetadataChangeLog} event. The incoming event is expected to have a populated "aspect" + * field. */ private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -125,12 +127,15 @@ private DataHubIngestionSourceInfo getInfoFromEvent(final MetadataChangeLog even entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get Ingestion Source info from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get Ingestion Source info from MetadataChangeLog event. 
Skipping processing.", + e); } - return (DataHubIngestionSourceInfo) GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - entitySpec.getAspectSpec(Constants.INGESTION_INFO_ASPECT_NAME)); + return (DataHubIngestionSourceInfo) + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), + event.getAspect().getContentType(), + entitySpec.getAspectSpec(Constants.INGESTION_INFO_ASPECT_NAME)); } @VisibleForTesting diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java index 064f987ff1ba9..67198d13772a3 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHook.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook.siblings; +import static com.linkedin.metadata.Constants.*; + import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,6 +24,12 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.utils.EntityKeyUtils; @@ -42,26 +50,19 @@ import org.springframework.context.annotation.Import; import org.springframework.stereotype.Component; -import com.linkedin.metadata.query.filter.Condition; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; -import com.linkedin.metadata.query.filter.Filter; - -import static com.linkedin.metadata.Constants.*; - - -/** - * This hook associates dbt datasets with their sibling entities - */ +/** This hook associates dbt datasets with their sibling entities */ @Slf4j @Component @Singleton -@Import({EntityRegistryFactory.class, RestliEntityClientFactory.class, EntitySearchServiceFactory.class}) +@Import({ + EntityRegistryFactory.class, + RestliEntityClientFactory.class, + EntitySearchServiceFactory.class +}) public class SiblingAssociationHook implements MetadataChangeLogHook { - public static final String SIBLING_ASSOCIATION_SYSTEM_ACTOR = "urn:li:corpuser:__datahub_system_sibling_hook"; + public static final String SIBLING_ASSOCIATION_SYSTEM_ACTOR = + "urn:li:corpuser:__datahub_system_sibling_hook"; public static final String DBT_PLATFORM_NAME = "dbt"; // Older dbt sources produced lowercase subtypes, whereas we now @@ -80,8 +81,7 @@ public SiblingAssociationHook( @Nonnull final EntityRegistry entityRegistry, @Nonnull final SystemRestliEntityClient entityClient, @Nonnull final EntitySearchService searchService, - @Nonnull @Value("${siblings.enabled:true}") Boolean 
isEnabled - ) { + @Nonnull @Value("${siblings.enabled:true}") Boolean isEnabled) { _entityRegistry = entityRegistry; _entityClient = entityClient; _searchService = searchService; @@ -97,8 +97,7 @@ void setEnabled(Boolean newValue) { } @Override - public void init() { - } + public void init() {} @Override public boolean isEnabled() { @@ -135,28 +134,38 @@ public void invoke(@Nonnull MetadataChangeLog event) { private void handleEntityKeyEvent(DatasetUrn datasetUrn) { Filter entitiesWithYouAsSiblingFilter = createFilterForEntitiesWithYouAsSibling(datasetUrn); - final SearchResult searchResult = _searchService.search( - List.of(DATASET_ENTITY_NAME), - "*", - entitiesWithYouAsSiblingFilter, - null, - 0, - 10, + final SearchResult searchResult = + _searchService.search( + List.of(DATASET_ENTITY_NAME), + "*", + entitiesWithYouAsSiblingFilter, + null, + 0, + 10, new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); // we have a match of an entity with you as a sibling, associate yourself back - searchResult.getEntities().forEach(entity -> { - if (!entity.getEntity().equals(datasetUrn)) { - if (datasetUrn.getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) { - setSiblingsAndSoftDeleteSibling(datasetUrn, searchResult.getEntities().get(0).getEntity()); - } else { - setSiblingsAndSoftDeleteSibling(searchResult.getEntities().get(0).getEntity(), datasetUrn); - } - } - }); + searchResult + .getEntities() + .forEach( + entity -> { + if (!entity.getEntity().equals(datasetUrn)) { + if (datasetUrn + .getPlatformEntity() + .getPlatformNameEntity() + .equals(DBT_PLATFORM_NAME)) { + setSiblingsAndSoftDeleteSibling( + datasetUrn, searchResult.getEntities().get(0).getEntity()); + } else { + setSiblingsAndSoftDeleteSibling( + searchResult.getEntities().get(0).getEntity(), datasetUrn); + } + } + }); } - // If the upstream is a single source system node & subtype is source, then associate the upstream as your sibling + // If the upstream is a single source system node & subtype is source, then associate the upstream + // as your sibling private void handleDbtDatasetEvent(MetadataChangeLog event, DatasetUrn datasetUrn) { // we need both UpstreamLineage & Subtypes to determine whether to associate UpstreamLineage upstreamLineage = null; @@ -172,41 +181,54 @@ private void handleDbtDatasetEvent(MetadataChangeLog event, DatasetUrn datasetUr upstreamLineage = getUpstreamLineageFromEntityClient(datasetUrn); } - if ( - upstreamLineage != null - && subTypesAspectOfEntity != null - && upstreamLineage.hasUpstreams() - && subTypesAspectOfEntity.hasTypeNames() - && (subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V1) - || subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V2)) - ) { + if (upstreamLineage != null + && subTypesAspectOfEntity != null + && upstreamLineage.hasUpstreams() + && subTypesAspectOfEntity.hasTypeNames() + && (subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V1) + || subTypesAspectOfEntity.getTypeNames().contains(SOURCE_SUBTYPE_V2))) { UpstreamArray upstreams = upstreamLineage.getUpstreams(); - if ( - upstreams.size() == 1 - && !upstreams.get(0).getDataset().getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) { + if (upstreams.size() == 1 + && !upstreams + .get(0) + .getDataset() + .getPlatformEntity() + .getPlatformNameEntity() + .equals(DBT_PLATFORM_NAME)) { setSiblingsAndSoftDeleteSibling(datasetUrn, upstreams.get(0).getDataset()); } } } - // if the dataset is not dbt--- it may be produced by a 
dbt dataset. If so, associate them as siblings + // if the dataset is not dbt--- it may be produced by a dbt dataset. If so, associate them as + // siblings private void handleSourceDatasetEvent(MetadataChangeLog event, DatasetUrn sourceUrn) { if (event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME)) { UpstreamLineage upstreamLineage = getUpstreamLineageFromEvent(event); if (upstreamLineage != null && upstreamLineage.hasUpstreams()) { UpstreamArray upstreams = upstreamLineage.getUpstreams(); - // an entity can have merged lineage (eg. dbt + snowflake), but by default siblings are only between dbt <> non-dbt - UpstreamArray dbtUpstreams = new UpstreamArray( - upstreams.stream() - .filter(obj -> obj.getDataset().getPlatformEntity().getPlatformNameEntity().equals(DBT_PLATFORM_NAME)) - .collect(Collectors.toList()) - ); - // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt model + // an entity can have merged lineage (eg. dbt + snowflake), but by default siblings are only + // between dbt <> non-dbt + UpstreamArray dbtUpstreams = + new UpstreamArray( + upstreams.stream() + .filter( + obj -> + obj.getDataset() + .getPlatformEntity() + .getPlatformNameEntity() + .equals(DBT_PLATFORM_NAME)) + .collect(Collectors.toList())); + // We're assuming a data asset (eg. snowflake table) will only ever be downstream of 1 dbt + // model if (dbtUpstreams.size() == 1) { setSiblingsAndSoftDeleteSibling(dbtUpstreams.get(0).getDataset(), sourceUrn); } else if (dbtUpstreams.size() > 1) { - log.error("{} has an unexpected number of dbt upstreams: {}. Not adding any as siblings.", sourceUrn.toString(), dbtUpstreams.size()); + log.error( + "{} has an unexpected number of dbt upstreams: {}. Not adding any as siblings.", + sourceUrn.toString(), + dbtUpstreams.size()); } } } @@ -218,12 +240,10 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { log.info("Associating {} and {} as siblings.", dbtUrn.toString(), sourceUrn.toString()); - if ( - existingDbtSiblingAspect != null - && existingSourceSiblingAspect != null - && existingDbtSiblingAspect.getSiblings().contains(sourceUrn.toString()) - && existingSourceSiblingAspect.getSiblings().contains(dbtUrn.toString()) - ) { + if (existingDbtSiblingAspect != null + && existingSourceSiblingAspect != null + && existingDbtSiblingAspect.getSiblings().contains(sourceUrn.toString()) + && existingSourceSiblingAspect.getSiblings().contains(dbtUrn.toString())) { // we have already connected them- we can abort here return; } @@ -266,20 +286,24 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { // clean up any references to stale siblings that have been deleted List filteredNewSiblingsArray = - newSiblingsUrnArray.stream().filter(urn -> { - try { - return _entityClient.exists(urn); - } catch (RemoteInvocationException e) { - log.error("Error while checking existence of {}: {}", urn, e.toString()); - throw new RuntimeException("Error checking existence. Skipping processing.", e); - } - }).collect(Collectors.toList()); + newSiblingsUrnArray.stream() + .filter( + urn -> { + try { + return _entityClient.exists(urn); + } catch (RemoteInvocationException e) { + log.error("Error while checking existence of {}: {}", urn, e.toString()); + throw new RuntimeException("Error checking existence. 
Skipping processing.", e); + } + }) + .collect(Collectors.toList()); sourceSiblingAspect.setSiblings(new UrnArray(filteredNewSiblingsArray)); sourceSiblingAspect.setPrimary(false); MetadataChangeProposal sourceSiblingProposal = new MetadataChangeProposal(); - GenericAspect sourceSiblingAspectSerialized = GenericRecordUtils.serializeAspect(sourceSiblingAspect); + GenericAspect sourceSiblingAspectSerialized = + GenericRecordUtils.serializeAspect(sourceSiblingAspect); sourceSiblingProposal.setAspect(sourceSiblingAspectSerialized); sourceSiblingProposal.setAspectName(SIBLINGS_ASPECT_NAME); @@ -295,23 +319,21 @@ private void setSiblingsAndSoftDeleteSibling(Urn dbtUrn, Urn sourceUrn) { } } - /** - * Returns true if the event should be processed, which is only true if the event represents a dataset for now + * Returns true if the event should be processed, which is only true if the event represents a + * dataset for now */ private boolean isEligibleForProcessing(final MetadataChangeLog event) { return event.getEntityType().equals("dataset") && !event.getChangeType().equals(ChangeType.DELETE) - && ( - event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME) - || event.getAspectName().equals(SUB_TYPES_ASPECT_NAME) - || event.getAspectName().equals(DATASET_KEY_ASPECT_NAME) - ); + && (event.getAspectName().equals(UPSTREAM_LINEAGE_ASPECT_NAME) + || event.getAspectName().equals(SUB_TYPES_ASPECT_NAME) + || event.getAspectName().equals(DATASET_KEY_ASPECT_NAME)); } /** - * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an entityUrn - * or entityKey field, depending on which is present. + * Extracts and returns an {@link Urn} from a {@link MetadataChangeLog}. Extracts from either an + * entityUrn or entityKey field, depending on which is present. */ private Urn getUrnFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -319,14 +341,16 @@ private Urn getUrnFromEvent(final MetadataChangeLog event) { entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get urn from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get urn from MetadataChangeLog event. Skipping processing.", e); } // Extract an URN from the Log Event. return EntityKeyUtils.getUrnFromLog(event, entitySpec.getKeyAspectSpec()); } /** - * Deserializes and returns an instance of {@link UpstreamLineage} extracted from a {@link MetadataChangeLog} event. + * Deserializes and returns an instance of {@link UpstreamLineage} extracted from a {@link + * MetadataChangeLog} event. */ private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -338,16 +362,19 @@ private UpstreamLineage getUpstreamLineageFromEvent(final MetadataChangeLog even entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get UpstreamLineage from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get UpstreamLineage from MetadataChangeLog event. 
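For context on setSiblingsAndSoftDeleteSibling above: the hook writes the association back as Siblings aspects on both entities, with the source side explicitly set to primary=false in the diff and the dbt side assumed primary. A condensed sketch of one such write as a MetadataChangeProposal; the setEntityUrn/setEntityType/setChangeType calls follow the usual DataHub proposal pattern, and the "siblings" aspect-name literal is an assumption:

    import com.linkedin.common.Siblings;
    import com.linkedin.common.UrnArray;
    import com.linkedin.common.urn.Urn;
    import com.linkedin.events.metadata.ChangeType;
    import com.linkedin.metadata.utils.GenericRecordUtils;
    import com.linkedin.mxe.MetadataChangeProposal;
    import java.util.List;

    final class SiblingProposals {
      private SiblingProposals() {}

      // Builds the dbt-side proposal: the dbt model lists the source asset as
      // its sibling and is marked primary; the mirror write on the source side
      // sets primary to false, as in the hook above.
      static MetadataChangeProposal dbtSideProposal(Urn dbtUrn, Urn sourceUrn) {
        Siblings siblings = new Siblings();
        siblings.setSiblings(new UrnArray(List.of(sourceUrn)));
        siblings.setPrimary(true); // assumption: dbt side is primary

        MetadataChangeProposal proposal = new MetadataChangeProposal();
        proposal.setEntityUrn(dbtUrn); // assumed setter, per the common pattern
        proposal.setEntityType("dataset");
        proposal.setAspectName("siblings"); // assumed value of SIBLINGS_ASPECT_NAME
        proposal.setAspect(GenericRecordUtils.serializeAspect(siblings));
        proposal.setChangeType(ChangeType.UPSERT);
        return proposal;
      }
    }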
Skipping processing.", e); } - return (UpstreamLineage) GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - entitySpec.getAspectSpec(UPSTREAM_LINEAGE_ASPECT_NAME)); + return (UpstreamLineage) + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), + event.getAspect().getContentType(), + entitySpec.getAspectSpec(UPSTREAM_LINEAGE_ASPECT_NAME)); } /** - * Deserializes and returns an instance of {@link SubTypes} extracted from a {@link MetadataChangeLog} event. + * Deserializes and returns an instance of {@link SubTypes} extracted from a {@link + * MetadataChangeLog} event. */ private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) { EntitySpec entitySpec; @@ -359,22 +386,24 @@ private SubTypes getSubtypesFromEvent(final MetadataChangeLog event) { entitySpec = _entityRegistry.getEntitySpec(event.getEntityType()); } catch (IllegalArgumentException e) { log.error("Error while processing entity type {}: {}", event.getEntityType(), e.toString()); - throw new RuntimeException("Failed to get SubTypes from MetadataChangeLog event. Skipping processing.", e); + throw new RuntimeException( + "Failed to get SubTypes from MetadataChangeLog event. Skipping processing.", e); } - return (SubTypes) GenericRecordUtils.deserializeAspect( - event.getAspect().getValue(), - event.getAspect().getContentType(), - entitySpec.getAspectSpec(SUB_TYPES_ASPECT_NAME)); + return (SubTypes) + GenericRecordUtils.deserializeAspect( + event.getAspect().getValue(), + event.getAspect().getContentType(), + entitySpec.getAspectSpec(SUB_TYPES_ASPECT_NAME)); } @SneakyThrows private AuditStamp getAuditStamp() { - return new AuditStamp().setActor(Urn.createFromString(SIBLING_ASSOCIATION_SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + return new AuditStamp() + .setActor(Urn.createFromString(SIBLING_ASSOCIATION_SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); } - private Filter createFilterForEntitiesWithYouAsSibling( - final Urn entityUrn - ) { + private Filter createFilterForEntitiesWithYouAsSibling(final Urn entityUrn) { final Filter filter = new Filter(); final ConjunctiveCriterionArray disjunction = new ConjunctiveCriterionArray(); @@ -395,16 +424,16 @@ private Filter createFilterForEntitiesWithYouAsSibling( return filter; } - private SubTypes getSubtypesFromEntityClient( - final Urn urn - ) { + private SubTypes getSubtypesFromEntityClient(final Urn urn) { try { - EntityResponse entityResponse = _entityClient.getV2( - urn, - ImmutableSet.of(SUB_TYPES_ASPECT_NAME)); - - if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.SUB_TYPES_ASPECT_NAME)) { - return new SubTypes(entityResponse.getAspects().get(Constants.SUB_TYPES_ASPECT_NAME).getValue().data()); + EntityResponse entityResponse = + _entityClient.getV2(urn, ImmutableSet.of(SUB_TYPES_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.SUB_TYPES_ASPECT_NAME)) { + return new SubTypes( + entityResponse.getAspects().get(Constants.SUB_TYPES_ASPECT_NAME).getValue().data()); } else { return null; } @@ -413,17 +442,20 @@ private SubTypes getSubtypesFromEntityClient( } } - private UpstreamLineage getUpstreamLineageFromEntityClient( - final Urn urn - ) { + private UpstreamLineage getUpstreamLineageFromEntityClient(final Urn urn) { try { - EntityResponse entityResponse = _entityClient.getV2( - urn, - ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME) - ); - - 
if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { - return new UpstreamLineage(entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); + EntityResponse entityResponse = + _entityClient.getV2(urn, ImmutableSet.of(UPSTREAM_LINEAGE_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { + return new UpstreamLineage( + entityResponse + .getAspects() + .get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME) + .getValue() + .data()); } else { return null; } @@ -432,17 +464,16 @@ private UpstreamLineage getUpstreamLineageFromEntityClient( } } - private Siblings getSiblingsFromEntityClient( - final Urn urn - ) { + private Siblings getSiblingsFromEntityClient(final Urn urn) { try { - EntityResponse entityResponse = _entityClient.getV2( - urn, - ImmutableSet.of(SIBLINGS_ASPECT_NAME) - ); - - if (entityResponse != null && entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.SIBLINGS_ASPECT_NAME)) { - return new Siblings(entityResponse.getAspects().get(Constants.SIBLINGS_ASPECT_NAME).getValue().data()); + EntityResponse entityResponse = + _entityClient.getV2(urn, ImmutableSet.of(SIBLINGS_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.SIBLINGS_ASPECT_NAME)) { + return new Siblings( + entityResponse.getAspects().get(Constants.SIBLINGS_ASPECT_NAME).getValue().data()); } else { return null; } @@ -450,5 +481,4 @@ private Siblings getSiblingsFromEntityClient( throw new RuntimeException("Failed to retrieve UpstreamLineage", e); } } - } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java index 03303b7723b9c..5e0b10b3d5049 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/BaseHydrator.java @@ -4,13 +4,10 @@ import com.linkedin.entity.EntityResponse; import lombok.extern.slf4j.Slf4j; - @Slf4j public abstract class BaseHydrator { - /** - * Use values in the entity response to hydrate the document - */ - protected abstract void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse); - + /** Use values in the entity response to hydrate the document */ + protected abstract void hydrateFromEntityResponse( + ObjectNode document, EntityResponse entityResponse); } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java index 493f7424758cc..9dfbdb280b215 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/ChartHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.chart.ChartInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.ChartKey; import 
lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class ChartHydrator extends BaseHydrator { @@ -21,9 +20,12 @@ public class ChartHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(TITLE, new ChartInfo(dataMap).getTitle())); - mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(DASHBOARD_TOOL, new ChartKey(dataMap).getDashboardTool())); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(TITLE, new ChartInfo(dataMap).getTitle())); + mappingHelper.mapToResult( + CHART_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> + jsonNodes.put(DASHBOARD_TOOL, new ChartKey(dataMap).getDashboardTool())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java index 0b8735533ed06..8b7b63f1f3240 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/CorpUserHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.entity.EntityResponse; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.CorpUserKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class CorpUserHydrator extends BaseHydrator { @@ -21,9 +20,11 @@ public class CorpUserHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(NAME, new CorpUserInfo(dataMap).getDisplayName())); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(USER_NAME, new CorpUserKey(dataMap).getUsername())); + mappingHelper.mapToResult( + CORP_USER_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(NAME, new CorpUserInfo(dataMap).getDisplayName())); + mappingHelper.mapToResult( + CORP_USER_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(USER_NAME, new CorpUserKey(dataMap).getUsername())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java index 8b376128b7d11..fcafb3aabc860 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DashboardHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import 
com.linkedin.dashboard.DashboardInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.DashboardKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DashboardHydrator extends BaseHydrator { private static final String DASHBOARD_TOOL = "dashboardTool"; @@ -20,9 +19,12 @@ public class DashboardHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(TITLE, new DashboardInfo(dataMap).getTitle())); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(DASHBOARD_TOOL, new DashboardKey(dataMap).getDashboardTool())); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(TITLE, new DashboardInfo(dataMap).getTitle())); + mappingHelper.mapToResult( + DASHBOARD_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> + jsonNodes.put(DASHBOARD_TOOL, new DashboardKey(dataMap).getDashboardTool())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java index d847168de7783..88efe53f5c53e 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataFlowHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datajob.DataFlowInfo; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.DataFlowKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DataFlowHydrator extends BaseHydrator { @@ -21,9 +20,12 @@ public class DataFlowHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(NAME, new DataFlowInfo(dataMap).getName())); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(ORCHESTRATOR, new DataFlowKey(dataMap).getOrchestrator())); + mappingHelper.mapToResult( + DATA_FLOW_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(NAME, new DataFlowInfo(dataMap).getName())); + mappingHelper.mapToResult( + CORP_USER_KEY_ASPECT_NAME, + (jsonNodes, dataMap) -> + jsonNodes.put(ORCHESTRATOR, new DataFlowKey(dataMap).getOrchestrator())); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java index d9c99e8570e68..d8ea57a467277 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java +++ 
b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DataJobHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -11,9 +13,6 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DataJobHydrator extends BaseHydrator { @@ -24,8 +23,9 @@ public class DataJobHydrator extends BaseHydrator { protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, document); - mappingHelper.mapToResult(DATA_JOB_INFO_ASPECT_NAME, (jsonNodes, dataMap) -> - jsonNodes.put(NAME, new DataJobInfo(dataMap).getName())); + mappingHelper.mapToResult( + DATA_JOB_INFO_ASPECT_NAME, + (jsonNodes, dataMap) -> jsonNodes.put(NAME, new DataJobInfo(dataMap).getName())); try { mappingHelper.mapToResult(DATA_JOB_KEY_ASPECT_NAME, this::mapKey); } catch (Exception e) { @@ -35,8 +35,10 @@ protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse ent private void mapKey(ObjectNode jsonNodes, DataMap dataMap) { DataJobKey dataJobKey = new DataJobKey(dataMap); - DataFlowKey dataFlowKey = (DataFlowKey) EntityKeyUtils - .convertUrnToEntityKeyInternal(dataJobKey.getFlow(), new DataFlowKey().schema()); + DataFlowKey dataFlowKey = + (DataFlowKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + dataJobKey.getFlow(), new DataFlowKey().schema()); jsonNodes.put(ORCHESTRATOR, dataFlowKey.getOrchestrator()); } } diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java index 715b23e48b5b9..d95faf4373521 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/DatasetHydrator.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -8,9 +10,6 @@ import com.linkedin.metadata.key.DatasetKey; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class DatasetHydrator extends BaseHydrator { diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java index 0a3b38517eaad..7a8fdd11fac43 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hydrator/EntityHydrator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hydrator; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.config.EntityHydratorConfig.EXCLUDED_ASPECTS; + import com.fasterxml.jackson.databind.node.JsonNodeFactory; import 
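The hydrators reformatted above all share one shape: wrap the entity response's aspect map in a MappingHelper and copy selected aspect fields into the search document, where each mapping runs only if its aspect is present in the response. A sketch of that shape for a hypothetical hydrator; the aspect name and field key are invented for illustration:

    import com.fasterxml.jackson.databind.node.ObjectNode;
    import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
    import com.linkedin.entity.EntityResponse;
    import com.linkedin.entity.EnvelopedAspectMap;

    // Hypothetical hydrator illustrating the MappingHelper pattern used by
    // the classes above.
    public class ExampleHydrator extends BaseHydrator {

      private static final String DISPLAY_NAME = "displayName"; // illustrative key

      @Override
      protected void hydrateFromEntityResponse(ObjectNode document, EntityResponse entityResponse) {
        EnvelopedAspectMap aspectMap = entityResponse.getAspects();
        MappingHelper<ObjectNode> mappingHelper = new MappingHelper<>(aspectMap, document);
        // Each mapToResult call runs only when the named aspect is present.
        mappingHelper.mapToResult(
            "exampleInfo", // hypothetical aspect name
            (jsonNodes, dataMap) -> jsonNodes.put(DISPLAY_NAME, dataMap.getString("name")));
      }
    }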
com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.common.urn.Urn; @@ -13,14 +16,9 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.config.EntityHydratorConfig.EXCLUDED_ASPECTS; - - @Slf4j @RequiredArgsConstructor public class EntityHydrator { @@ -47,12 +45,17 @@ public Optional getHydratedEntity(String entityTypeName, String urn) // Hydrate fields from snapshot EntityResponse entityResponse; try { - Set aspectNames = Optional.ofNullable(_entityRegistry.getEntitySpecs().get(urnObj.getEntityType())) - .map(spec -> spec.getAspectSpecs().stream().map(AspectSpec::getName) - .filter(aspectName -> !EXCLUDED_ASPECTS.contains(aspectName)) - .collect(Collectors.toSet())) + Set aspectNames = + Optional.ofNullable(_entityRegistry.getEntitySpecs().get(urnObj.getEntityType())) + .map( + spec -> + spec.getAspectSpecs().stream() + .map(AspectSpec::getName) + .filter(aspectName -> !EXCLUDED_ASPECTS.contains(aspectName)) + .collect(Collectors.toSet())) .orElse(Set.of()); - entityResponse = _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); + entityResponse = + _entityClient.batchGetV2(Collections.singleton(urnObj), aspectNames).get(urnObj); } catch (RemoteInvocationException | URISyntaxException e) { log.error("Error while calling GMS to hydrate entity for urn {}", urn); return Optional.empty(); @@ -83,7 +86,10 @@ public Optional getHydratedEntity(String entityTypeName, String urn) _datasetHydrator.hydrateFromEntityResponse(document, entityResponse); break; default: - log.error("Unable to find valid hydrator for entity type: {} urn: {}", entityResponse.getEntityName(), urn); + log.error( + "Unable to find valid hydrator for entity type: {} urn: {}", + entityResponse.getEntityName(), + urn); return Optional.empty(); } return Optional.of(document); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java index 5beb6bdd765a2..30250f14e93e5 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/transformer/DataHubUsageEventTransformer.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.transformer; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.datahubusage.DataHubUsageEventConstants.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -16,24 +19,28 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.datahubusage.DataHubUsageEventConstants.*; - - -/** - * Transformer that transforms usage event (schema defined HERE) into a search document - */ +/** Transformer that transforms usage event (schema defined HERE) into a search document */ @Slf4j @Component public class DataHubUsageEventTransformer { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = 
Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private static final Set EVENTS_WITH_ENTITY_URN = - ImmutableSet.of(DataHubUsageEventType.SEARCH_RESULT_CLICK_EVENT, DataHubUsageEventType.BROWSE_RESULT_CLICK_EVENT, - DataHubUsageEventType.ENTITY_VIEW_EVENT, DataHubUsageEventType.ENTITY_SECTION_VIEW_EVENT, + ImmutableSet.of( + DataHubUsageEventType.SEARCH_RESULT_CLICK_EVENT, + DataHubUsageEventType.BROWSE_RESULT_CLICK_EVENT, + DataHubUsageEventType.ENTITY_VIEW_EVENT, + DataHubUsageEventType.ENTITY_SECTION_VIEW_EVENT, DataHubUsageEventType.ENTITY_ACTION_EVENT); private final EntityHydrator _entityHydrator; @@ -97,7 +104,8 @@ public Optional transformDataHubUsageEvent(String dataHubUs try { return Optional.of( - new TransformedDocument(getId(eventDocument), OBJECT_MAPPER.writeValueAsString(eventDocument))); + new TransformedDocument( + getId(eventDocument), OBJECT_MAPPER.writeValueAsString(eventDocument))); } catch (JsonProcessingException e) { log.info("Failed to package document: {}", eventDocument); return Optional.empty(); @@ -128,13 +136,21 @@ private void setFieldsForEntity(EntityType entityType, String urn, ObjectNode se log.info("No matches for urn {}", urn); return; } - entityObject.get().fieldNames() + entityObject + .get() + .fieldNames() .forEachRemaining( - key -> searchObject.put(entityType.name().toLowerCase() + "_" + key, entityObject.get().get(key).asText())); + key -> + searchObject.put( + entityType.name().toLowerCase() + "_" + key, + entityObject.get().get(key).asText())); } private String getId(final ObjectNode eventDocument) { - return eventDocument.get(TYPE).asText() + "_" + eventDocument.get(ACTOR_URN).asText() + "_" + eventDocument.get( - TIMESTAMP).asText(); + return eventDocument.get(TYPE).asText() + + "_" + + eventDocument.get(ACTOR_URN).asText() + + "_" + + eventDocument.get(TIMESTAMP).asText(); } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java index a237e3e27f168..f2bb8a5fc9222 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/EntityRegistryTestUtil.java @@ -5,14 +5,18 @@ import com.linkedin.metadata.models.registry.EntityRegistry; public class EntityRegistryTestUtil { - private EntityRegistryTestUtil() { - } + private EntityRegistryTestUtil() {} - public static final EntityRegistry ENTITY_REGISTRY; + public static final EntityRegistry ENTITY_REGISTRY; - static { - EntityRegistryTestUtil.class.getClassLoader().setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - ENTITY_REGISTRY = new ConfigEntityRegistry( - EntityRegistryTestUtil.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); - } + static { + EntityRegistryTestUtil.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + ENTITY_REGISTRY = 
+ new ConfigEntityRegistry(
+ EntityRegistryTestUtil.class
+ .getClassLoader()
+ .getResourceAsStream("test-entity-registry.yml"));
+ }
}
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java
index 85b00e9ade6b8..a1a9bd4cd413a 100644
--- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java
+++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/GraphIndexUtilsTest.java
@@ -1,5 +1,9 @@
package com.linkedin.metadata.kafka.hook;

+import static com.linkedin.metadata.graph.GraphIndexUtils.*;
+import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY;
+import static org.testng.Assert.*;
+
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.DatasetUrn;
import com.linkedin.common.urn.Urn;
@@ -29,11 +33,6 @@
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

-import static com.linkedin.metadata.graph.GraphIndexUtils.*;
-import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY;
-import static org.testng.Assert.*;
-
-
public class GraphIndexUtilsTest {

private static final String UPSTREAM_RELATIONSHIP_PATH = "/upstreams/*/dataset";
@@ -54,7 +53,9 @@ public class GraphIndexUtilsTest {
public void setupTest() {
_createdActorUrn = UrnUtils.getUrn(CREATED_ACTOR_URN);
_updatedActorUrn = UrnUtils.getUrn(UPDATED_ACTOR_URN);
- _datasetUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)");
+ _datasetUrn =
+ UrnUtils.getUrn(
+ "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)");
_upstreamDataset1 = UrnUtils.toDatasetUrn("snowflake", "test", "DEV");
_upstreamDataset2 = UrnUtils.toDatasetUrn("snowflake", "test2", "DEV");
_mockRegistry = ENTITY_REGISTRY;
@@ -74,29 +75,30 @@ public void testExtractGraphEdgesDefault() {
for (Map.Entry<RelationshipFieldSpec, List<Object>> entry : extractedFields.entrySet()) {
// check specifically for the upstreams relationship entry
if (entry.getKey().getPath().toString().equals(UPSTREAM_RELATIONSHIP_PATH)) {
- List<Edge> edgesToAdd = GraphIndexUtils.extractGraphEdges(entry, upstreamLineage, _datasetUrn, event, true);
+ List<Edge> edgesToAdd =
+ GraphIndexUtils.extractGraphEdges(entry, upstreamLineage, _datasetUrn, event, true);
List<Edge> expectedEdgesToAdd = new ArrayList<>();
// edges contain default created event time and created actor from system metadata
- Edge edge1 = new Edge(
- _datasetUrn,
- _upstreamDataset1,
- entry.getKey().getRelationshipName(),
- CREATED_EVENT_TIME,
- _createdActorUrn,
- UPDATED_EVENT_TIME_1,
- _updatedActorUrn,
- null
- );
- Edge edge2 = new Edge(
- _datasetUrn,
- _upstreamDataset2,
- entry.getKey().getRelationshipName(),
- CREATED_EVENT_TIME,
- _createdActorUrn,
- UPDATED_EVENT_TIME_2,
- _updatedActorUrn,
- null
- );
+ Edge edge1 =
+ new Edge(
+ _datasetUrn,
+ _upstreamDataset1,
+ entry.getKey().getRelationshipName(),
+ CREATED_EVENT_TIME,
+ _createdActorUrn,
+ UPDATED_EVENT_TIME_1,
+ _updatedActorUrn,
+ null);
+ Edge edge2 =
+ new Edge(
+ _datasetUrn,
+ _upstreamDataset2,
+ entry.getKey().getRelationshipName(),
+ CREATED_EVENT_TIME,
+ _createdActorUrn,
+ UPDATED_EVENT_TIME_2,
+ _updatedActorUrn,
+ null);
expectedEdgesToAdd.add(edge1);
expectedEdgesToAdd.add(edge2);
assertEquals(expectedEdgesToAdd.size(), edgesToAdd.size());
@@ -108,26 +110,26 @@
@Test public void testMergeEdges() { - final Edge edge1 = new Edge( - _datasetUrn, - _upstreamDataset1, - DOWNSTREAM_RELATIONSHIP_TYPE, - CREATED_EVENT_TIME, - _createdActorUrn, - UPDATED_EVENT_TIME_1, - _updatedActorUrn, - Collections.singletonMap("foo", "bar") - ); - final Edge edge2 = new Edge( - _datasetUrn, - _upstreamDataset1, - DOWNSTREAM_RELATIONSHIP_TYPE, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - UPDATED_EVENT_TIME_2, - _updatedActorUrn, - Collections.singletonMap("foo", "baz") - ); + final Edge edge1 = + new Edge( + _datasetUrn, + _upstreamDataset1, + DOWNSTREAM_RELATIONSHIP_TYPE, + CREATED_EVENT_TIME, + _createdActorUrn, + UPDATED_EVENT_TIME_1, + _updatedActorUrn, + Collections.singletonMap("foo", "bar")); + final Edge edge2 = + new Edge( + _datasetUrn, + _upstreamDataset1, + DOWNSTREAM_RELATIONSHIP_TYPE, + UPDATED_EVENT_TIME_2, + _updatedActorUrn, + UPDATED_EVENT_TIME_2, + _updatedActorUrn, + Collections.singletonMap("foo", "baz")); final Edge edge3 = mergeEdges(edge1, edge2); assertEquals(edge3.getSource(), edge1.getSource()); assertEquals(edge3.getDestination(), edge1.getDestination()); @@ -144,11 +146,13 @@ private UpstreamLineage createUpstreamLineage() { UpstreamArray upstreams = new UpstreamArray(); Upstream upstream1 = new Upstream(); upstream1.setDataset(_upstreamDataset1); - upstream1.setAuditStamp(new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); + upstream1.setAuditStamp( + new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); upstream1.setType(DatasetLineageType.TRANSFORMED); Upstream upstream2 = new Upstream(); upstream2.setDataset(_upstreamDataset2); - upstream2.setAuditStamp(new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); + upstream2.setAuditStamp( + new AuditStamp().setActor(_updatedActorUrn).setTime(UPDATED_EVENT_TIME_1)); upstream2.setType(DatasetLineageType.TRANSFORMED); upstreams.add(upstream1); upstreams.add(upstream2); diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java index 0897cfa01084f..724b91edbf8a1 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/MCLProcessingTestDataGenerator.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.UpdateIndicesHookTest.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; @@ -18,15 +21,9 @@ import java.util.HashMap; import java.util.Map; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.UpdateIndicesHookTest.*; - - public class MCLProcessingTestDataGenerator { - private MCLProcessingTestDataGenerator() { - - } + private MCLProcessingTestDataGenerator() {} public static MetadataChangeLog createBaseChangeLog() throws URISyntaxException { MetadataChangeLog event = new MetadataChangeLog(); @@ -39,7 +36,8 @@ public static MetadataChangeLog createBaseChangeLog() throws URISyntaxException event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); - event.setCreated(new 
AuditStamp().setActor(UrnUtils.getUrn(TEST_ACTOR_URN)).setTime(EVENT_TIME)); + event.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn(TEST_ACTOR_URN)).setTime(EVENT_TIME)); return event; } @@ -68,7 +66,8 @@ public static MetadataChangeLog setSystemMetadataWithForceIndexing(MetadataChang return changeLog.setSystemMetadata(systemMetadata); } - public static MetadataChangeLog setPreviousData(MetadataChangeLog changeLog, MetadataChangeLog previousState) { + public static MetadataChangeLog setPreviousData( + MetadataChangeLog changeLog, MetadataChangeLog previousState) { changeLog.setPreviousAspectValue(previousState.getAspect()); return changeLog.setPreviousSystemMetadata(previousState.getSystemMetadata()); } @@ -93,7 +92,8 @@ public static MetadataChangeLog modifySystemMetadata2(MetadataChangeLog changeLo return changeLog.setSystemMetadata(systemMetadata); } - public static MetadataChangeLog modifyAspect(MetadataChangeLog changeLog, UpstreamLineage upstreamLineage) { + public static MetadataChangeLog modifyAspect( + MetadataChangeLog changeLog, UpstreamLineage upstreamLineage) { return changeLog.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); } @@ -109,7 +109,8 @@ public static UpstreamLineage createBaseLineageAspect() throws URISyntaxExceptio return upstreamLineage; } - public static UpstreamLineage addLineageEdge(UpstreamLineage upstreamLineage) throws URISyntaxException { + public static UpstreamLineage addLineageEdge(UpstreamLineage upstreamLineage) + throws URISyntaxException { UpstreamArray upstreamArray = upstreamLineage.getUpstreams(); Upstream upstream = new Upstream(); upstream.setType(DatasetLineageType.TRANSFORMED); @@ -127,5 +128,4 @@ public static UpstreamLineage modifyNonSearchableField(UpstreamLineage upstreamL upstreamArray.set(0, upstream); return upstreamLineage.setUpstreams(upstreamArray); } - } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java index 90f8f208c4cb6..12c8ad7d0c69b 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/UpdateIndicesHookTest.java @@ -1,7 +1,10 @@ package com.linkedin.metadata.kafka.hook; -import com.linkedin.metadata.config.SystemUpdateConfiguration; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; +import static com.linkedin.metadata.kafka.hook.MCLProcessingTestDataGenerator.*; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; + import com.linkedin.common.AuditStamp; import com.linkedin.common.InputField; import com.linkedin.common.InputFieldArray; @@ -22,9 +25,11 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; +import com.linkedin.metadata.config.SystemUpdateConfiguration; +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.graph.Edge; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import 
com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; import com.linkedin.metadata.key.ChartKey; import com.linkedin.metadata.models.AspectSpec; @@ -43,31 +48,28 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaField; - import java.net.URISyntaxException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collections; import org.mockito.Mockito; import org.springframework.beans.factory.annotation.Value; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Collections; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; -import static com.linkedin.metadata.kafka.hook.MCLProcessingTestDataGenerator.*; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; - public class UpdateIndicesHookTest { -// going to want a test where we have an upstreamLineage aspect with finegrained, check that we call _graphService.addEdge for each edge -// as well as _graphService.removeEdgesFromNode for each field and their relationships + // going to want a test where we have an upstreamLineage aspect with finegrained, check that we + // call _graphService.addEdge for each edge + // as well as _graphService.removeEdgesFromNode for each field and their relationships static final long EVENT_TIME = 123L; - static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)"; - static final String TEST_DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"; - static final String TEST_DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressKafkaDataset,PROD)"; + static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)"; + static final String TEST_DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"; + static final String TEST_DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressKafkaDataset,PROD)"; static final String TEST_CHART_URN = "urn:li:chart:(looker,dashboard_elements.1)"; static final String TEST_ACTOR_URN = "urn:li:corpuser:test"; static final String DOWNSTREAM_OF = "DownstreamOf"; @@ -107,88 +109,112 @@ public void setupTest() { ElasticSearchConfiguration elasticSearchConfiguration = new ElasticSearchConfiguration(); SystemUpdateConfiguration systemUpdateConfiguration = new SystemUpdateConfiguration(); systemUpdateConfiguration.setWaitForSystemUpdate(false); - Mockito.when(_mockConfigurationProvider.getElasticSearch()).thenReturn(elasticSearchConfiguration); - _updateIndicesService = new UpdateIndicesService( - _mockGraphService, - _mockEntitySearchService, - _mockTimeseriesAspectService, - _mockSystemMetadataService, - ENTITY_REGISTRY, - _searchDocumentTransformer, - _mockEntityIndexBuilders - ); - _updateIndicesHook = new UpdateIndicesHook( - _updateIndicesService, - true - ); + Mockito.when(_mockConfigurationProvider.getElasticSearch()) + .thenReturn(elasticSearchConfiguration); + _updateIndicesService = + new UpdateIndicesService( + _mockGraphService, + _mockEntitySearchService, + _mockTimeseriesAspectService, + _mockSystemMetadataService, + ENTITY_REGISTRY, + _searchDocumentTransformer, + _mockEntityIndexBuilders); + _updateIndicesHook = new 
UpdateIndicesHook(_updateIndicesService, true); } @Test public void testFineGrainedLineageEdgesAreAdded() throws Exception { _updateIndicesService.setGraphDiffMode(false); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); MetadataChangeLog event = createUpstreamLineageMCL(upstreamUrn, downstreamUrn); _updateIndicesHook.invoke(event); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); } @Test public void testFineGrainedLineageEdgesAreAddedRestate() throws Exception { _updateIndicesService.setGraphDiffMode(false); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); - MetadataChangeLog event = createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.RESTATE); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + MetadataChangeLog event = + createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.RESTATE); _updateIndicesHook.invoke(event); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); Mockito.verify(_mockEntitySearchService, Mockito.times(1)) - .upsertDocument(Mockito.eq(DATASET_ENTITY_NAME), Mockito.any(), - 
Mockito.eq(URLEncoder.encode("urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)", - StandardCharsets.UTF_8))); + .upsertDocument( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.any(), + Mockito.eq( + URLEncoder.encode( + "urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD)", + StandardCharsets.UTF_8))); } @Test public void testInputFieldsEdgesAreAdded() throws Exception { - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,thelook.explore.order_items,PROD),users.count)"); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:looker,thelook.explore.order_items,PROD),users.count)"); String downstreamFieldPath = "users.count"; MetadataChangeLog event = createInputFieldsMCL(upstreamUrn, downstreamFieldPath); EntityRegistry mockEntityRegistry = createMockEntityRegistry(); - _updateIndicesService = new UpdateIndicesService( - _mockGraphService, - _mockEntitySearchService, - _mockTimeseriesAspectService, - _mockSystemMetadataService, - mockEntityRegistry, - _searchDocumentTransformer, - _mockEntityIndexBuilders - ); + _updateIndicesService = + new UpdateIndicesService( + _mockGraphService, + _mockEntitySearchService, + _mockTimeseriesAspectService, + _mockSystemMetadataService, + mockEntityRegistry, + _searchDocumentTransformer, + _mockEntityIndexBuilders); _updateIndicesHook = new UpdateIndicesHook(_updateIndicesService, true); _updateIndicesHook.invoke(event); - Urn downstreamUrn = UrnUtils.getUrn(String.format("urn:li:schemaField:(%s,%s)", TEST_CHART_URN, downstreamFieldPath)); + Urn downstreamUrn = + UrnUtils.getUrn( + String.format("urn:li:schemaField:(%s,%s)", TEST_CHART_URN, downstreamFieldPath)); Edge edge = new Edge(downstreamUrn, upstreamUrn, DOWNSTREAM_OF, null, null, null, null, null); Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.eq(edge)); - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode( - Mockito.eq(downstreamUrn), - Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), - Mockito.eq(newRelationshipFilter(new Filter().setOr(new ConjunctiveCriterionArray()), RelationshipDirection.OUTGOING)) - ); + Mockito.verify(_mockGraphService, Mockito.times(1)) + .removeEdgesFromNode( + Mockito.eq(downstreamUrn), + Mockito.eq(new ArrayList<>(Collections.singleton(DOWNSTREAM_OF))), + Mockito.eq( + newRelationshipFilter( + new Filter().setOr(new ConjunctiveCriterionArray()), + RelationshipDirection.OUTGOING))); } @Test @@ -207,7 +233,9 @@ public void testMCLProcessExhaustive() throws URISyntaxException { Mockito.verify(_mockGraphService, Mockito.times(1)).addEdge(Mockito.any()); // Update document Mockito.verify(_mockEntitySearchService, Mockito.times(1)) - .upsertDocument(Mockito.eq(DATASET_ENTITY_NAME), Mockito.any(), + .upsertDocument( + Mockito.eq(DATASET_ENTITY_NAME), + Mockito.any(), Mockito.eq(URLEncoder.encode(TEST_DATASET_URN, StandardCharsets.UTF_8))); /* @@ -261,7 +289,6 @@ public void testMCLProcessExhaustive() throws URISyntaxException { Mockito.verify(_mockEntitySearchService, Mockito.times(0)) .upsertDocument(Mockito.any(), Mockito.any(), Mockito.any()); - /* * noOpUpsert */ @@ -356,8 +383,8 @@ public void testMCLProcessExhaustive() throws URISyntaxException { _updateIndicesHook.invoke(changeLog); // Forced removal of all edges - Mockito.verify(_mockGraphService, Mockito.times(1)).removeEdgesFromNode(Mockito.any(), - Mockito.any(), Mockito.any()); + Mockito.verify(_mockGraphService, 
Mockito.times(1)) + .removeEdgesFromNode(Mockito.any(), Mockito.any(), Mockito.any()); // Forced add of edges Mockito.verify(_mockGraphService, Mockito.times(2)).addEdge(Mockito.any()); // Forced document update @@ -369,45 +396,64 @@ public void testMCLProcessExhaustive() throws URISyntaxException { public void testMCLUIPreProcessed() throws Exception { _updateIndicesService.setGraphDiffMode(true); _updateIndicesService.setSearchDiffMode(true); - Urn upstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); - Urn downstreamUrn = UrnUtils.getUrn("urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); - - MetadataChangeLog changeLog = createUpstreamLineageMCLUIPreProcessed(upstreamUrn, downstreamUrn, ChangeType.UPSERT); + Urn upstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD),foo_info)"); + Urn downstreamUrn = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_foo)"); + + MetadataChangeLog changeLog = + createUpstreamLineageMCLUIPreProcessed(upstreamUrn, downstreamUrn, ChangeType.UPSERT); _updateIndicesHook.invoke(changeLog); - Mockito.verifyNoInteractions(_mockEntitySearchService, _mockGraphService, _mockTimeseriesAspectService, _mockSystemMetadataService); + Mockito.verifyNoInteractions( + _mockEntitySearchService, + _mockGraphService, + _mockTimeseriesAspectService, + _mockSystemMetadataService); } private EntityRegistry createMockEntityRegistry() { - // need to mock this registry instead of using test-entity-registry.yml because inputFields does not work due to a known bug + // need to mock this registry instead of using test-entity-registry.yml because inputFields does + // not work due to a known bug EntityRegistry mockEntityRegistry = Mockito.mock(EntityRegistry.class); EntitySpec entitySpec = Mockito.mock(EntitySpec.class); AspectSpec aspectSpec = createMockAspectSpec(InputFields.class, InputFields.dataSchema()); - AspectSpec upstreamLineageAspectSpec = createMockAspectSpec(UpstreamLineage.class, UpstreamLineage.dataSchema()); - Mockito.when(mockEntityRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(mockEntityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); - Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)).thenReturn(upstreamLineageAspectSpec); + AspectSpec upstreamLineageAspectSpec = + createMockAspectSpec(UpstreamLineage.class, UpstreamLineage.dataSchema()); + Mockito.when(mockEntityRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(mockEntityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) + .thenReturn(upstreamLineageAspectSpec); Mockito.when(aspectSpec.isTimeseries()).thenReturn(false); Mockito.when(aspectSpec.getName()).thenReturn(Constants.INPUT_FIELDS_ASPECT_NAME); Mockito.when(upstreamLineageAspectSpec.isTimeseries()).thenReturn(false); - Mockito.when(upstreamLineageAspectSpec.getName()).thenReturn(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + 
Mockito.when(upstreamLineageAspectSpec.getName())
+ .thenReturn(Constants.UPSTREAM_LINEAGE_ASPECT_NAME);
AspectSpec chartKeyAspectSpec = createMockAspectSpec(ChartKey.class, ChartKey.dataSchema());
Mockito.when(entitySpec.getKeyAspectSpec()).thenReturn(chartKeyAspectSpec);
return mockEntityRegistry;
}

- private <T extends RecordTemplate> AspectSpec createMockAspectSpec(Class<T> clazz, RecordDataSchema schema) {
+ private <T extends RecordTemplate> AspectSpec createMockAspectSpec(
+ Class<T> clazz, RecordDataSchema schema) {
AspectSpec mockSpec = Mockito.mock(AspectSpec.class);
Mockito.when(mockSpec.getDataTemplateClass()).thenReturn((Class) clazz);
Mockito.when(mockSpec.getPegasusSchema()).thenReturn(schema);
return mockSpec;
}

- private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn) throws Exception {
+ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn)
+ throws Exception {
return createUpstreamLineageMCL(upstreamUrn, downstreamUrn, ChangeType.UPSERT);
}

- private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception {
+ private MetadataChangeLog createUpstreamLineageMCL(
+ Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception {
MetadataChangeLog event = new MetadataChangeLog();
event.setEntityType(Constants.DATASET_ENTITY_NAME);
event.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME);
@@ -427,7 +473,9 @@ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstre
final UpstreamArray upstreamArray = new UpstreamArray();
final Upstream upstream = new Upstream();
upstream.setType(DatasetLineageType.TRANSFORMED);
- upstream.setDataset(DatasetUrn.createFromString("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"));
+ upstream.setDataset(
+ DatasetUrn.createFromString(
+ "urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleCypressHdfsDataset,PROD)"));
upstreamArray.add(upstream);
upstreamLineage.setUpstreams(upstreamArray);
@@ -438,8 +486,10 @@ private MetadataChangeLog createUpstreamLineageMCL(Urn upstreamUrn, Urn downstre
return event;
}

- private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception {
- final MetadataChangeLog metadataChangeLog = createUpstreamLineageMCL(upstreamUrn, downstreamUrn, changeType);
+ private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(
+ Urn upstreamUrn, Urn downstreamUrn, ChangeType changeType) throws Exception {
+ final MetadataChangeLog metadataChangeLog =
+ createUpstreamLineageMCL(upstreamUrn, downstreamUrn, changeType);
final StringMap properties = new StringMap();
properties.put(APP_SOURCE, UI_SOURCE);
final SystemMetadata systemMetadata = new SystemMetadata().setProperties(properties);
@@ -447,7 +497,8 @@ private MetadataChangeLog createUpstreamLineageMCLUIPreProcessed(Urn upstreamUrn
return metadataChangeLog;
}

- private MetadataChangeLog createInputFieldsMCL(Urn upstreamUrn, String downstreamFieldPath) throws Exception {
+ private MetadataChangeLog createInputFieldsMCL(Urn upstreamUrn, String downstreamFieldPath)
+ throws Exception {
MetadataChangeLog event = new MetadataChangeLog();
event.setEntityType(Constants.CHART_ENTITY_NAME);
event.setAspectName(Constants.INPUT_FIELDS_ASPECT_NAME);
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java 
b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java index 7d9619f3e2d1c..8400e19ce49a3 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/EntityChangeEventGeneratorHookTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka.hook.event; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.assertion.AssertionResult; @@ -64,30 +68,27 @@ import com.linkedin.platform.event.v1.Parameters; import java.net.URISyntaxException; import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - - /** * Tests the {@link EntityChangeEventGeneratorHook}. * - * TODO: Include Schema Field Tests, description update tests. + *
TODO: Include Schema Field Tests, description update tests. */ public class EntityChangeEventGeneratorHookTest { private static final long EVENT_TIME = 123L; - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleDataset,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleDataset,PROD)"; private static final String TEST_ACTOR_URN = "urn:li:corpuser:test"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:123"; private static final String TEST_RUN_ID = "runId"; - private static final String TEST_DATA_PROCESS_INSTANCE_URN = "urn:li:dataProcessInstance:instance"; - private static final String TEST_DATA_PROCESS_INSTANCE_PARENT_URN = "urn:li:dataProcessInstance:parent"; + private static final String TEST_DATA_PROCESS_INSTANCE_URN = + "urn:li:dataProcessInstance:instance"; + private static final String TEST_DATA_PROCESS_INSTANCE_PARENT_URN = + "urn:li:dataProcessInstance:parent"; private static final String TEST_DATA_FLOW_URN = "urn:li:dataFlow:flow"; private static final String TEST_DATA_JOB_URN = "urn:li:dataJob:job"; private Urn actorUrn; @@ -101,9 +102,11 @@ public void setupTest() throws URISyntaxException { actorUrn = Urn.createFromString(TEST_ACTOR_URN); _mockClient = Mockito.mock(SystemRestliEntityClient.class); _mockEntityService = Mockito.mock(EntityService.class); - EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = createEntityChangeEventGeneratorRegistry(); + EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry = + createEntityChangeEventGeneratorRegistry(); _entityChangeEventHook = - new EntityChangeEventGeneratorHook(entityChangeEventGeneratorRegistry, _mockClient, createMockEntityRegistry(), true); + new EntityChangeEventGeneratorHook( + entityChangeEventGeneratorRegistry, _mockClient, createMockEntityRegistry(), true); } @Test @@ -114,10 +117,8 @@ public void testInvokeEntityAddTagChange() throws Exception { event.setChangeType(ChangeType.UPSERT); final GlobalTags newTags = new GlobalTags(); final TagUrn newTagUrn = new TagUrn("Test"); - newTags.setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation() - .setTag(newTagUrn) - ))); + newTags.setTags( + new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(newTagUrn)))); event.setAspect(GenericRecordUtils.serializeAspect(newTags)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -128,8 +129,14 @@ public void testInvokeEntityAddTagChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.TAG, - ChangeOperation.ADD, newTagUrn.toString(), ImmutableMap.of("tagUrn", newTagUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.TAG, + ChangeOperation.ADD, + newTagUrn.toString(), + ImmutableMap.of("tagUrn", newTagUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -142,10 +149,8 @@ public void testInvokeEntityRemoveTagChange() throws Exception { event.setChangeType(ChangeType.UPSERT); final GlobalTags existingTags = new GlobalTags(); final TagUrn newTagUrn = new TagUrn("Test"); - existingTags.setTags(new TagAssociationArray( - ImmutableList.of(new TagAssociation() - .setTag(newTagUrn) - ))); + existingTags.setTags( + new TagAssociationArray(ImmutableList.of(new 
TagAssociation().setTag(newTagUrn)))); event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(existingTags)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -156,8 +161,14 @@ public void testInvokeEntityRemoveTagChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.TAG, - ChangeOperation.REMOVE, newTagUrn.toString(), ImmutableMap.of("tagUrn", newTagUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.TAG, + ChangeOperation.REMOVE, + newTagUrn.toString(), + ImmutableMap.of("tagUrn", newTagUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -170,11 +181,9 @@ public void testInvokeEntityAddTermChange() throws Exception { event.setChangeType(ChangeType.UPSERT); final GlossaryTerms newTerms = new GlossaryTerms(); final GlossaryTermUrn glossaryTermUrn = new GlossaryTermUrn("TestTerm"); - newTerms.setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation() - .setUrn(glossaryTermUrn) - ) - )); + newTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of(new GlossaryTermAssociation().setUrn(glossaryTermUrn)))); final GlossaryTerms previousTerms = new GlossaryTerms(); previousTerms.setTerms(new GlossaryTermAssociationArray()); event.setAspect(GenericRecordUtils.serializeAspect(newTerms)); @@ -188,8 +197,13 @@ public void testInvokeEntityAddTermChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.GLOSSARY_TERM, - ChangeOperation.ADD, glossaryTermUrn.toString(), ImmutableMap.of("termUrn", glossaryTermUrn.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.GLOSSARY_TERM, + ChangeOperation.ADD, + glossaryTermUrn.toString(), + ImmutableMap.of("termUrn", glossaryTermUrn.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); @@ -205,11 +219,9 @@ public void testInvokeEntityRemoveTermChange() throws Exception { newTerms.setTerms(new GlossaryTermAssociationArray()); final GlossaryTerms previousTerms = new GlossaryTerms(); final GlossaryTermUrn glossaryTermUrn = new GlossaryTermUrn("TestTerm"); - previousTerms.setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation() - .setUrn(glossaryTermUrn) - ) - )); + previousTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of(new GlossaryTermAssociation().setUrn(glossaryTermUrn)))); event.setAspect(GenericRecordUtils.serializeAspect(newTerms)); event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(previousTerms)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); @@ -221,8 +233,13 @@ public void testInvokeEntityRemoveTermChange() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.GLOSSARY_TERM, - ChangeOperation.REMOVE, glossaryTermUrn.toString(), ImmutableMap.of("termUrn", glossaryTermUrn.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.GLOSSARY_TERM, + ChangeOperation.REMOVE, + glossaryTermUrn.toString(), + ImmutableMap.of("termUrn", 
glossaryTermUrn.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); @@ -236,8 +253,7 @@ public void testInvokeEntitySetDomain() throws Exception { event.setChangeType(ChangeType.UPSERT); final Domains newDomains = new Domains(); final Urn domainUrn = Urn.createFromString("urn:li:domain:test"); - newDomains.setDomains(new UrnArray( - ImmutableList.of(domainUrn))); + newDomains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); event.setAspect(GenericRecordUtils.serializeAspect(newDomains)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -248,8 +264,14 @@ public void testInvokeEntitySetDomain() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DOMAIN, - ChangeOperation.ADD, domainUrn.toString(), ImmutableMap.of("domainUrn", domainUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.DOMAIN, + ChangeOperation.ADD, + domainUrn.toString(), + ImmutableMap.of("domainUrn", domainUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -262,8 +284,7 @@ public void testInvokeEntityUnsetDomain() throws Exception { event.setChangeType(ChangeType.UPSERT); final Domains previousDomains = new Domains(); final Urn domainUrn = Urn.createFromString("urn:li:domain:test"); - previousDomains.setDomains(new UrnArray( - ImmutableList.of(domainUrn))); + previousDomains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); event.setPreviousAspectValue(GenericRecordUtils.serializeAspect(previousDomains)); event.setEntityUrn(Urn.createFromString(TEST_DATASET_URN)); event.setEntityType(DATASET_ENTITY_NAME); @@ -274,8 +295,14 @@ public void testInvokeEntityUnsetDomain() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DOMAIN, - ChangeOperation.REMOVE, domainUrn.toString(), ImmutableMap.of("domainUrn", domainUrn.toString()), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.DOMAIN, + ChangeOperation.REMOVE, + domainUrn.toString(), + ImmutableMap.of("domainUrn", domainUrn.toString()), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -289,12 +316,11 @@ public void testInvokeEntityOwnerChange() throws Exception { final Ownership newOwners = new Ownership(); final Urn ownerUrn1 = Urn.createFromString("urn:li:corpuser:test1"); final Urn ownerUrn2 = Urn.createFromString("urn:li:corpuser:test2"); - newOwners.setOwners(new OwnerArray( - ImmutableList.of( - new Owner().setOwner(ownerUrn1).setType(OwnershipType.TECHNICAL_OWNER), - new Owner().setOwner(ownerUrn2).setType(OwnershipType.BUSINESS_OWNER) - ) - )); + newOwners.setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setOwner(ownerUrn1).setType(OwnershipType.TECHNICAL_OWNER), + new Owner().setOwner(ownerUrn2).setType(OwnershipType.BUSINESS_OWNER)))); final Ownership prevOwners = new Ownership(); prevOwners.setOwners(new OwnerArray()); event.setAspect(GenericRecordUtils.serializeAspect(newOwners)); @@ -308,16 +334,32 @@ public void testInvokeEntityOwnerChange() throws Exception { // Create Platform Event PlatformEvent platformEvent1 = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.OWNER, - 
ChangeOperation.ADD, ownerUrn1.toString(), - ImmutableMap.of("ownerUrn", ownerUrn1.toString(), "ownerType", OwnershipType.TECHNICAL_OWNER.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.OWNER, + ChangeOperation.ADD, + ownerUrn1.toString(), + ImmutableMap.of( + "ownerUrn", + ownerUrn1.toString(), + "ownerType", + OwnershipType.TECHNICAL_OWNER.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent1, false); PlatformEvent platformEvent2 = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.OWNER, - ChangeOperation.ADD, ownerUrn2.toString(), - ImmutableMap.of("ownerUrn", ownerUrn2.toString(), "ownerType", OwnershipType.BUSINESS_OWNER.toString()), + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.OWNER, + ChangeOperation.ADD, + ownerUrn2.toString(), + ImmutableMap.of( + "ownerUrn", + ownerUrn2.toString(), + "ownerType", + OwnershipType.BUSINESS_OWNER.toString()), actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent2, true); } @@ -344,8 +386,14 @@ public void testInvokeEntityTermDeprecation() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.DEPRECATION, - ChangeOperation.MODIFY, null, ImmutableMap.of("status", "DEPRECATED"), actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.DEPRECATION, + ChangeOperation.MODIFY, + null, + ImmutableMap.of("status", "DEPRECATED"), + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -372,8 +420,14 @@ public void testInvokeEntityCreate() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE, - ChangeOperation.CREATE, null, null, actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.LIFECYCLE, + ChangeOperation.CREATE, + null, + null, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -400,8 +454,14 @@ public void testInvokeEntityHardDelete() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE, - ChangeOperation.HARD_DELETE, null, null, actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.LIFECYCLE, + ChangeOperation.HARD_DELETE, + null, + null, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -426,8 +486,14 @@ public void testInvokeEntitySoftDelete() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATASET_ENTITY_NAME, Urn.createFromString(TEST_DATASET_URN), ChangeCategory.LIFECYCLE, - ChangeOperation.SOFT_DELETE, null, null, actorUrn); + createChangeEvent( + DATASET_ENTITY_NAME, + Urn.createFromString(TEST_DATASET_URN), + ChangeCategory.LIFECYCLE, + ChangeOperation.SOFT_DELETE, + null, + null, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -464,8 +530,14 @@ public void testInvokeAssertionRunEventCreate() throws Exception { // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(ASSERTION_ENTITY_NAME, assertionUrn, ChangeCategory.RUN, ChangeOperation.COMPLETED, 
null, - paramsMap, actorUrn); + createChangeEvent( + ASSERTION_ENTITY_NAME, + assertionUrn, + ChangeCategory.RUN, + ChangeOperation.COMPLETED, + null, + paramsMap, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent); } @@ -487,25 +559,37 @@ public void testInvokeDataProcessInstanceRunEventStart() throws Exception { event.setCreated(new AuditStamp().setActor(actorUrn).setTime(EVENT_TIME)); DataProcessInstanceRelationships relationships = - new DataProcessInstanceRelationships().setParentInstance( - Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) + new DataProcessInstanceRelationships() + .setParentInstance(Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) .setParentTemplate(Urn.createFromString(TEST_DATA_JOB_URN)); final EntityResponse entityResponse = - buildEntityResponse(ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); + buildEntityResponse( + ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); Mockito.when(_mockClient.getV2(eq(dataProcessInstanceUrn), any())).thenReturn(entityResponse); _entityChangeEventHook.invoke(event); Map parameters = - ImmutableMap.of(ATTEMPT_KEY, 1, PARENT_INSTANCE_URN_KEY, TEST_DATA_PROCESS_INSTANCE_PARENT_URN, - DATA_JOB_URN_KEY, TEST_DATA_JOB_URN); + ImmutableMap.of( + ATTEMPT_KEY, + 1, + PARENT_INSTANCE_URN_KEY, + TEST_DATA_PROCESS_INSTANCE_PARENT_URN, + DATA_JOB_URN_KEY, + TEST_DATA_JOB_URN); // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATA_PROCESS_INSTANCE_ENTITY_NAME, dataProcessInstanceUrn, ChangeCategory.RUN, - ChangeOperation.STARTED, null, parameters, actorUrn); + createChangeEvent( + DATA_PROCESS_INSTANCE_ENTITY_NAME, + dataProcessInstanceUrn, + ChangeCategory.RUN, + ChangeOperation.STARTED, + null, + parameters, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent, false); } @@ -521,7 +605,8 @@ public void testInvokeDataProcessInstanceRunEventComplete() throws Exception { event.setChangeType(ChangeType.UPSERT); DataProcessInstanceRunEvent dataProcessInstanceRunEvent = - new DataProcessInstanceRunEvent().setStatus(DataProcessRunStatus.COMPLETE) + new DataProcessInstanceRunEvent() + .setStatus(DataProcessRunStatus.COMPLETE) .setAttempt(1) .setResult(new DataProcessInstanceRunResult().setType(RunResultType.SUCCESS)); @@ -529,24 +614,38 @@ public void testInvokeDataProcessInstanceRunEventComplete() throws Exception { event.setCreated(new AuditStamp().setActor(actorUrn).setTime(EVENT_TIME)); DataProcessInstanceRelationships relationships = - new DataProcessInstanceRelationships().setParentInstance( - Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) + new DataProcessInstanceRelationships() + .setParentInstance(Urn.createFromString(TEST_DATA_PROCESS_INSTANCE_PARENT_URN)) .setParentTemplate(Urn.createFromString(TEST_DATA_FLOW_URN)); final EntityResponse entityResponse = - buildEntityResponse(ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); + buildEntityResponse( + ImmutableMap.of(DATA_PROCESS_INSTANCE_RELATIONSHIPS_ASPECT_NAME, relationships)); Mockito.when(_mockClient.getV2(eq(dataProcessInstanceUrn), any())).thenReturn(entityResponse); _entityChangeEventHook.invoke(event); Map parameters = - ImmutableMap.of(ATTEMPT_KEY, 1, RUN_RESULT_KEY, RunResultType.SUCCESS.toString(), PARENT_INSTANCE_URN_KEY, - TEST_DATA_PROCESS_INSTANCE_PARENT_URN, DATA_FLOW_URN_KEY, TEST_DATA_FLOW_URN); + ImmutableMap.of( + ATTEMPT_KEY, + 1, + RUN_RESULT_KEY, + 
RunResultType.SUCCESS.toString(), + PARENT_INSTANCE_URN_KEY, + TEST_DATA_PROCESS_INSTANCE_PARENT_URN, + DATA_FLOW_URN_KEY, + TEST_DATA_FLOW_URN); // Create Platform Event PlatformEvent platformEvent = - createChangeEvent(DATA_PROCESS_INSTANCE_ENTITY_NAME, dataProcessInstanceUrn, ChangeCategory.RUN, - ChangeOperation.COMPLETED, null, parameters, actorUrn); + createChangeEvent( + DATA_PROCESS_INSTANCE_ENTITY_NAME, + dataProcessInstanceUrn, + ChangeCategory.RUN, + ChangeOperation.COMPLETED, + null, + parameters, + actorUrn); verifyProducePlatformEvent(_mockClient, platformEvent, false); } @@ -572,8 +671,14 @@ public void testInvokeIneligibleAspect() throws Exception { Mockito.verifyNoMoreInteractions(_mockClient); } - private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, ChangeCategory category, - ChangeOperation operation, String modifier, Map parameters, Urn actor) { + private PlatformEvent createChangeEvent( + String entityType, + Urn entityUrn, + ChangeCategory category, + ChangeOperation operation, + String modifier, + Map parameters, + Urn actor) { final EntityChangeEvent changeEvent = new EntityChangeEvent(); changeEvent.setEntityType(entityType); changeEvent.setEntityUrn(entityUrn); @@ -582,7 +687,8 @@ private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, Change if (modifier != null) { changeEvent.setModifier(modifier); } - changeEvent.setAuditStamp(new AuditStamp().setActor(actor).setTime(EntityChangeEventGeneratorHookTest.EVENT_TIME)); + changeEvent.setAuditStamp( + new AuditStamp().setActor(actor).setTime(EntityChangeEventGeneratorHookTest.EVENT_TIME)); changeEvent.setVersion(0); if (parameters != null) { changeEvent.setParameters(new Parameters(new DataMap(parameters))); @@ -590,7 +696,8 @@ private PlatformEvent createChangeEvent(String entityType, Urn entityUrn, Change final PlatformEvent platformEvent = new PlatformEvent(); platformEvent.setName(CHANGE_EVENT_PLATFORM_EVENT_NAME); platformEvent.setHeader( - new PlatformEventHeader().setTimestampMillis(EntityChangeEventGeneratorHookTest.EVENT_TIME)); + new PlatformEventHeader() + .setTimestampMillis(EntityChangeEventGeneratorHookTest.EVENT_TIME)); platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent)); return platformEvent; } @@ -611,7 +718,8 @@ private EntityChangeEventGeneratorRegistry createEntityChangeEventGeneratorRegis // Run change event generators registry.register(ASSERTION_RUN_EVENT_ASPECT_NAME, new AssertionRunEventChangeEventGenerator()); - registry.register(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + registry.register( + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, new DataProcessInstanceRunEventChangeEventGenerator(_mockClient)); return registry; } @@ -637,7 +745,8 @@ private EntityRegistry createMockEntityRegistry() { Mockito.when(datasetSpec.getAspectSpec(eq(DOMAINS_ASPECT_NAME))).thenReturn(mockDomains); AspectSpec mockDeprecation = createMockAspectSpec(Deprecation.class); - Mockito.when(datasetSpec.getAspectSpec(eq(DEPRECATION_ASPECT_NAME))).thenReturn(mockDeprecation); + Mockito.when(datasetSpec.getAspectSpec(eq(DEPRECATION_ASPECT_NAME))) + .thenReturn(mockDeprecation); AspectSpec mockDatasetKey = createMockAspectSpec(DatasetKey.class); Mockito.when(datasetSpec.getAspectSpec(eq(DATASET_KEY_ASPECT_NAME))).thenReturn(mockDatasetKey); @@ -647,29 +756,39 @@ private EntityRegistry createMockEntityRegistry() { // Build Assertion Entity Spec EntitySpec assertionSpec = Mockito.mock(EntitySpec.class); AspectSpec mockAssertionRunEvent = 
createMockAspectSpec(AssertionRunEvent.class); - Mockito.when(assertionSpec.getAspectSpec(eq(ASSERTION_RUN_EVENT_ASPECT_NAME))).thenReturn(mockAssertionRunEvent); + Mockito.when(assertionSpec.getAspectSpec(eq(ASSERTION_RUN_EVENT_ASPECT_NAME))) + .thenReturn(mockAssertionRunEvent); Mockito.when(registry.getEntitySpec(eq(ASSERTION_ENTITY_NAME))).thenReturn(assertionSpec); // Build Data Process Instance Entity Spec EntitySpec dataProcessInstanceSpec = Mockito.mock(EntitySpec.class); - AspectSpec mockDataProcessInstanceRunEvent = createMockAspectSpec(DataProcessInstanceRunEvent.class); - Mockito.when(dataProcessInstanceSpec.getAspectSpec(eq(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME))) + AspectSpec mockDataProcessInstanceRunEvent = + createMockAspectSpec(DataProcessInstanceRunEvent.class); + Mockito.when( + dataProcessInstanceSpec.getAspectSpec(eq(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME))) .thenReturn(mockDataProcessInstanceRunEvent); - Mockito.when(registry.getEntitySpec(DATA_PROCESS_INSTANCE_ENTITY_NAME)).thenReturn(dataProcessInstanceSpec); + Mockito.when(registry.getEntitySpec(DATA_PROCESS_INSTANCE_ENTITY_NAME)) + .thenReturn(dataProcessInstanceSpec); return registry; } - private void verifyProducePlatformEvent(SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception { + private void verifyProducePlatformEvent( + SystemRestliEntityClient mockClient, PlatformEvent platformEvent) throws Exception { verifyProducePlatformEvent(mockClient, platformEvent, true); } - private void verifyProducePlatformEvent(SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) throws Exception { + private void verifyProducePlatformEvent( + SystemRestliEntityClient mockClient, PlatformEvent platformEvent, boolean noMoreInteractions) + throws Exception { // Verify event has been emitted. 
- verify(mockClient, Mockito.times(1)).producePlatformEvent(eq(CHANGE_EVENT_PLATFORM_EVENT_NAME), Mockito.anyString(),
- argThat(new PlatformEventMatcher(platformEvent)));
+ verify(mockClient, Mockito.times(1))
+ .producePlatformEvent(
+ eq(CHANGE_EVENT_PLATFORM_EVENT_NAME),
+ Mockito.anyString(),
+ argThat(new PlatformEventMatcher(platformEvent)));

if (noMoreInteractions) {
Mockito.verifyNoMoreInteractions(_mockClient);
@@ -686,9 +805,10 @@ private EntityResponse buildEntityResponse(Map<String, RecordTemplate> aspects)
final EntityResponse entityResponse = new EntityResponse();
final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap();
for (Map.Entry<String, RecordTemplate> entry : aspects.entrySet()) {
- aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
+ aspectMap.put(
+ entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data())));
}
entityResponse.setAspects(aspectMap);
return entityResponse;
}
-}
\ No newline at end of file
+}
diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java
index aafc87b2db5a2..8a3fb237e816f 100644
--- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java
+++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/event/PlatformEventMatcher.java
@@ -6,7 +6,6 @@
import javax.annotation.Nonnull;
import org.mockito.ArgumentMatcher;

-
public class PlatformEventMatcher implements ArgumentMatcher<PlatformEvent> {

private final PlatformEvent _expected;
@@ -16,26 +15,34 @@ public PlatformEventMatcher(@Nonnull final PlatformEvent expected) {

@Override
public boolean matches(@Nonnull final PlatformEvent actual) {
- return _expected.getName().equals(actual.getName()) && _expected.getHeader()
- .getTimestampMillis()
- .equals(actual.getHeader().getTimestampMillis()) && payloadMatches(actual);
+ return _expected.getName().equals(actual.getName())
+ && _expected
+ .getHeader()
+ .getTimestampMillis()
+ .equals(actual.getHeader().getTimestampMillis())
+ && payloadMatches(actual);
}

public boolean payloadMatches(@Nonnull final PlatformEvent actual) {
final EntityChangeEvent expectedChangeEvent =
- GenericRecordUtils.deserializePayload(_expected.getPayload().getValue(), EntityChangeEvent.class);
+ GenericRecordUtils.deserializePayload(
+ _expected.getPayload().getValue(), EntityChangeEvent.class);
final EntityChangeEvent actualChangeEvent =
- GenericRecordUtils.deserializePayload(actual.getPayload().getValue(), EntityChangeEvent.class);
- boolean requiredFieldsMatch = expectedChangeEvent.getEntityType().equals(actualChangeEvent.getEntityType())
- && expectedChangeEvent.getEntityUrn().equals(actualChangeEvent.getEntityUrn())
- && expectedChangeEvent.getCategory().equals(actualChangeEvent.getCategory())
- && expectedChangeEvent.getOperation().equals(actualChangeEvent.getOperation())
- && expectedChangeEvent.getAuditStamp().equals(actualChangeEvent.getAuditStamp())
- && expectedChangeEvent.getVersion().equals(actualChangeEvent.getVersion());
+ GenericRecordUtils.deserializePayload(
+ actual.getPayload().getValue(), EntityChangeEvent.class);
+ boolean requiredFieldsMatch =
+ expectedChangeEvent.getEntityType().equals(actualChangeEvent.getEntityType())
+ && expectedChangeEvent.getEntityUrn().equals(actualChangeEvent.getEntityUrn())
+ && expectedChangeEvent.getCategory().equals(actualChangeEvent.getCategory())
+ && 
expectedChangeEvent.getOperation().equals(actualChangeEvent.getOperation()) + && expectedChangeEvent.getAuditStamp().equals(actualChangeEvent.getAuditStamp()) + && expectedChangeEvent.getVersion().equals(actualChangeEvent.getVersion()); boolean modifierMatches = - !expectedChangeEvent.hasModifier() || expectedChangeEvent.getModifier().equals(actualChangeEvent.getModifier()); - boolean parametersMatch = !expectedChangeEvent.hasParameters() || expectedChangeEvent.getParameters() - .equals(actualChangeEvent.getParameters()); + !expectedChangeEvent.hasModifier() + || expectedChangeEvent.getModifier().equals(actualChangeEvent.getModifier()); + boolean parametersMatch = + !expectedChangeEvent.hasParameters() + || expectedChangeEvent.getParameters().equals(actualChangeEvent.getParameters()); return requiredFieldsMatch && modifierMatches && parametersMatch; } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java index a4aa00e228725..843502b2eefad 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHookTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook.ingestion; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; + import com.datahub.metadata.ingestion.IngestionScheduler; import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; @@ -10,13 +13,8 @@ import com.linkedin.mxe.MetadataChangeLog; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; - import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.kafka.hook.EntityRegistryTestUtil.ENTITY_REGISTRY; - - public class IngestionSchedulerHookTest { private IngestionSchedulerHook _ingestionSchedulerHook; @@ -33,18 +31,22 @@ public void testInvoke() throws Exception { event.setAspectName(INGESTION_INFO_ASPECT_NAME); event.setChangeType(ChangeType.UPSERT); final DataHubIngestionSourceInfo newInfo = new DataHubIngestionSourceInfo(); - newInfo.setSchedule(new DataHubIngestionSourceSchedule().setInterval("0 1 1 * *").setTimezone("UTC")); // Run every monday + newInfo.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval("0 1 1 * *") + .setTimezone("UTC")); // Run every monday newInfo.setType("redshift"); newInfo.setName("My Redshift Source"); - newInfo.setConfig(new DataHubIngestionSourceConfig() - .setExecutorId("default") - .setRecipe("{ type }") - .setVersion("0.8.18") - ); + newInfo.setConfig( + new DataHubIngestionSourceConfig() + .setExecutorId("default") + .setRecipe("{ type }") + .setVersion("0.8.18")); event.setAspect(GenericRecordUtils.serializeAspect(newInfo)); event.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0")); _ingestionSchedulerHook.invoke(event); - Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).scheduleNextIngestionSourceExecution(Mockito.any(), Mockito.any()); + Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)) + .scheduleNextIngestionSourceExecution(Mockito.any(), Mockito.any()); } @Test @@ -55,7 +57,8 @@ public void testInvokeDeleteKeyAspect() throws Exception { 
event2.setChangeType(ChangeType.DELETE); event2.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0")); _ingestionSchedulerHook.invoke(event2); - Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).unscheduleNextIngestionSourceExecution(Mockito.any()); + Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)) + .unscheduleNextIngestionSourceExecution(Mockito.any()); } @Test @@ -66,7 +69,8 @@ public void testInvokeDeleteInfoAspect() throws Exception { event2.setChangeType(ChangeType.DELETE); event2.setEntityUrn(Urn.createFromString("urn:li:dataHubIngestionSourceUrn:0")); _ingestionSchedulerHook.invoke(event2); - Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)).unscheduleNextIngestionSourceExecution(Mockito.any()); + Mockito.verify(_ingestionSchedulerHook.scheduler(), Mockito.times(1)) + .unscheduleNextIngestionSourceExecution(Mockito.any()); } @Test @@ -78,5 +82,3 @@ public void testInvokeWrongAspect() { Mockito.verifyNoInteractions(_ingestionSchedulerHook.scheduler()); } } - - diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java index 93e98b7343cd4..d4c6d122a6689 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/siblings/SiblingAssociationHookTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.kafka.hook.siblings; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.FabricType; @@ -31,16 +34,11 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; - - public class SiblingAssociationHookTest { private SiblingAssociationHook _siblingAssociationHook; SystemRestliEntityClient _mockEntityClient; @@ -48,11 +46,15 @@ public class SiblingAssociationHookTest { @BeforeMethod public void setupTest() { - EntityRegistry registry = new ConfigEntityRegistry( - SiblingAssociationHookTest.class.getClassLoader().getResourceAsStream("test-entity-registry-siblings.yml")); + EntityRegistry registry = + new ConfigEntityRegistry( + SiblingAssociationHookTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry-siblings.yml")); _mockEntityClient = Mockito.mock(SystemRestliEntityClient.class); _mockSearchService = Mockito.mock(EntitySearchService.class); - _siblingAssociationHook = new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); + _siblingAssociationHook = + new SiblingAssociationHook(registry, _mockEntityClient, _mockSearchService, true); _siblingAssociationHook.setEnabled(true); } @@ -61,23 +63,28 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception { SubTypes mockSourceSubtypesAspect = new SubTypes(); mockSourceSubtypesAspect.setTypeNames(new StringArray(ImmutableList.of("source"))); 
EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap(); - mockResponseMap.put(SUB_TYPES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); + mockResponseMap.put( + SUB_TYPES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); EntityResponse mockResponse = new EntityResponse(); mockResponse.setAspects(mockResponseMap); Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - Mockito.when( - _mockEntityClient.getV2( - Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), - ImmutableSet.of(SUB_TYPES_ASPECT_NAME) - )).thenReturn(mockResponse); - - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); - - Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + _mockEntityClient.getV2( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), + ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) + .thenReturn(mockResponse); + + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + + Upstream upstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); @@ -85,34 +92,52 @@ public void testInvokeWhenThereIsAPairWithDbtSourceNode() throws Exception { upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), eq(true)); - final Siblings sourceSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(false); + final Siblings sourceSiblingsAspect = + 
new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(false); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); - proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + proposal2.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); proposal2.setEntityType(DATASET_ENTITY_NAME); proposal2.setAspectName(SIBLINGS_ASPECT_NAME); proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal2), eq(true)); } @Test @@ -123,20 +148,27 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); EnvelopedAspectMap mockResponseMap = new EnvelopedAspectMap(); - mockResponseMap.put(SUB_TYPES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); + mockResponseMap.put( + SUB_TYPES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(mockSourceSubtypesAspect.data()))); EntityResponse mockResponse = new EntityResponse(); mockResponse.setAspects(mockResponseMap); Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - Mockito.when( - _mockEntityClient.getV2( - Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), - ImmutableSet.of(SUB_TYPES_ASPECT_NAME))).thenReturn(mockResponse); - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); - Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + _mockEntityClient.getV2( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)"), + ImmutableSet.of(SUB_TYPES_ASPECT_NAME))) + .thenReturn(mockResponse); + + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + Upstream upstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); @@ -145,65 +177,96 @@ public void testInvokeWhenThereIsNoPairWithDbtModel() throws Exception { upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + 
Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(0)) + .ingestProposal(Mockito.eq(proposal), eq(true)); } @Test public void testInvokeWhenThereIsAPairWithBigqueryDownstreamNode() throws Exception { Mockito.when(_mockEntityClient.exists(Mockito.any())).thenReturn(true); - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream upstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream upstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(upstream); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), eq(true)); - final Siblings sourceSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) - 
.setPrimary(false); + final Siblings sourceSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(false); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); - proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + proposal2.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); proposal2.setEntityType(DATASET_ENTITY_NAME); proposal2.setAspectName(SIBLINGS_ASPECT_NAME); proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal2), eq(true)); } @Test @@ -214,126 +277,176 @@ public void testInvokeWhenThereIsAKeyBeingReingested() throws Exception { SearchEntityArray returnEntityArray = new SearchEntityArray(); SearchEntity returnArrayValue = new SearchEntity(); returnArrayValue.setEntity( - Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)") - ); + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); returnEntityArray.add(returnArrayValue); returnSearchResult.setEntities(returnEntityArray); Mockito.when( - _mockSearchService.search( - any(), anyString(), any(), any(), anyInt(), anyInt(), eq(new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)) - )).thenReturn(returnSearchResult); - - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, DATASET_KEY_ASPECT_NAME, ChangeType.UPSERT); + _mockSearchService.search( + any(), + anyString(), + any(), + any(), + anyInt(), + anyInt(), + eq( + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true)))) + .thenReturn(returnSearchResult); + + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, DATASET_KEY_ASPECT_NAME, ChangeType.UPSERT); final DatasetKey datasetKey = new DatasetKey(); datasetKey.setName("my-proj.jaffle_shop.customers"); datasetKey.setOrigin(FabricType.PROD); datasetKey.setPlatform(DataPlatformUrn.createFromString("urn:li:dataPlatform:bigquery")); event.setAspect(GenericRecordUtils.serializeAspect(datasetKey)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - final Siblings dbtSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(true); + final Siblings dbtSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(true); final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); + proposal.setEntityUrn( + 
Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")); proposal.setEntityType(DATASET_ENTITY_NAME); proposal.setAspectName(SIBLINGS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(dbtSiblingsAspect)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), eq(true)); - final Siblings sourceSiblingsAspect = new Siblings() - .setSiblings(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) - .setPrimary(false); + final Siblings sourceSiblingsAspect = + new Siblings() + .setSiblings( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.jaffle_shop.customers,PROD)")))) + .setPrimary(false); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); - proposal2.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + proposal2.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); proposal2.setEntityType(DATASET_ENTITY_NAME); proposal2.setAspectName(SIBLINGS_ASPECT_NAME); proposal2.setAspect(GenericRecordUtils.serializeAspect(sourceSiblingsAspect)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockEntityClient, Mockito.times(1)).ingestProposal(Mockito.eq(proposal2), eq(true)); + Mockito.verify(_mockEntityClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal2), eq(true)); } + @Test public void testInvokeWhenSourceUrnHasTwoDbtUpstreams() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream dbtUpstream1 = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", DatasetLineageType.TRANSFORMED); - Upstream dbtUpstream2 = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity2,PROD)", DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream1 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", + DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream2 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity2,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(dbtUpstream1); upstreamArray.add(dbtUpstream2); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); - - } @Test public void testInvokeWhenSourceUrnHasTwoUpstreamsOneDbt() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, 
ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream dbtUpstream = createUpstream("urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", DatasetLineageType.TRANSFORMED); + Upstream dbtUpstream = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:dbt,my-proj.source_entity1,PROD)", + DatasetLineageType.TRANSFORMED); Upstream snowflakeUpstream = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop.customers,PROD)", DatasetLineageType.TRANSFORMED); + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(dbtUpstream); upstreamArray.add(snowflakeUpstream); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(2)).ingestProposal(Mockito.any(), eq(true)); } @Test public void testInvokeWhenSourceUrnHasTwoUpstreamsNoDbt() throws Exception { - MetadataChangeLog event = createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); + MetadataChangeLog event = + createEvent(DATASET_ENTITY_NAME, UPSTREAM_LINEAGE_ASPECT_NAME, ChangeType.UPSERT); final UpstreamLineage upstreamLineage = new UpstreamLineage(); final UpstreamArray upstreamArray = new UpstreamArray(); - Upstream snowflakeUpstream1 = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", DatasetLineageType.TRANSFORMED); + Upstream snowflakeUpstream1 = + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop1.customers,PROD)", + DatasetLineageType.TRANSFORMED); Upstream snowflakeUpstream2 = - createUpstream("urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", DatasetLineageType.TRANSFORMED); + createUpstream( + "urn:li:dataset:(urn:li:dataPlatform:snowflake,my-proj.jaffle_shop2.customers,PROD)", + DatasetLineageType.TRANSFORMED); upstreamArray.add(snowflakeUpstream1); upstreamArray.add(snowflakeUpstream2); upstreamLineage.setUpstreams(upstreamArray); event.setAspect(GenericRecordUtils.serializeAspect(upstreamLineage)); - event.setEntityUrn(Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); + event.setEntityUrn( + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:bigquery,my-proj.jaffle_shop.customers,PROD)")); _siblingAssociationHook.invoke(event); - Mockito.verify(_mockEntityClient, Mockito.times(0)).ingestProposal(Mockito.any(), eq(true)); } - private MetadataChangeLog createEvent(String entityType, String aspectName, ChangeType changeType) { + private MetadataChangeLog createEvent( + String entityType, String aspectName, ChangeType changeType) { MetadataChangeLog event = new MetadataChangeLog(); event.setEntityType(entityType); event.setAspectName(aspectName); event.setChangeType(changeType); return event; } + private Upstream createUpstream(String urn, DatasetLineageType 
upstreamType) { final Upstream upstream = new Upstream(); @@ -346,6 +459,4 @@ private Upstream createUpstream(String urn, DatasetLineageType upstreamType) { return upstream; } - - - } +} diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java index b46308873ca16..6d1bdca9c116f 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka.hook.spring; +import static org.testng.AssertJUnit.*; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.kafka.MetadataChangeLogProcessor; import com.linkedin.metadata.kafka.hook.UpdateIndicesHook; @@ -13,29 +15,34 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - -@SpringBootTest(classes = { - MCLSpringTestConfiguration.class, ConfigurationProvider.class - }, +@SpringBootTest( + classes = {MCLSpringTestConfiguration.class, ConfigurationProvider.class}, properties = { "ingestionScheduler.enabled=false", "configEntityRegistry.path=../../metadata-jobs/mae-consumer/src/test/resources/test-entity-registry.yml", "kafka.schemaRegistry.type=INTERNAL" - }) -@TestPropertySource(locations = "classpath:/application.yml", properties = { - "MCL_CONSUMER_ENABLED=true" -}) + }) +@TestPropertySource( + locations = "classpath:/application.yml", + properties = {"MCL_CONSUMER_ENABLED=true"}) @EnableAutoConfiguration(exclude = {CassandraAutoConfiguration.class}) public class MCLSpringTest extends AbstractTestNGSpringContextTests { @Test public void testHooks() { - MetadataChangeLogProcessor metadataChangeLogProcessor = applicationContext.getBean(MetadataChangeLogProcessor.class); - assertTrue(metadataChangeLogProcessor.getHooks().stream().noneMatch(hook -> hook instanceof IngestionSchedulerHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof UpdateIndicesHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof SiblingAssociationHook)); - assertTrue(metadataChangeLogProcessor.getHooks().stream().anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); + MetadataChangeLogProcessor metadataChangeLogProcessor = + applicationContext.getBean(MetadataChangeLogProcessor.class); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .noneMatch(hook -> hook instanceof IngestionSchedulerHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof UpdateIndicesHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof SiblingAssociationHook)); + assertTrue( + metadataChangeLogProcessor.getHooks().stream() + .anyMatch(hook -> hook instanceof EntityChangeEventGeneratorHook)); } } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index 1d9c17c676990..9d646819932e9 100644 --- 
a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -19,40 +19,32 @@ import org.springframework.context.annotation.Configuration; import org.springframework.kafka.core.DefaultKafkaConsumerFactory; - @Configuration -@ComponentScan(basePackages = { - "com.linkedin.metadata.kafka", - "com.linkedin.gms.factory.entity.update.indices" -}) +@ComponentScan( + basePackages = { + "com.linkedin.metadata.kafka", + "com.linkedin.gms.factory.entity.update.indices" + }) public class MCLSpringTestConfiguration { - @MockBean - public EntityRegistry entityRegistry; + @MockBean public EntityRegistry entityRegistry; - @MockBean - public ElasticSearchGraphService graphService; + @MockBean public ElasticSearchGraphService graphService; - @MockBean - public TimeseriesAspectService timeseriesAspectService; + @MockBean public TimeseriesAspectService timeseriesAspectService; - @MockBean - public SystemMetadataService systemMetadataService; + @MockBean public SystemMetadataService systemMetadataService; - @MockBean - public SearchDocumentTransformer searchDocumentTransformer; + @MockBean public SearchDocumentTransformer searchDocumentTransformer; - @MockBean - public IngestionScheduler ingestionScheduler; + @MockBean public IngestionScheduler ingestionScheduler; @MockBean(name = "systemRestliEntityClient") public SystemRestliEntityClient entityClient; - @MockBean - public ElasticSearchService searchService; + @MockBean public ElasticSearchService searchService; - @MockBean - public Authentication systemAuthentication; + @MockBean public Authentication systemAuthentication; @MockBean(name = "dataHubUpgradeKafkaListener") public DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener; @@ -63,9 +55,7 @@ public class MCLSpringTestConfiguration { @MockBean(name = "duheKafkaConsumerFactory") public DefaultKafkaConsumerFactory defaultKafkaConsumerFactory; - @MockBean - public SchemaRegistryService schemaRegistryService; + @MockBean public SchemaRegistryService schemaRegistryService; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java index f0c59240a9ba4..0d8192a823865 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/kafka/MceConsumerApplication.java @@ -12,34 +12,35 @@ import org.springframework.context.annotation.FilterType; import org.springframework.context.annotation.PropertySource; - @SuppressWarnings("checkstyle:HideUtilityClassConstructor") -@SpringBootApplication(exclude = { - ElasticsearchRestClientAutoConfiguration.class, - CassandraAutoConfiguration.class, - SolrHealthContributorAutoConfiguration.class -}) -@ComponentScan(basePackages = { - "com.linkedin.metadata.boot.kafka", - "com.linkedin.gms.factory.auth", - "com.linkedin.gms.factory.common", - "com.linkedin.gms.factory.config", - "com.linkedin.gms.factory.entity", - "com.linkedin.gms.factory.entityregistry", - "com.linkedin.gms.factory.kafka", - "com.linkedin.gms.factory.search", - "com.linkedin.gms.factory.secret", - "com.linkedin.gms.factory.timeseries", 
- "com.linkedin.restli.server", - "com.linkedin.metadata.restli", - "com.linkedin.metadata.kafka", - "com.linkedin.metadata.dao.producer" -}, excludeFilters = { - @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = { - ScheduledAnalyticsFactory.class, - RestliEntityClientFactory.class - }) -}) +@SpringBootApplication( + exclude = { + ElasticsearchRestClientAutoConfiguration.class, + CassandraAutoConfiguration.class, + SolrHealthContributorAutoConfiguration.class + }) +@ComponentScan( + basePackages = { + "com.linkedin.metadata.boot.kafka", + "com.linkedin.gms.factory.auth", + "com.linkedin.gms.factory.common", + "com.linkedin.gms.factory.config", + "com.linkedin.gms.factory.entity", + "com.linkedin.gms.factory.entityregistry", + "com.linkedin.gms.factory.kafka", + "com.linkedin.gms.factory.search", + "com.linkedin.gms.factory.secret", + "com.linkedin.gms.factory.timeseries", + "com.linkedin.restli.server", + "com.linkedin.metadata.restli", + "com.linkedin.metadata.kafka", + "com.linkedin.metadata.dao.producer" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = {ScheduledAnalyticsFactory.class, RestliEntityClientFactory.class}) + }) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MceConsumerApplication { diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java index abd73d03a7b55..990e0df102d37 100644 --- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java +++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/EbeanServerConfig.java @@ -1,74 +1,71 @@ package com.linkedin.metadata.restli; +import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts; + import io.ebean.datasource.DataSourceConfig; +import java.util.HashMap; +import java.util.Map; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; -import java.util.HashMap; -import java.util.Map; - -import static com.linkedin.gms.factory.common.LocalEbeanServerConfigFactory.getListenerToTrackCounts; - @Configuration public class EbeanServerConfig { - @Value("${ebean.username}") - private String ebeanDatasourceUsername; - - @Value("${ebean.password}") - private String ebeanDatasourcePassword; + @Value("${ebean.username}") + private String ebeanDatasourceUsername; - @Value("${ebean.driver}") - private String ebeanDatasourceDriver; + @Value("${ebean.password}") + private String ebeanDatasourcePassword; - @Value("${ebean.minConnections:1}") - private Integer ebeanMinConnections; + @Value("${ebean.driver}") + private String ebeanDatasourceDriver; - @Value("${ebean.maxInactiveTimeSeconds:120}") - private Integer ebeanMaxInactiveTimeSecs; + @Value("${ebean.minConnections:1}") + private Integer ebeanMinConnections; - @Value("${ebean.maxAgeMinutes:120}") - private Integer ebeanMaxAgeMinutes; + @Value("${ebean.maxInactiveTimeSeconds:120}") + private Integer ebeanMaxInactiveTimeSecs; - @Value("${ebean.leakTimeMinutes:15}") - private Integer ebeanLeakTimeMinutes; + @Value("${ebean.maxAgeMinutes:120}") + private Integer ebeanMaxAgeMinutes; 
-    @Value("${ebean.waitTimeoutMillis:1000}")
-    private Integer ebeanWaitTimeoutMillis;
+  @Value("${ebean.leakTimeMinutes:15}")
+  private Integer ebeanLeakTimeMinutes;
 
-    @Value("${ebean.autoCreateDdl:false}")
-    private Boolean ebeanAutoCreate;
+  @Value("${ebean.waitTimeoutMillis:1000}")
+  private Integer ebeanWaitTimeoutMillis;
 
-    @Value("${ebean.postgresUseIamAuth:false}")
-    private Boolean postgresUseIamAuth;
+  @Value("${ebean.autoCreateDdl:false}")
+  private Boolean ebeanAutoCreate;
 
+  @Value("${ebean.postgresUseIamAuth:false}")
+  private Boolean postgresUseIamAuth;
 
-    @Bean("ebeanDataSourceConfig")
-    @Primary
-    public DataSourceConfig buildDataSourceConfig(
-        @Value("${ebean.url}") String dataSourceUrl,
-        @Qualifier("parseqEngineThreads") int ebeanMaxConnections
-    ) {
-        DataSourceConfig dataSourceConfig = new DataSourceConfig();
-        dataSourceConfig.setUsername(ebeanDatasourceUsername);
-        dataSourceConfig.setPassword(ebeanDatasourcePassword);
-        dataSourceConfig.setUrl(dataSourceUrl);
-        dataSourceConfig.setDriver(ebeanDatasourceDriver);
-        dataSourceConfig.setMinConnections(ebeanMinConnections);
-        dataSourceConfig.setMaxConnections(ebeanMaxConnections);
-        dataSourceConfig.setMaxInactiveTimeSecs(ebeanMaxInactiveTimeSecs);
-        dataSourceConfig.setMaxAgeMinutes(ebeanMaxAgeMinutes);
-        dataSourceConfig.setLeakTimeMinutes(ebeanLeakTimeMinutes);
-        dataSourceConfig.setWaitTimeoutMillis(ebeanWaitTimeoutMillis);
-        dataSourceConfig.setListener(getListenerToTrackCounts("mce-consumer"));
-        // Adding IAM auth access for AWS Postgres
-        if (postgresUseIamAuth) {
-            Map<String, String> custom = new HashMap<>();
-            custom.put("wrapperPlugins", "iam");
-            dataSourceConfig.setCustomProperties(custom);
-        }
-        return dataSourceConfig;
+  @Bean("ebeanDataSourceConfig")
+  @Primary
+  public DataSourceConfig buildDataSourceConfig(
+      @Value("${ebean.url}") String dataSourceUrl,
+      @Qualifier("parseqEngineThreads") int ebeanMaxConnections) {
+    DataSourceConfig dataSourceConfig = new DataSourceConfig();
+    dataSourceConfig.setUsername(ebeanDatasourceUsername);
+    dataSourceConfig.setPassword(ebeanDatasourcePassword);
+    dataSourceConfig.setUrl(dataSourceUrl);
+    dataSourceConfig.setDriver(ebeanDatasourceDriver);
+    dataSourceConfig.setMinConnections(ebeanMinConnections);
+    dataSourceConfig.setMaxConnections(ebeanMaxConnections);
+    dataSourceConfig.setMaxInactiveTimeSecs(ebeanMaxInactiveTimeSecs);
+    dataSourceConfig.setMaxAgeMinutes(ebeanMaxAgeMinutes);
+    dataSourceConfig.setLeakTimeMinutes(ebeanLeakTimeMinutes);
+    dataSourceConfig.setWaitTimeoutMillis(ebeanWaitTimeoutMillis);
+    dataSourceConfig.setListener(getListenerToTrackCounts("mce-consumer"));
+    // Adding IAM auth access for AWS Postgres
+    if (postgresUseIamAuth) {
+      Map<String, String> custom = new HashMap<>();
+      custom.put("wrapperPlugins", "iam");
+      dataSourceConfig.setCustomProperties(custom);
     }
+    return dataSourceConfig;
+  }
 }
diff --git a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java
index 563cc5ce04c66..4d7e10d694c4e 100644
--- a/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java
+++ b/metadata-jobs/mce-consumer-job/src/main/java/com/linkedin/metadata/restli/RestliServletConfig.java
@@ -6,6 +6,7 @@
 import com.linkedin.parseq.retry.backoff.ExponentialBackoff;
 import com.linkedin.restli.client.Client;
 import com.linkedin.restli.server.RestliHandlerServlet;
+import java.net.URI;
 import 
org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.web.servlet.FilterRegistrationBean; @@ -15,54 +16,53 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; -import java.net.URI; - @Configuration @Import({SystemAuthenticationFactory.class}) public class RestliServletConfig { - @Value("${server.port}") - private int configuredPort; + @Value("${server.port}") + private int configuredPort; - @Value("${entityClient.retryInterval:2}") - private int retryInterval; + @Value("${entityClient.retryInterval:2}") + private int retryInterval; - @Value("${entityClient.numRetries:3}") - private int numRetries; + @Value("${entityClient.numRetries:3}") + private int numRetries; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = String.format("http://localhost:%s/gms/", configuredPort); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); - } + @Bean("restliEntityClient") + @Primary + public RestliEntityClient restliEntityClient() { + String selfUri = String.format("http://localhost:%s/gms/", configuredPort); + final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); + return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); + } - @Bean("restliServletRegistration") - public ServletRegistrationBean restliServletRegistration( - @Qualifier("restliHandlerServlet") RestliHandlerServlet servlet) { - return new ServletRegistrationBean<>(servlet, "/gms/*"); - } + @Bean("restliServletRegistration") + public ServletRegistrationBean restliServletRegistration( + @Qualifier("restliHandlerServlet") RestliHandlerServlet servlet) { + return new ServletRegistrationBean<>(servlet, "/gms/*"); + } - @Bean - public RestliHandlerServlet restliHandlerServlet() { - return new RestliHandlerServlet(); - } + @Bean + public RestliHandlerServlet restliHandlerServlet() { + return new RestliHandlerServlet(); + } - @Bean - public FilterRegistrationBean authenticationFilterRegistrationBean( - @Qualifier("restliServletRegistration") ServletRegistrationBean servlet - ) { - FilterRegistrationBean registrationBean = new FilterRegistrationBean<>(); - registrationBean.addServletRegistrationBeans(servlet); - registrationBean.setOrder(1); - return registrationBean; - } + @Bean + public FilterRegistrationBean authenticationFilterRegistrationBean( + @Qualifier("restliServletRegistration") + ServletRegistrationBean servlet) { + FilterRegistrationBean registrationBean = new FilterRegistrationBean<>(); + registrationBean.addServletRegistrationBeans(servlet); + registrationBean.setOrder(1); + return registrationBean; + } - @Bean - public AuthenticationFilter authenticationFilter(FilterRegistrationBean filterReg) { - AuthenticationFilter filter = new AuthenticationFilter(); - filterReg.setFilter(filter); - return filter; - } + @Bean + public AuthenticationFilter authenticationFilter( + FilterRegistrationBean filterReg) { + AuthenticationFilter filter = new AuthenticationFilter(); + filterReg.setFilter(filter); + return filter; + } } diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java 
b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java index c23cf1ea3d165..714c7b899ff49 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTest.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.kafka; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; +import static org.testng.AssertJUnit.assertTrue; + import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import org.springframework.beans.factory.annotation.Autowired; @@ -9,30 +13,25 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.when; -import static org.testng.AssertJUnit.assertTrue; - @ActiveProfiles("test") -@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, - classes = {MceConsumerApplication.class, MceConsumerApplicationTestConfiguration.class}) +@SpringBootTest( + webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, + classes = {MceConsumerApplication.class, MceConsumerApplicationTestConfiguration.class}) public class MceConsumerApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - private TestRestTemplate restTemplate; + @Autowired private TestRestTemplate restTemplate; - @Autowired - private EntityService _mockEntityService; + @Autowired private EntityService _mockEntityService; - @Test - public void testRestliServletConfig() { - RestoreIndicesResult mockResult = new RestoreIndicesResult(); - mockResult.setRowsMigrated(100); - when(_mockEntityService.restoreIndices(any(), any())).thenReturn(mockResult); + @Test + public void testRestliServletConfig() { + RestoreIndicesResult mockResult = new RestoreIndicesResult(); + mockResult.setRowsMigrated(100); + when(_mockEntityService.restoreIndices(any(), any())).thenReturn(mockResult); - String response = this.restTemplate - .postForObject("/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); - assertTrue(response.contains(mockResult.toString())); - } + String response = + this.restTemplate.postForObject( + "/gms/aspects?action=restoreIndices", "{\"urn\":\"\"}", String.class); + assertTrue(response.contains(mockResult.toString())); + } } diff --git a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java index bee1441b5aaf6..1a44265c7a92a 100644 --- a/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mce-consumer-job/src/test/java/com/linkedin/metadata/kafka/MceConsumerApplicationTestConfiguration.java @@ -13,6 +13,7 @@ import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; import io.ebean.Database; +import java.net.URI; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -21,44 +22,33 @@ import org.springframework.context.annotation.Import; import 
org.springframework.context.annotation.Primary; -import java.net.URI; - @TestConfiguration @Import(value = {SystemAuthenticationFactory.class}) public class MceConsumerApplicationTestConfiguration { - @Autowired - private TestRestTemplate restTemplate; + @Autowired private TestRestTemplate restTemplate; - @MockBean - public KafkaHealthChecker kafkaHealthChecker; + @MockBean public KafkaHealthChecker kafkaHealthChecker; - @MockBean - public EntityService _entityService; + @MockBean public EntityService _entityService; - @Bean("restliEntityClient") - @Primary - public RestliEntityClient restliEntityClient() { - String selfUri = restTemplate.getRootUri(); - final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); - return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); - } + @Bean("restliEntityClient") + @Primary + public RestliEntityClient restliEntityClient() { + String selfUri = restTemplate.getRootUri(); + final Client restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(selfUri), null); + return new RestliEntityClient(restClient, new ExponentialBackoff(1), 1); + } - @MockBean - public Database ebeanServer; + @MockBean public Database ebeanServer; - @MockBean - protected TimeseriesAspectService timeseriesAspectService; + @MockBean protected TimeseriesAspectService timeseriesAspectService; - @MockBean - protected EntityRegistry entityRegistry; + @MockBean protected EntityRegistry entityRegistry; - @MockBean - protected ConfigEntityRegistry configEntityRegistry; + @MockBean protected ConfigEntityRegistry configEntityRegistry; - @MockBean - protected SiblingGraphService siblingGraphService; + @MockBean protected SiblingGraphService siblingGraphService; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java index 76e13d5e4da23..b04ecc7761eb6 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/McpConsumerConfig.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.kafka; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; @@ -13,9 +15,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static com.linkedin.metadata.Constants.*; - - @Controller @Import(GitVersionFactory.class) public class McpConsumerConfig { @@ -24,10 +23,15 @@ public class McpConsumerConfig { private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + 
.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } public McpConsumerConfig(GitVersion gitVersion) throws JsonProcessingException { diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java index c30dd6e6f96dc..217b826689c7c 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeEventsProcessor.java @@ -6,8 +6,8 @@ import com.linkedin.entity.Entity; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; import com.linkedin.metadata.snapshot.Snapshot; @@ -18,7 +18,6 @@ import com.linkedin.r2.RemoteInvocationException; import java.io.IOException; import javax.annotation.Nonnull; - import lombok.NonNull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -35,27 +34,38 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(MetadataChangeProposalProcessorCondition.class) -@Import({RestliEntityClientFactory.class, KafkaEventConsumerFactory.class, DataHubKafkaProducerFactory.class}) +@Import({ + RestliEntityClientFactory.class, + KafkaEventConsumerFactory.class, + DataHubKafkaProducerFactory.class +}) @EnableKafka @RequiredArgsConstructor public class MetadataChangeEventsProcessor { - @NonNull - private final Authentication systemAuthentication; + @NonNull private final Authentication systemAuthentication; private final SystemRestliEntityClient entityClient; private final Producer kafkaProducer; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - @Value("${FAILED_METADATA_CHANGE_EVENT_NAME:${KAFKA_FMCE_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_EVENT + "}}") + @Value( + "${FAILED_METADATA_CHANGE_EVENT_NAME:${KAFKA_FMCE_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_EVENT + + "}}") private String fmceTopicName; - @KafkaListener(id = "${METADATA_CHANGE_EVENT_KAFKA_CONSUMER_GROUP_ID:mce-consumer-job-client}", topics = - "${METADATA_CHANGE_EVENT_NAME:${KAFKA_MCE_TOPIC_NAME:" + Topics.METADATA_CHANGE_EVENT + "}}", containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = "${METADATA_CHANGE_EVENT_KAFKA_CONSUMER_GROUP_ID:mce-consumer-job-client}", + topics = + "${METADATA_CHANGE_EVENT_NAME:${KAFKA_MCE_TOPIC_NAME:" + + Topics.METADATA_CHANGE_EVENT + + "}}", + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); @@ -77,21 +87,26 @@ public void consume(final ConsumerRecord consumerRecord) } private void sendFailedMCE(@Nonnull MetadataChangeEvent event, @Nonnull Throwable 
throwable) { - final FailedMetadataChangeEvent failedMetadataChangeEvent = createFailedMCEEvent(event, throwable); + final FailedMetadataChangeEvent failedMetadataChangeEvent = + createFailedMCEEvent(event, throwable); try { - final GenericRecord genericFailedMCERecord = EventUtils.pegasusToAvroFailedMCE(failedMetadataChangeEvent); + final GenericRecord genericFailedMCERecord = + EventUtils.pegasusToAvroFailedMCE(failedMetadataChangeEvent); log.debug("Sending FailedMessages to topic - {}", fmceTopicName); - log.info("Error while processing MCE: FailedMetadataChangeEvent - {}", failedMetadataChangeEvent); + log.info( + "Error while processing MCE: FailedMetadataChangeEvent - {}", failedMetadataChangeEvent); kafkaProducer.send(new ProducerRecord<>(fmceTopicName, genericFailedMCERecord)); } catch (IOException e) { - log.error("Error while sending FailedMetadataChangeEvent: Exception - {}, FailedMetadataChangeEvent - {}", - e.getStackTrace(), failedMetadataChangeEvent); + log.error( + "Error while sending FailedMetadataChangeEvent: Exception - {}, FailedMetadataChangeEvent - {}", + e.getStackTrace(), + failedMetadataChangeEvent); } } @Nonnull - private FailedMetadataChangeEvent createFailedMCEEvent(@Nonnull MetadataChangeEvent event, - @Nonnull Throwable throwable) { + private FailedMetadataChangeEvent createFailedMCEEvent( + @Nonnull MetadataChangeEvent event, @Nonnull Throwable throwable) { final FailedMetadataChangeEvent fmce = new FailedMetadataChangeEvent(); fmce.setError(ExceptionUtils.getStackTrace(throwable)); fmce.setMetadataChangeEvent(event); @@ -103,6 +118,7 @@ private void processProposedSnapshot(@Nonnull MetadataChangeEvent metadataChange final Snapshot snapshotUnion = metadataChangeEvent.getProposedSnapshot(); final Entity entity = new Entity().setValue(snapshotUnion); // TODO: GMS Auth Part 2: Get the actor identity from the event header itself. 
- entityClient.updateWithSystemMetadata(entity, metadataChangeEvent.getSystemMetadata(), this.systemAuthentication); + entityClient.updateWithSystemMetadata( + entity, metadataChangeEvent.getSystemMetadata(), this.systemAuthentication); } } diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java index 79f8c90af8ec7..b487ded6a9439 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/MetadataChangeProposalsProcessor.java @@ -4,8 +4,8 @@ import com.codahale.metrics.MetricRegistry; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; -import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; import com.linkedin.metadata.EventUtils; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -14,7 +14,6 @@ import com.linkedin.mxe.Topics; import java.io.IOException; import javax.annotation.Nonnull; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; @@ -30,10 +29,13 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component -@Import({RestliEntityClientFactory.class, KafkaEventConsumerFactory.class, DataHubKafkaProducerFactory.class}) +@Import({ + RestliEntityClientFactory.class, + KafkaEventConsumerFactory.class, + DataHubKafkaProducerFactory.class +}) @Conditional(MetadataChangeProposalProcessorCondition.class) @EnableKafka @RequiredArgsConstructor @@ -42,14 +44,19 @@ public class MetadataChangeProposalsProcessor { private final SystemRestliEntityClient entityClient; private final Producer kafkaProducer; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); - @Value("${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_PROPOSAL + "}") + @Value( + "${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_PROPOSAL + + "}") private String fmcpTopicName; - @KafkaListener(id = "${METADATA_CHANGE_PROPOSAL_KAFKA_CONSUMER_GROUP_ID:generic-mce-consumer-job-client}", topics = - "${METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.METADATA_CHANGE_PROPOSAL - + "}", containerFactory = "kafkaEventConsumer") + @KafkaListener( + id = "${METADATA_CHANGE_PROPOSAL_KAFKA_CONSUMER_GROUP_ID:generic-mce-consumer-job-client}", + topics = "${METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.METADATA_CHANGE_PROPOSAL + "}", + containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord consumerRecord) { kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); @@ -69,21 +76,27 @@ public void consume(final ConsumerRecord consumerRecord) } private void sendFailedMCP(@Nonnull MetadataChangeProposal event, @Nonnull Throwable throwable) { - final 
FailedMetadataChangeProposal failedMetadataChangeProposal = createFailedMCPEvent(event, throwable); + final FailedMetadataChangeProposal failedMetadataChangeProposal = + createFailedMCPEvent(event, throwable); try { - final GenericRecord genericFailedMCERecord = EventUtils.pegasusToAvroFailedMCP(failedMetadataChangeProposal); + final GenericRecord genericFailedMCERecord = + EventUtils.pegasusToAvroFailedMCP(failedMetadataChangeProposal); log.debug("Sending FailedMessages to topic - {}", fmcpTopicName); - log.info("Error while processing FMCP: FailedMetadataChangeProposal - {}", failedMetadataChangeProposal); + log.info( + "Error while processing FMCP: FailedMetadataChangeProposal - {}", + failedMetadataChangeProposal); kafkaProducer.send(new ProducerRecord<>(fmcpTopicName, genericFailedMCERecord)); } catch (IOException e) { - log.error("Error while sending FailedMetadataChangeProposal: Exception - {}, FailedMetadataChangeProposal - {}", - e.getStackTrace(), failedMetadataChangeProposal); + log.error( + "Error while sending FailedMetadataChangeProposal: Exception - {}, FailedMetadataChangeProposal - {}", + e.getStackTrace(), + failedMetadataChangeProposal); } } @Nonnull - private FailedMetadataChangeProposal createFailedMCPEvent(@Nonnull MetadataChangeProposal event, - @Nonnull Throwable throwable) { + private FailedMetadataChangeProposal createFailedMCPEvent( + @Nonnull MetadataChangeProposal event, @Nonnull Throwable throwable) { final FailedMetadataChangeProposal fmcp = new FailedMetadataChangeProposal(); fmcp.setError(ExceptionUtils.getStackTrace(throwable)); fmcp.setMetadataChangeProposal(event); diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java index 1b69b1113bdb1..2bbc8304f2e27 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/ApplicationStartupListener.java @@ -4,6 +4,7 @@ import com.linkedin.metadata.boot.BootstrapManager; import com.linkedin.metadata.boot.kafka.DataHubUpgradeKafkaListener; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationListener; @@ -12,25 +13,22 @@ import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; -import javax.annotation.Nonnull; - - -/** - * Responsible for coordinating starting steps that happen before the application starts up. - */ +/** Responsible for coordinating starting steps that happen before the application starts up. 
*/ @Slf4j @Component @Conditional(MetadataChangeProposalProcessorCondition.class) public class ApplicationStartupListener implements ApplicationListener { - private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); + private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = + String.format("%s:", WebApplicationContext.class.getName()); private final DataHubUpgradeKafkaListener _dataHubUpgradeKafkaListener; private final ConfigurationProvider _configurationProvider; private final BootstrapManager _mcpBootstrapManager; public ApplicationStartupListener( - @Qualifier("dataHubUpgradeKafkaListener") DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, + @Qualifier("dataHubUpgradeKafkaListener") + DataHubUpgradeKafkaListener dataHubUpgradeKafkaListener, ConfigurationProvider configurationProvider, @Qualifier("mcpBootstrapManager") BootstrapManager bootstrapManager) { _dataHubUpgradeKafkaListener = dataHubUpgradeKafkaListener; diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java index 44e5c7cff8661..0220764cd99d6 100644 --- a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/boot/MCPBootstrapManagerFactory.java @@ -7,6 +7,8 @@ import com.linkedin.metadata.boot.dependencies.BootstrapDependency; import com.linkedin.metadata.boot.steps.WaitForSystemUpdateStep; import com.linkedin.metadata.kafka.config.MetadataChangeProposalProcessorCondition; +import java.util.List; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -15,10 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Scope; -import javax.annotation.Nonnull; -import java.util.List; - - @Configuration @Conditional(MetadataChangeProposalProcessorCondition.class) public class MCPBootstrapManagerFactory { @@ -27,8 +25,7 @@ public class MCPBootstrapManagerFactory { @Qualifier("dataHubUpgradeKafkaListener") private BootstrapDependency _dataHubUpgradeKafkaListener; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}") private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -37,8 +34,8 @@ public class MCPBootstrapManagerFactory { @Scope("singleton") @Nonnull protected BootstrapManager createInstance() { - final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, - _configurationProvider); + final WaitForSystemUpdateStep waitForSystemUpdateStep = + new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); final List finalSteps = ImmutableList.of(waitForSystemUpdateStep); diff --git a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java index 9dd265736bfc2..1cdb05b04e0ac 100644 --- 
a/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java +++ b/metadata-jobs/mce-consumer/src/main/java/com/linkedin/metadata/kafka/config/MetadataChangeProposalProcessorCondition.java @@ -5,12 +5,11 @@ import org.springframework.core.env.Environment; import org.springframework.core.type.AnnotatedTypeMetadata; - public class MetadataChangeProposalProcessorCondition implements Condition { @Override public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { Environment env = context.getEnvironment(); - return "true".equals(env.getProperty("MCE_CONSUMER_ENABLED")) || "true".equals( - env.getProperty("MCP_CONSUMER_ENABLED")); + return "true".equals(env.getProperty("MCE_CONSUMER_ENABLED")) + || "true".equals(env.getProperty("MCP_CONSUMER_ENABLED")); } } diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java index aa09679cb08a8..84d4f4ae4c095 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/PlatformEventProcessor.java @@ -21,7 +21,6 @@ import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; - @Slf4j @Component @Conditional(PlatformEventProcessorCondition.class) @@ -30,7 +29,8 @@ public class PlatformEventProcessor { private final List hooks; - private final Histogram kafkaLagStats = MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); + private final Histogram kafkaLagStats = + MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), "kafkaLag")); @Autowired public PlatformEventProcessor() { @@ -39,8 +39,9 @@ public PlatformEventProcessor() { this.hooks.forEach(PlatformEventHook::init); } - @KafkaListener(id = "${PLATFORM_EVENT_KAFKA_CONSUMER_GROUP_ID:generic-platform-event-job-client}", topics = { - "${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}" }, + @KafkaListener( + id = "${PLATFORM_EVENT_KAFKA_CONSUMER_GROUP_ID:generic-platform-event-job-client}", + topics = {"${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}"}, containerFactory = "kafkaEventConsumer") public void consume(final ConsumerRecord consumerRecord) { @@ -48,14 +49,17 @@ public void consume(final ConsumerRecord consumerRecord) kafkaLagStats.update(System.currentTimeMillis() - consumerRecord.timestamp()); final GenericRecord record = consumerRecord.value(); - log.debug("Got Generic PE on topic: {}, partition: {}, offset: {}", consumerRecord.topic(), consumerRecord.partition(), consumerRecord.offset()); + log.debug( + "Got Generic PE on topic: {}, partition: {}, offset: {}", + consumerRecord.topic(), + consumerRecord.partition(), + consumerRecord.offset()); MetricUtils.counter(this.getClass(), "received_pe_count").inc(); PlatformEvent event; try { event = EventUtils.avroToPegasusPE(record); - log.debug("Successfully converted Avro PE to Pegasus PE. name: {}", - event.getName()); + log.debug("Successfully converted Avro PE to Pegasus PE. 
name: {}", event.getName()); } catch (Exception e) { MetricUtils.counter(this.getClass(), "avro_to_pegasus_conversion_failure").inc(); log.error("Error deserializing message due to: ", e); @@ -66,8 +70,8 @@ public void consume(final ConsumerRecord consumerRecord) log.debug("Invoking PE hooks for event name {}", event.getName()); for (PlatformEventHook hook : this.hooks) { - try (Timer.Context ignored = MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency") - .time()) { + try (Timer.Context ignored = + MetricUtils.timer(this.getClass(), hook.getClass().getSimpleName() + "_latency").time()) { hook.invoke(event); } catch (Exception e) { // Just skip this hook and continue. diff --git a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java index 878e4edd371bc..3083642c5bfb6 100644 --- a/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java +++ b/metadata-jobs/pe-consumer/src/main/java/com/datahub/event/hook/PlatformEventHook.java @@ -6,20 +6,15 @@ /** * Custom hook which is invoked on receiving a new {@link PlatformEvent} event. * - * The semantics of this hook are currently "at most once". That is, the hook will not be called + *
<p>
The semantics of this hook are currently "at most once". That is, the hook will not be called * with the same message. In the future, we intend to migrate to "at least once" semantics, meaning * that the hook will be responsible for implementing idempotency. */ public interface PlatformEventHook { - /** - * Initialize the hook - */ - default void init() { } + /** Initialize the hook */ + default void init() {} - /** - * Invoke the hook when a PlatformEvent is received - */ + /** Invoke the hook when a PlatformEvent is received */ void invoke(@Nonnull PlatformEvent event); - } diff --git a/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java b/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java index aa86568bed01f..56d909781fd51 100644 --- a/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java +++ b/metadata-models-validator/src/main/java/com/linkedin/metadata/model/validation/ModelValidationTask.java @@ -4,25 +4,21 @@ import com.linkedin.pegasus.generator.DataSchemaParser; import java.io.IOException; - /** * Validates GMS PDL models by constructing a set of {@link EntitySpec}s from them. * - * The following validation rules are applied: - * - * 1. Each Entity Snapshot Model is annotated as @Entity with a common name - * 2. Each Aspect is annotated as @Aspect with a common name - * 3. Each @Searchable field is of primitive / list of primitive type - * 4. Each @Relationship field is of Urn / List of Urn type - * 5. Each Entity Snapshot includes a single Key Aspect + *
<p>
The following validation rules are applied: * + *
<p>
1. Each Entity Snapshot Model is annotated as @Entity with a common name 2. Each Aspect is + * annotated as @Aspect with a common name 3. Each @Searchable field is of primitive / list of + * primitive type 4. Each @Relationship field is of Urn / List of Urn type 5. Each Entity Snapshot + * includes a single Key Aspect */ public class ModelValidationTask { private static final String SNAPSHOT_SCHEMA_NAME = "com.linkedin.metadata.snapshot.Snapshot"; - private ModelValidationTask() { - } + private ModelValidationTask() {} public static void main(String[] args) throws IOException { if (args.length != 3) { @@ -34,21 +30,25 @@ public static void main(String[] args) throws IOException { final String modelPath = args[1]; final DataSchemaParser parser = new DataSchemaParser(resolverPath); - parser.parseSources(new String[]{modelPath}); + parser.parseSources(new String[] {modelPath}); - final DataSchema snapshotSchema = parser.getSchemaResolver().existingDataSchema(SNAPSHOT_SCHEMA_NAME); + final DataSchema snapshotSchema = + parser.getSchemaResolver().existingDataSchema(SNAPSHOT_SCHEMA_NAME); if (snapshotSchema == null) { throw new RuntimeException( - String.format("Failed to find Snapshot model with name %s in parsed schemas!", SNAPSHOT_SCHEMA_NAME)); + String.format( + "Failed to find Snapshot model with name %s in parsed schemas!", + SNAPSHOT_SCHEMA_NAME)); } - // TODO: Fix this so that aspects that are just in the entity registry don't fail because they aren't in the + // TODO: Fix this so that aspects that are just in the entity registry don't fail because they + // aren't in the // snapshot registry. -// try { -// new EntitySpecBuilder().buildEntitySpecs(snapshotSchema); -// } catch (Exception e) { -// throw new RuntimeException("Failed to validate DataHub PDL models", e); -// } + // try { + // new EntitySpecBuilder().buildEntitySpecs(snapshotSchema); + // } catch (Exception e) { + // throw new RuntimeException("Failed to validate DataHub PDL models", e); + // } } -} \ No newline at end of file +} diff --git a/metadata-models/build.gradle b/metadata-models/build.gradle index bd8052283e168..e90a4042c1921 100644 --- a/metadata-models/build.gradle +++ b/metadata-models/build.gradle @@ -75,11 +75,7 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: 'generateJsonSchema') } tasks.getByName("compileJava").dependsOn(openApiGenerate) -checkstyleMain.exclude '**/generated/**' - task cleanExtraDirs { delete "$projectDir/src/generatedJsonSchema" } clean.finalizedBy(cleanExtraDirs) - -checkstyleMain.exclude '**/generated/**' diff --git a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java index c3db318ece23e..29f58223a240a 100644 --- a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java +++ b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidation.java @@ -1,5 +1,8 @@ package com.linkedin.metadata; +import static com.linkedin.metadata.ModelValidationConstants.*; +import static org.testng.AssertJUnit.*; + import com.datahub.util.validator.AspectValidator; import com.datahub.util.validator.DeltaValidator; import com.datahub.util.validator.SnapshotValidator; @@ -13,10 +16,6 @@ import javax.annotation.Nonnull; import org.testng.annotations.Test; -import static com.linkedin.metadata.ModelValidationConstants.*; -import static org.testng.AssertJUnit.*; - - public class ModelValidation { @Test @@ -39,23 +38,28 @@ public void validateSnapshots() throws Exception { @Test public 
void validateDeltas() throws Exception { - getRecordTemplatesInPackage("com.linkedin.metadata.delta", IGNORED_DELTA_CLASSES).forEach( - DeltaValidator::validateDeltaSchema); + getRecordTemplatesInPackage("com.linkedin.metadata.delta", IGNORED_DELTA_CLASSES) + .forEach(DeltaValidator::validateDeltaSchema); } - private List> getUnionTemplatesInPackage(@Nonnull String packageName, - @Nonnull Set> ignoreClasses) throws IOException { + private List> getUnionTemplatesInPackage( + @Nonnull String packageName, @Nonnull Set> ignoreClasses) + throws IOException { return getClassesInPackage(packageName, UnionTemplate.class, ignoreClasses); } - private List> getRecordTemplatesInPackage(@Nonnull String packageName, - @Nonnull Set> ignoreClasses) throws IOException { + private List> getRecordTemplatesInPackage( + @Nonnull String packageName, @Nonnull Set> ignoreClasses) + throws IOException { return getClassesInPackage(packageName, RecordTemplate.class, ignoreClasses); } @SuppressWarnings("unchecked") - private List> getClassesInPackage(@Nonnull String packageName, - @Nonnull Class parentClass, @Nonnull Set> ignoreClasses) throws IOException { + private List> getClassesInPackage( + @Nonnull String packageName, + @Nonnull Class parentClass, + @Nonnull Set> ignoreClasses) + throws IOException { return ClassPath.from(ClassLoader.getSystemClassLoader()) .getTopLevelClasses(packageName) .stream() diff --git a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java index 11fa8cdc965d4..f9e8fcc06bcbb 100644 --- a/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java +++ b/metadata-models/src/test/java/com/linkedin/metadata/ModelValidationConstants.java @@ -5,7 +5,6 @@ import com.linkedin.data.template.UnionTemplate; import java.util.Set; - public class ModelValidationConstants { private ModelValidationConstants() { diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java index d3c5ba822ac04..b8553235b3de7 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConfiguration.java @@ -3,31 +3,25 @@ import java.util.List; import lombok.Data; -/** - * POJO representing the "authentication" configuration block in application.yml. - */ +/** POJO representing the "authentication" configuration block in application.yml. */ @Data public class AuthenticationConfiguration { - /** - * Whether authentication is enabled - */ + /** Whether authentication is enabled */ private boolean enabled; + /** - * List of configurations for {@link com.datahub.plugins.auth.authentication.Authenticator}s to be registered + * List of configurations for {@link com.datahub.plugins.auth.authentication.Authenticator}s to be + * registered */ private List authenticators; - /** - * Unique id to identify internal system callers - */ + + /** Unique id to identify internal system callers */ private String systemClientId; - /** - * Unique secret to authenticate internal system callers - */ + + /** Unique secret to authenticate internal system callers */ private String systemClientSecret; - /** - * The lifespan of a UI session token. - */ + /** The lifespan of a UI session token. 
*/ private long sessionTokenDurationMs; private TokenServiceConfiguration tokenService; diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java index 96a3f1b8f56bd..31cfe1c057468 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticationConstants.java @@ -1,29 +1,21 @@ package com.datahub.authentication; -/** - * A set of shared constants related to Authentication. - */ +/** A set of shared constants related to Authentication. */ public class AuthenticationConstants { - /** - * Name of the header which carries authorization information - */ + /** Name of the header which carries authorization information */ public static final String AUTHORIZATION_HEADER_NAME = "Authorization"; /** - * A deprecated header that previously carried the urn of the authenticated actor. - * This has been replaced by the DELEGATED_FOR_ACTOR_ID and DELEGATED_FOR_ACTOR_TYPE headers. + * A deprecated header that previously carried the urn of the authenticated actor. This has been + * replaced by the DELEGATED_FOR_ACTOR_ID and DELEGATED_FOR_ACTOR_TYPE headers. */ public static final String LEGACY_X_DATAHUB_ACTOR_HEADER = "X-DataHub-Actor"; - /** - * A header capturing the unique Actor Id that is delegating a request. - */ + /** A header capturing the unique Actor Id that is delegating a request. */ public static final String DELEGATED_FOR_ACTOR_ID_HEADER_NAME = "X-DataHub-Delegated-For-Id"; - /** - * A header capturing the unique Actor Type that is delegating a request. - */ + /** A header capturing the unique Actor Type that is delegating a request. */ public static final String DELEGATED_FOR_ACTOR_TYPE_HEADER_NAME = "X-DataHub-Delegated-For-Type"; public static final String SYSTEM_CLIENT_ID_CONFIG = "systemClientId"; @@ -32,6 +24,5 @@ public class AuthenticationConstants { public static final String ENTITY_SERVICE = "entityService"; public static final String TOKEN_SERVICE = "tokenService"; - private AuthenticationConstants() { - } + private AuthenticationConstants() {} } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java index 2d3cf5f588d7d..36814ee380e2f 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/AuthenticatorConfiguration.java @@ -3,18 +3,21 @@ import java.util.Map; import lombok.Data; - /** - * POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations provided in the application.yml. + * POJO representing {@link com.datahub.plugins.auth.authentication.Authenticator} configurations + * provided in the application.yml. */ @Data public class AuthenticatorConfiguration { /** - * A fully-qualified class name for the {@link com.datahub.plugins.auth.authentication.Authenticator} implementation to be registered. + * A fully-qualified class name for the {@link + * com.datahub.plugins.auth.authentication.Authenticator} implementation to be registered. 
*/ private String type; + /** - * A set of authenticator-specific configurations passed through during "init" of the authenticator. + * A set of authenticator-specific configurations passed through during "init" of the + * authenticator. */ private Map configs; } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java index 0a606f0f06d92..70b93544bebdf 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authentication/TokenServiceConfiguration.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data -/** - * Configurations for DataHub token service - */ +/** Configurations for DataHub token service */ public class TokenServiceConfiguration { private String signingKey; private String salt; diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java index 2770fc5c41aa0..5ed69d3e2ff8c 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java @@ -4,18 +4,12 @@ import java.util.List; import lombok.Data; - -/** - * POJO representing the "authentication" configuration block in application.yml. - */ +/** POJO representing the "authentication" configuration block in application.yml. */ @Data public class AuthorizationConfiguration { - /** - * Configuration for the default DataHub Policies-based authorizer. - */ + /** Configuration for the default DataHub Policies-based authorizer. */ private DefaultAuthorizerConfiguration defaultAuthorizer; - /** - * List of configurations for {@link Authorizer}s to be registered - */ + + /** List of configurations for {@link Authorizer}s to be registered */ private List authorizers; -} \ No newline at end of file +} diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java index 65cd6c17c739c..c4a26a1cd6276 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizerConfiguration.java @@ -4,22 +4,15 @@ import java.util.Map; import lombok.Data; - -/** - * POJO representing {@link Authorizer} configurations provided in the application.yml. - */ +/** POJO representing {@link Authorizer} configurations provided in the application.yml. */ @Data public class AuthorizerConfiguration { - /** - * Whether to enable this authorizer - */ + /** Whether to enable this authorizer */ private boolean enabled; - /** - * A fully-qualified class name for the {@link Authorizer} implementation to be registered. - */ + + /** A fully-qualified class name for the {@link Authorizer} implementation to be registered. */ private String type; - /** - * A set of authorizer-specific configurations passed through during "init" of the authorizer. - */ + + /** A set of authorizer-specific configurations passed through during "init" of the authorizer. 
*/ private Map configs; } diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java index dfec06dedd147..c06e5b10b23f9 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/DefaultAuthorizerConfiguration.java @@ -2,15 +2,11 @@ import lombok.Data; - @Data public class DefaultAuthorizerConfiguration { - /** - * Whether authorization via DataHub policies is enabled. - */ + /** Whether authorization via DataHub policies is enabled. */ private boolean enabled; - /** - * The duration between policies cache refreshes. - */ + + /** The duration between policies cache refreshes. */ private int cacheRefreshIntervalSecs; } diff --git a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java index 8c7b3ac8b98f0..335a30280c3be 100644 --- a/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java +++ b/metadata-service/auth-filter/src/main/java/com/datahub/auth/authentication/filter/AuthenticationFilter.java @@ -1,19 +1,21 @@ package com.datahub.auth.authentication.filter; -import com.datahub.authentication.authenticator.AuthenticatorChain; -import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; -import com.datahub.authentication.authenticator.HealthStatusAuthenticator; -import com.datahub.authentication.authenticator.NoOpAuthenticator; -import com.datahub.authentication.token.StatefulTokenService; -import com.datahub.plugins.PluginConstant; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.AuthenticationContext; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorConfiguration; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.authentication.authenticator.AuthenticatorChain; +import com.datahub.authentication.authenticator.DataHubSystemAuthenticator; +import com.datahub.authentication.authenticator.HealthStatusAuthenticator; +import com.datahub.authentication.authenticator.NoOpAuthenticator; +import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.plugins.PluginConstant; +import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.plugins.common.PluginConfig; import com.datahub.plugins.common.PluginPermissionManager; import com.datahub.plugins.common.PluginType; @@ -49,18 +51,14 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.web.context.support.SpringBeanAutowiringSupport; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * A servlet {@link Filter} for authenticating requests inbound to the Metadata Service. This filter is applied to the - * GraphQL Servlet, the Rest.li Servlet, and the Auth (token) Servlet. + * A servlet {@link Filter} for authenticating requests inbound to the Metadata Service. 
This filter + * is applied to the GraphQL Servlet, the Rest.li Servlet, and the Auth (token) Servlet. */ @Slf4j public class AuthenticationFilter implements Filter { - @Inject - private ConfigurationProvider configurationProvider; + @Inject private ConfigurationProvider configurationProvider; @Inject @Named("entityService") @@ -90,23 +88,28 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha authentication = this.authenticatorChain.authenticate(context, _logAuthenticatorExceptions); } catch (AuthenticationException e) { // For AuthenticationExpiredExceptions, terminate and provide that feedback to the user - log.debug("Failed to authenticate request. Received an AuthenticationExpiredException from authenticator chain.", + log.debug( + "Failed to authenticate request. Received an AuthenticationExpiredException from authenticator chain.", e); - ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage()); + ((HttpServletResponse) response) + .sendError(HttpServletResponse.SC_UNAUTHORIZED, e.getMessage()); return; } if (authentication != null) { // Successfully authenticated. - log.debug(String.format("Successfully authenticated request for Actor with type: %s, id: %s", - authentication.getActor().getType(), authentication.getActor().getId())); + log.debug( + String.format( + "Successfully authenticated request for Actor with type: %s, id: %s", + authentication.getActor().getType(), authentication.getActor().getId())); AuthenticationContext.setAuthentication(authentication); chain.doFilter(request, response); } else { // Reject request - log.debug("Failed to authenticate request. Received 'null' Authentication value from authenticator chain."); - ((HttpServletResponse) response).sendError(HttpServletResponse.SC_UNAUTHORIZED, - "Unauthorized to perform this action."); + log.debug( + "Failed to authenticate request. Received 'null' Authentication value from authenticator chain."); + ((HttpServletResponse) response) + .sendError(HttpServletResponse.SC_UNAUTHORIZED, "Unauthorized to perform this action."); return; } AuthenticationContext.remove(); @@ -120,9 +123,10 @@ public void destroy() { /** * Constructs an {@link AuthenticatorChain} via the provided {@link AuthenticationConfiguration}. * - * The process is simple: For each configured {@link Authenticator}, attempt to instantiate the class using a default (zero-arg) - * constructor, then call it's initialize method passing in a freeform block of associated configurations as a {@link Map}. Finally, - * register the {@link Authenticator} in the authenticator chain. + *
<p>
The process is simple: For each configured {@link Authenticator}, attempt to instantiate the + * class using a default (zero-arg) constructor, then call it's initialize method passing in a + * freeform block of associated configurations as a {@link Map}. Finally, register the {@link + * Authenticator} in the authenticator chain. */ private void buildAuthenticatorChain() { @@ -130,89 +134,123 @@ private void buildAuthenticatorChain() { boolean isAuthEnabled = this.configurationProvider.getAuthentication().isEnabled(); - // Create authentication context object to pass to authenticator instances. They can use it as needed. - final AuthenticatorContext authenticatorContext = new AuthenticatorContext( - ImmutableMap.of(ENTITY_SERVICE, this._entityService, TOKEN_SERVICE, this._tokenService)); + // Create authentication context object to pass to authenticator instances. They can use it as + // needed. + final AuthenticatorContext authenticatorContext = + new AuthenticatorContext( + ImmutableMap.of( + ENTITY_SERVICE, this._entityService, TOKEN_SERVICE, this._tokenService)); if (isAuthEnabled) { log.info("Auth is enabled. Building authenticator chain..."); - this.registerNativeAuthenticator(authenticatorChain, authenticatorContext); // Register native authenticators + this.registerNativeAuthenticator( + authenticatorChain, authenticatorContext); // Register native authenticators this.registerPlugins(authenticatorChain); // Register plugin authenticators } else { - // Authentication is not enabled. Populate authenticator chain with a purposely permissive Authenticator. + // Authentication is not enabled. Populate authenticator chain with a purposely permissive + // Authenticator. log.info("Auth is disabled. Building no-op authenticator chain..."); final NoOpAuthenticator noOpAuthenticator = new NoOpAuthenticator(); noOpAuthenticator.init( - ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, this.configurationProvider.getAuthentication().getSystemClientId()), + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId()), authenticatorContext); authenticatorChain.register(noOpAuthenticator); } } private AuthenticationRequest buildAuthContext(HttpServletRequest request) { - return new AuthenticationRequest(request.getServletPath(), request.getPathInfo(), Collections.list(request.getHeaderNames()) - .stream() - .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); + return new AuthenticationRequest( + request.getServletPath(), + request.getPathInfo(), + Collections.list(request.getHeaderNames()).stream() + .collect(Collectors.toMap(headerName -> headerName, request::getHeader))); } private void registerPlugins(AuthenticatorChain authenticatorChain) { - // TODO: Introduce plugin factory to reduce duplicate code around authentication and authorization processing + // TODO: Introduce plugin factory to reduce duplicate code around authentication and + // authorization processing ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); - Path pluginBaseDirectory = Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); + Path pluginBaseDirectory = + Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); Optional optionalConfig = (new ConfigProvider(pluginBaseDirectory)).load(); - optionalConfig.ifPresent((config) -> { - log.info("Processing authenticator plugin from auth plugin directory {}", pluginBaseDirectory); - PluginConfigFactory authenticatorPluginPluginConfigFactory 
= - new PluginConfigFactory(config); + optionalConfig.ifPresent( + (config) -> { + log.info( + "Processing authenticator plugin from auth plugin directory {}", pluginBaseDirectory); + PluginConfigFactory authenticatorPluginPluginConfigFactory = + new PluginConfigFactory(config); - List authorizers = - authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - // Filter enabled authenticator plugins - List enabledAuthenticators = authorizers.stream().filter(pluginConfig -> { - if (!pluginConfig.getEnabled()) { - log.info(String.format("Authenticator %s is not enabled", pluginConfig.getName())); - } - return pluginConfig.getEnabled(); - }).collect(Collectors.toList()); + List authorizers = + authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); + // Filter enabled authenticator plugins + List enabledAuthenticators = + authorizers.stream() + .filter( + pluginConfig -> { + if (!pluginConfig.getEnabled()) { + log.info( + String.format( + "Authenticator %s is not enabled", pluginConfig.getName())); + } + return pluginConfig.getEnabled(); + }) + .collect(Collectors.toList()); - SecurityMode securityMode = - SecurityMode.valueOf(this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); - // Create permission manager with security mode - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); + SecurityMode securityMode = + SecurityMode.valueOf( + this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + // Create permission manager with security mode + PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); - // Initiate Authenticators - enabledAuthenticators.forEach((pluginConfig) -> { - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, pluginConfig); - // Create context - AuthenticatorContext context = new AuthenticatorContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString())); + // Initiate Authenticators + enabledAuthenticators.forEach( + (pluginConfig) -> { + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, pluginConfig); + // Create context + AuthenticatorContext context = + new AuthenticatorContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + pluginConfig.getPluginHomeDirectory().toString())); - try { - Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); - log.info("Initializing plugin {}", pluginConfig.getName()); - authenticator.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); - authenticatorChain.register(authenticator); - log.info("Plugin {} is initialized", pluginConfig.getName()); - } catch (ClassNotFoundException e) { - throw new RuntimeException(String.format("Plugin className %s not found", pluginConfig.getClassName()), e); - } finally { - Thread.currentThread().setContextClassLoader(contextClassLoader); - } - }); - }); + try { + Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + log.info("Initializing plugin {}", pluginConfig.getName()); + authenticator.init( + pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); + authenticatorChain.register(authenticator); + log.info("Plugin 
{} is initialized", pluginConfig.getName()); + } catch (ClassNotFoundException e) { + throw new RuntimeException( + String.format("Plugin className %s not found", pluginConfig.getClassName()), + e); + } finally { + Thread.currentThread().setContextClassLoader(contextClassLoader); + } + }); + }); } - private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, AuthenticatorContext authenticatorContext) { + private void registerNativeAuthenticator( + AuthenticatorChain authenticatorChain, AuthenticatorContext authenticatorContext) { log.info("Registering native authenticators"); // Register system authenticator DataHubSystemAuthenticator systemAuthenticator = new DataHubSystemAuthenticator(); systemAuthenticator.init( - ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, this.configurationProvider.getAuthentication().getSystemClientId(), - SYSTEM_CLIENT_SECRET_CONFIG, this.configurationProvider.getAuthentication().getSystemClientSecret()), + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId(), + SYSTEM_CLIENT_SECRET_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientSecret()), authenticatorContext); - authenticatorChain.register(systemAuthenticator); // Always register authenticator for internal system. + authenticatorChain.register( + systemAuthenticator); // Always register authenticator for internal system. // Register authenticator define in application.yml final List authenticatorConfigurations = @@ -229,14 +267,16 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, clazz = (Class) Class.forName(type); } catch (ClassNotFoundException e) { throw new RuntimeException( - String.format("Failed to find Authenticator class with name %s on the classpath.", type)); + String.format( + "Failed to find Authenticator class with name %s on the classpath.", type)); } // Ensure class conforms to the correct type. if (!Authenticator.class.isAssignableFrom(clazz)) { - throw new IllegalArgumentException(String.format( - "Failed to instantiate invalid Authenticator with class name %s. Class does not implement the 'Authenticator' interface", - clazz.getCanonicalName())); + throw new IllegalArgumentException( + String.format( + "Failed to instantiate invalid Authenticator with class name %s. Class does not implement the 'Authenticator' interface", + clazz.getCanonicalName())); } // Else construct an instance of the class, each class should have an empty constructor. @@ -245,9 +285,14 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, // Successfully created authenticator. Now init and register it. 
log.debug(String.format("Initializing Authenticator with name %s", type)); if (authenticator instanceof HealthStatusAuthenticator) { - Map authenticatorConfig = new HashMap<>(Map.of(SYSTEM_CLIENT_ID_CONFIG, - this.configurationProvider.getAuthentication().getSystemClientId())); - authenticatorConfig.putAll(Optional.ofNullable(internalAuthenticatorConfig.getConfigs()).orElse(Collections.emptyMap())); + Map authenticatorConfig = + new HashMap<>( + Map.of( + SYSTEM_CLIENT_ID_CONFIG, + this.configurationProvider.getAuthentication().getSystemClientId())); + authenticatorConfig.putAll( + Optional.ofNullable(internalAuthenticatorConfig.getConfigs()) + .orElse(Collections.emptyMap())); authenticator.init(authenticatorConfig, authenticatorContext); } else { authenticator.init(configs, authenticatorContext); @@ -256,8 +301,10 @@ private void registerNativeAuthenticator(AuthenticatorChain authenticatorChain, authenticatorChain.register(authenticator); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to instantiate Authenticator with class name %s", clazz.getCanonicalName()), e); + String.format( + "Failed to instantiate Authenticator with class name %s", clazz.getCanonicalName()), + e); } } } -} \ No newline at end of file +} diff --git a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java index 05ca428283a6c..471fdf8c36903 100644 --- a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java +++ b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthTestConfiguration.java @@ -1,5 +1,7 @@ package com.datahub.auth.authentication; +import static org.mockito.Mockito.*; + import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.AuthenticatorConfiguration; @@ -17,27 +19,25 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static org.mockito.Mockito.*; - @Configuration public class AuthTestConfiguration { - @Bean public EntityService entityService() { return mock(EntityService.class); } @Bean("dataHubTokenService") - public StatefulTokenService statefulTokenService(ConfigurationProvider configurationProvider, EntityService entityService) { - TokenServiceConfiguration tokenServiceConfiguration = configurationProvider.getAuthentication().getTokenService(); + public StatefulTokenService statefulTokenService( + ConfigurationProvider configurationProvider, EntityService entityService) { + TokenServiceConfiguration tokenServiceConfiguration = + configurationProvider.getAuthentication().getTokenService(); return new StatefulTokenService( tokenServiceConfiguration.getSigningKey(), tokenServiceConfiguration.getSigningAlgorithm(), tokenServiceConfiguration.getIssuer(), entityService, - tokenServiceConfiguration.getSalt() - ); + tokenServiceConfiguration.getSalt()); } @Bean @@ -59,8 +59,12 @@ public ConfigurationProvider configurationProvider() { authenticationConfiguration.setTokenService(tokenServiceConfiguration); AuthenticatorConfiguration authenticator = new AuthenticatorConfiguration(); authenticator.setType("com.datahub.authentication.authenticator.DataHubTokenAuthenticator"); - authenticator.setConfigs(Map.of("signingKey", "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=", - 
"salt", "ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=")); + authenticator.setConfigs( + Map.of( + "signingKey", + "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94=", + "salt", + "ohDVbJBvHHVJh9S/UA4BYF9COuNnqqVhr9MLKEGXk1O=")); List authenticators = List.of(authenticator); authenticationConfiguration.setAuthenticators(authenticators); authPluginConfiguration.setPath(""); diff --git a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java index 2ac65bf09c912..746138e4ee90f 100644 --- a/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java +++ b/metadata-service/auth-filter/src/test/java/com/datahub/auth/authentication/AuthenticationFilterTest.java @@ -1,5 +1,8 @@ package com.datahub.auth.authentication; +import static com.datahub.authentication.AuthenticationConstants.*; +import static org.mockito.Mockito.*; + import com.datahub.auth.authentication.filter.AuthenticationFilter; import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; @@ -17,18 +20,12 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static com.datahub.authentication.AuthenticationConstants.*; -import static org.mockito.Mockito.*; - - -@ContextConfiguration(classes = { AuthTestConfiguration.class }) +@ContextConfiguration(classes = {AuthTestConfiguration.class}) public class AuthenticationFilterTest extends AbstractTestNGSpringContextTests { - @Autowired - AuthenticationFilter _authenticationFilter; + @Autowired AuthenticationFilter _authenticationFilter; - @Autowired - StatefulTokenService _statefulTokenService; + @Autowired StatefulTokenService _statefulTokenService; @Test public void testExpiredToken() throws ServletException, IOException, TokenException { @@ -37,17 +34,20 @@ public void testExpiredToken() throws ServletException, IOException, TokenExcept HttpServletResponse servletResponse = mock(HttpServletResponse.class); FilterChain filterChain = mock(FilterChain.class); Actor actor = new Actor(ActorType.USER, "datahub"); -// String token = _statefulTokenService.generateAccessToken(TokenType.SESSION, actor, 0L, System.currentTimeMillis(), "token", -// "token", actor.toUrnStr()); + // String token = _statefulTokenService.generateAccessToken(TokenType.SESSION, actor, 0L, + // System.currentTimeMillis(), "token", + // "token", actor.toUrnStr()); // Token generated 9/11/23, invalid for all future dates - String token = "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIZCI6ImRhdGFodWIiLCJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwian" - + "RpIjoiMmI0MzZkZDAtYjEwOS00N2UwLWJmYTEtMzM2ZmU4MTU4MDE1Iiwic3ViIjoiZGF0YWh1YiIsImV4cCI6MTY5NDU0NzA2OCwiaXNzIjoiZGF" - + "0YWh1Yi1tZXRhZGF0YS1zZXJ2aWNlIn0.giqx7J5a9mxuubG6rXdAMoaGlcII-fqY-W82Wm7OlLI"; - when(servletRequest.getHeaderNames()).thenReturn(Collections.enumeration(List.of(AUTHORIZATION_HEADER_NAME))); - when(servletRequest.getHeader(AUTHORIZATION_HEADER_NAME)) - .thenReturn("Bearer " + token); + String token = + "eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIZCI6ImRhdGFodWIiLCJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwian" + + "RpIjoiMmI0MzZkZDAtYjEwOS00N2UwLWJmYTEtMzM2ZmU4MTU4MDE1Iiwic3ViIjoiZGF0YWh1YiIsImV4cCI6MTY5NDU0NzA2OCwiaXNzIjoiZGF" + + "0YWh1Yi1tZXRhZGF0YS1zZXJ2aWNlIn0.giqx7J5a9mxuubG6rXdAMoaGlcII-fqY-W82Wm7OlLI"; + when(servletRequest.getHeaderNames()) + 
.thenReturn(Collections.enumeration(List.of(AUTHORIZATION_HEADER_NAME))); + when(servletRequest.getHeader(AUTHORIZATION_HEADER_NAME)).thenReturn("Bearer " + token); _authenticationFilter.doFilter(servletRequest, servletResponse, filterChain); - verify(servletResponse, times(1)).sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString()); + verify(servletResponse, times(1)) + .sendError(eq(HttpServletResponse.SC_UNAUTHORIZED), anyString()); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java index e72225e6ee990..b69a8a7818485 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/AuthenticatorChain.java @@ -1,27 +1,26 @@ package com.datahub.authentication.authenticator; import com.datahub.authentication.Authentication; - import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.util.Pair; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * A configurable chain of {@link Authenticator}s executed in series to attempt to authenticate an inbound request. + * A configurable chain of {@link Authenticator}s executed in series to attempt to authenticate an + * inbound request. * - * Individual {@link Authenticator}s are registered with the chain using {@link #register(Authenticator)}. - * The chain can be executed by invoking {@link #authenticate(AuthenticationRequest)} with an instance of {@link AuthenticationRequest}. + *
<p>
Individual {@link Authenticator}s are registered with the chain using {@link + * #register(Authenticator)}. The chain can be executed by invoking {@link + * #authenticate(AuthenticationRequest)} with an instance of {@link AuthenticationRequest}. */ @Slf4j public class AuthenticatorChain { @@ -39,21 +38,30 @@ public void register(@Nonnull final Authenticator authenticator) { } /** - * Executes a set of {@link Authenticator}s and returns the first successful authentication result. + * Executes a set of {@link Authenticator}s and returns the first successful authentication + * result. * - * Returns an instance of {@link Authentication} if the incoming request is successfully authenticated. - * Returns null if {@link Authentication} cannot be resolved for the incoming request. + *
<p>
Returns an instance of {@link Authentication} if the incoming request is successfully + * authenticated. Returns null if {@link Authentication} cannot be resolved for the incoming + * request. */ @Nullable - public Authentication authenticate(@Nonnull final AuthenticationRequest context, boolean logExceptions) throws AuthenticationException { + public Authentication authenticate( + @Nonnull final AuthenticationRequest context, boolean logExceptions) + throws AuthenticationException { Objects.requireNonNull(context); ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); List> authenticationFailures = new ArrayList<>(); for (final Authenticator authenticator : this.authenticators) { try { - log.debug(String.format("Executing Authenticator with class name %s", authenticator.getClass().getCanonicalName())); - // The library came with plugin can use the contextClassLoader to load the classes. For example apache-ranger library does this. - // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class loading request from plugin's home directory, + log.debug( + String.format( + "Executing Authenticator with class name %s", + authenticator.getClass().getCanonicalName())); + // The library came with plugin can use the contextClassLoader to load the classes. For + // example apache-ranger library does this. + // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class + // loading request from plugin's home directory, // otherwise plugin's internal library wouldn't be able to find their dependent classes Thread.currentThread().setContextClassLoader(authenticator.getClass().getClassLoader()); Authentication result = authenticator.authenticate(context); @@ -65,13 +73,19 @@ public Authentication authenticate(@Nonnull final AuthenticationRequest context, } } catch (AuthenticationExpiredException e) { // Throw if it's an AuthenticationException to propagate the error message to the end user - log.debug(String.format("Unable to authenticate request using Authenticator %s", authenticator.getClass().getCanonicalName()), e); + log.debug( + String.format( + "Unable to authenticate request using Authenticator %s", + authenticator.getClass().getCanonicalName()), + e); throw e; } catch (Exception e) { // Log as a normal error otherwise. - log.debug(String.format( + log.debug( + String.format( "Caught exception while attempting to authenticate request using Authenticator %s", - authenticator.getClass().getCanonicalName()), e); + authenticator.getClass().getCanonicalName()), + e); authenticationFailures.add(new Pair<>(authenticator.getClass().getCanonicalName(), e)); } finally { Thread.currentThread().setContextClassLoader(contextClassLoader); @@ -79,14 +93,19 @@ public Authentication authenticate(@Nonnull final AuthenticationRequest context, } // No authentication resolved. Return null. if (!authenticationFailures.isEmpty()) { - List> shortMessage = authenticationFailures.stream() - .peek(p -> { - if (logExceptions) { - log.error("Error during {} authentication: ", p.getFirst(), p.getSecond()); - } - }) - .map(p -> Pair.of(p.getFirst(), p.getSecond().getMessage())).collect(Collectors.toList()); - log.warn("Authentication chain failed to resolve a valid authentication. 
Errors: {}", shortMessage); + List> shortMessage = + authenticationFailures.stream() + .peek( + p -> { + if (logExceptions) { + log.error("Error during {} authentication: ", p.getFirst(), p.getSecond()); + } + }) + .map(p -> Pair.of(p.getFirst(), p.getSecond().getMessage())) + .collect(Collectors.toList()); + log.warn( + "Authentication chain failed to resolve a valid authentication. Errors: {}", + shortMessage); } return null; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java index 524c12c56c266..635a87dc84c11 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticator.java @@ -1,29 +1,28 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorContext; import com.datahub.authentication.token.DataHubJwtSigningKeyResolver; +import com.datahub.plugins.auth.authentication.Authenticator; import io.jsonwebtoken.Claims; -import io.jsonwebtoken.Jwts; import io.jsonwebtoken.Jws; +import io.jsonwebtoken.Jwts; +import java.util.HashSet; import java.util.Map; import java.util.Objects; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.HashSet; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * This Authenticator verifies third party token and allows to pass claim for "id" part of resolved actor urn. - * Supported algorithm at this moment RSA + * This Authenticator verifies third party token and allows to pass claim for "id" part of resolved + * actor urn. Supported algorithm at this moment RSA */ @Slf4j public class DataHubJwtTokenAuthenticator implements Authenticator { @@ -33,57 +32,67 @@ public class DataHubJwtTokenAuthenticator implements Authenticator { static final String DEFAULT_SIGNING_ALG = "RSA"; /** - * idUserClaim allows you to select which claim will be used as the "id" part of the resolved actor urn, e.g. "urn:li:corpuser:" - * **/ + * idUserClaim allows you to select which claim will be used as the "id" part of the resolved + * actor urn, e.g. "urn:li:corpuser:" * + */ private String userIdClaim; - /** - * List of trusted issuers - * **/ + /** List of trusted issuers * */ private HashSet trustedIssuers; /** - * This public key is optional and should be used if token public key is not available online or will not change for signed token. - * **/ + * This public key is optional and should be used if token public key is not available online or + * will not change for signed token. * + */ private String publicKey; /** - * Algorithm used to sign your token. - * This is optional and can be skiped if public key is available online. - * **/ + * Algorithm used to sign your token. This is optional and can be skiped if public key is + * available online. 
* + */ private String algorithm; @Override - public void init(@Nonnull final Map config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.userIdClaim = config.get("userIdClaim") == null ? DEFAULT_USER_CLAIM : (String) config.get("userIdClaim"); + this.userIdClaim = + config.get("userIdClaim") == null ? DEFAULT_USER_CLAIM : (String) config.get("userIdClaim"); - Map issuers = Objects.requireNonNull((Map) config.get("trustedIssuers"), - "Missing required config trusted issuers"); + Map issuers = + Objects.requireNonNull( + (Map) config.get("trustedIssuers"), + "Missing required config trusted issuers"); this.trustedIssuers = new HashSet(issuers.values()); this.publicKey = (String) config.get("publicKey"); - this.algorithm = config.get("algorithm") == null ? DEFAULT_SIGNING_ALG : (String) config.get("algorithm"); + this.algorithm = + config.get("algorithm") == null ? DEFAULT_SIGNING_ALG : (String) config.get("algorithm"); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); try { String jwtToken = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); - if (jwtToken == null || (!jwtToken.startsWith("Bearer ") && !jwtToken.startsWith("bearer "))) { + if (jwtToken == null + || (!jwtToken.startsWith("Bearer ") && !jwtToken.startsWith("bearer "))) { throw new AuthenticationException("Invalid Authorization token"); } String token = getToken(jwtToken); - Jws claims = Jwts.parserBuilder() - .setSigningKeyResolver(new DataHubJwtSigningKeyResolver(this.trustedIssuers, this.publicKey, this.algorithm)) - .build() - .parseClaimsJws(token); + Jws claims = + Jwts.parserBuilder() + .setSigningKeyResolver( + new DataHubJwtSigningKeyResolver( + this.trustedIssuers, this.publicKey, this.algorithm)) + .build() + .parseClaimsJws(token); final String userClaim = claims.getBody().get(userIdClaim, String.class); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java index 70a4abc3fd18d..9a25a51b72622 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticator.java @@ -1,34 +1,33 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; -import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; +import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; -import javax.annotation.Nonnull; +import com.datahub.plugins.auth.authentication.Authenticator; import java.util.Collections; import java.util.Map; import java.util.Objects; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import 
lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * Authenticator that verifies system internal callers, such as the metadata-service itself OR datahub-frontend, - * using HTTP Basic Authentication. - * - * This makes use of a single "system client id" and "system shared secret" which each - * component in the system is configured to provide. + * Authenticator that verifies system internal callers, such as the metadata-service itself OR + * datahub-frontend, using HTTP Basic Authentication. * - * This authenticator requires the following configurations: + *
<p>
This makes use of a single "system client id" and "system shared secret" which each component + * in the system is configured to provide. * - * - systemClientId: an identifier for internal system callers, provided in the Authorization header via Basic Authentication. - * - systemClientSecret: a shared secret used to authenticate internal system callers + *
<p>
This authenticator requires the following configurations: * + *
<p>
- systemClientId: an identifier for internal system callers, provided in the Authorization + * header via Basic Authentication. - systemClientSecret: a shared secret used to authenticate + * internal system callers */ @Slf4j public class DataHubSystemAuthenticator implements Authenticator { @@ -37,16 +36,22 @@ public class DataHubSystemAuthenticator implements Authenticator { private String systemClientSecret; @Override - public void init(@Nonnull final Map config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); - this.systemClientSecret = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_SECRET_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_SECRET_CONFIG)); + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + this.systemClientSecret = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_SECRET_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_SECRET_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); final String authorizationHeader = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); if (authorizationHeader != null) { @@ -57,16 +62,18 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw if (splitCredentials.length == 2 && this.systemClientId.equals(splitCredentials[0]) - && this.systemClientSecret.equals(splitCredentials[1]) - ) { + && this.systemClientSecret.equals(splitCredentials[1])) { // If this request was made internally, there may be a delegated id. return new Authentication( - new Actor(ActorType.USER, this.systemClientId), // todo: replace this with service actor type once they exist. + new Actor( + ActorType.USER, + this.systemClientId), // todo: replace this with service actor type once they + // exist. authorizationHeader, - Collections.emptyMap() - ); + Collections.emptyMap()); } else { - throw new AuthenticationException("Provided credentials do not match known system client id & client secret. Check your configuration values..."); + throw new AuthenticationException( + "Provided credentials do not match known system client id & client secret. 
Check your configuration values..."); } } else { throw new AuthenticationException("Authorization header is missing 'Basic' prefix."); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java index e7e776999f34e..f1d1f5a80119c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticator.java @@ -1,34 +1,33 @@ package com.datahub.authentication.authenticator; -import com.datahub.authentication.token.StatefulTokenService; -import com.datahub.authentication.token.StatelessTokenService; -import com.datahub.authentication.token.TokenClaims; -import com.datahub.authentication.token.TokenExpiredException; -import com.datahub.authentication.Actor; +import static com.datahub.authentication.AuthenticationConstants.*; +import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConstants; +import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; import com.datahub.authentication.AuthenticationRequest; -import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.authentication.token.StatefulTokenService; +import com.datahub.authentication.token.StatelessTokenService; +import com.datahub.authentication.token.TokenClaims; +import com.datahub.authentication.token.TokenExpiredException; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; import java.util.Map; import java.util.Objects; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** * Authenticator that verifies DataHub-issued JSON web tokens. * - * This authenticator requires the following configurations: + *
<p>
This authenticator requires the following configurations: * - * - signingAlgorithm (optional): the algorithm used to verify JWT's. This should be THE SAME ONE used by the {@link StatelessTokenService}. Defaults to HS256. - * - signingKey: a key used to sign all JWT tokens using the provided signingAlgorithm + *
<p>
- signingAlgorithm (optional): the algorithm used to verify JWT's. This should be THE SAME ONE + * used by the {@link StatelessTokenService}. Defaults to HS256. - signingKey: a key used to sign + * all JWT tokens using the provided signingAlgorithm */ @Slf4j public class DataHubTokenAuthenticator implements Authenticator { @@ -47,28 +46,35 @@ public void init(@Nonnull final Map config, final AuthenticatorC Objects.requireNonNull(config, "Config parameter cannot be null"); Objects.requireNonNull(context, "Context parameter cannot be null"); final String signingKey = - Objects.requireNonNull((String) config.get(SIGNING_KEY_CONFIG_NAME), "signingKey is a required config"); + Objects.requireNonNull( + (String) config.get(SIGNING_KEY_CONFIG_NAME), "signingKey is a required config"); final String salt = Objects.requireNonNull((String) config.get(SALT_CONFIG_NAME), "salt is a required config"); - final String signingAlgorithm = (String) config.getOrDefault(SIGNING_ALG_CONFIG_NAME, DEFAULT_SIGNING_ALG); + final String signingAlgorithm = + (String) config.getOrDefault(SIGNING_ALG_CONFIG_NAME, DEFAULT_SIGNING_ALG); log.debug(String.format("Creating TokenService using signing algorithm %s", signingAlgorithm)); if (!context.data().containsKey(AuthenticationConstants.ENTITY_SERVICE)) { - throw new IllegalArgumentException("Unable to initialize DataHubTokenAuthenticator, entity service reference not" - + " found."); + throw new IllegalArgumentException( + "Unable to initialize DataHubTokenAuthenticator, entity service reference not" + + " found."); } final Object entityService = context.data().get(ENTITY_SERVICE); if (!(entityService instanceof EntityService)) { throw new RuntimeException( "Unable to initialize DataHubTokenAuthenticator, entity service reference is not of type: " - + "EntityService.class, found: " + entityService.getClass()); + + "EntityService.class, found: " + + entityService.getClass()); } - this._statefulTokenService = (StatefulTokenService) Objects.requireNonNull(context.data().get(TOKEN_SERVICE)); + this._statefulTokenService = + (StatefulTokenService) Objects.requireNonNull(context.data().get(TOKEN_SERVICE)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); - final String authorizationHeader = context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); // Case insensitive + final String authorizationHeader = + context.getRequestHeaders().get(AUTHORIZATION_HEADER_NAME); // Case insensitive if (authorizationHeader != null) { if (authorizationHeader.startsWith("Bearer ") || authorizationHeader.startsWith("bearer ")) { return validateAndExtract(authorizationHeader); @@ -79,12 +85,14 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw throw new AuthenticationException("Request is missing 'Authorization' header."); } - private Authentication validateAndExtract(final String credentials) throws AuthenticationException { + private Authentication validateAndExtract(final String credentials) + throws AuthenticationException { log.debug("Found authentication token. 
Verifying..."); final String token = credentials.substring(7); try { final TokenClaims claims = this._statefulTokenService.validateAccessToken(token); - return new Authentication(new Actor(claims.getActorType(), claims.getActorId()), credentials, claims.asMap()); + return new Authentication( + new Actor(claims.getActorType(), claims.getActorId()), credentials, claims.asMap()); } catch (TokenExpiredException e) { throw new AuthenticationExpiredException(e.getMessage(), e); } catch (Exception e) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java index 5749eacf5d25d..65581f1d5b635 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/HealthStatusAuthenticator.java @@ -1,5 +1,7 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -7,48 +9,45 @@ import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; import com.datahub.plugins.auth.authentication.Authenticator; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Set; - -import static com.datahub.authentication.AuthenticationConstants.SYSTEM_CLIENT_ID_CONFIG; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; /** * This Authenticator is used for allowing access for unauthenticated health check endpoints * - * It exists to support load balancers, liveness/readiness checks - * + *
<p>
It exists to support load balancers, liveness/readiness checks */ @Slf4j public class HealthStatusAuthenticator implements Authenticator { - private static final Set HEALTH_ENDPOINTS = Set.of( - "/openapi/check/", - "/openapi/up/" - ); + private static final Set HEALTH_ENDPOINTS = Set.of("/openapi/check/", "/openapi/up/"); private String systemClientId; @Override - public void init(@Nonnull final Map config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); - if (HEALTH_ENDPOINTS.stream().anyMatch(prefix -> String.join("", context.getServletInfo(), context.getPathInfo()).startsWith(prefix))) { + if (HEALTH_ENDPOINTS.stream() + .anyMatch( + prefix -> + String.join("", context.getServletInfo(), context.getPathInfo()) + .startsWith(prefix))) { return new Authentication( - new Actor(ActorType.USER, systemClientId), - "", - Collections.emptyMap() - ); + new Actor(ActorType.USER, systemClientId), "", Collections.emptyMap()); } throw new AuthenticationException("Authorization not allowed. Non-health check endpoint."); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java index 4e1b3cf7f73aa..19f135debdae4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/authenticator/NoOpAuthenticator.java @@ -1,12 +1,14 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticatorContext; +import com.datahub.plugins.auth.authentication.Authenticator; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.Constants; import java.util.Collections; @@ -16,16 +18,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.datahub.authentication.AuthenticationConstants.*; - - /** - * This Authenticator is used as a no-op to simply convert the X-DataHub-Actor header into a valid Authentication, or fall - * back to resolving a system {@link Actor} by default. + * This Authenticator is used as a no-op to simply convert the X-DataHub-Actor header into a valid + * Authentication, or fall back to resolving a system {@link Actor} by default. * - * It exists to support deployments that do not have Metadata Service Authentication enabled. + *
<p>
It exists to support deployments that do not have Metadata Service Authentication enabled. * - * Notice that this authenticator should generally be avoided in production. + *
<p>
Notice that this authenticator should generally be avoided in production. */ @Slf4j public class NoOpAuthenticator implements Authenticator { @@ -33,24 +32,29 @@ public class NoOpAuthenticator implements Authenticator { private String systemClientId; @Override - public void init(@Nonnull final Map config, @Nullable final AuthenticatorContext context) { + public void init( + @Nonnull final Map config, @Nullable final AuthenticatorContext context) { Objects.requireNonNull(config, "Config parameter cannot be null"); - this.systemClientId = Objects.requireNonNull((String) config.get(SYSTEM_CLIENT_ID_CONFIG), - String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); + this.systemClientId = + Objects.requireNonNull( + (String) config.get(SYSTEM_CLIENT_ID_CONFIG), + String.format("Missing required config %s", SYSTEM_CLIENT_ID_CONFIG)); } @Override - public Authentication authenticate(@Nonnull AuthenticationRequest context) throws AuthenticationException { + public Authentication authenticate(@Nonnull AuthenticationRequest context) + throws AuthenticationException { Objects.requireNonNull(context); String actorUrn = context.getRequestHeaders().get(LEGACY_X_DATAHUB_ACTOR_HEADER); // For backwards compatibility, support pulling actor context from the deprecated // X-DataHub-Actor header. if (actorUrn == null || "".equals(actorUrn)) { - log.debug(String.format("Found no X-DataHub-Actor header provided with the request. Falling back to system creds %s", Constants.UNKNOWN_ACTOR)); - return new Authentication( - new Actor(ActorType.USER, this.systemClientId), "" - ); + log.debug( + String.format( + "Found no X-DataHub-Actor header provided with the request. Falling back to system creds %s", + Constants.UNKNOWN_ACTOR)); + return new Authentication(new Actor(ActorType.USER, this.systemClientId), ""); } // If not provided, fallback to system caller identity. @@ -58,8 +62,7 @@ public Authentication authenticate(@Nonnull AuthenticationRequest context) throw // When authentication is disabled, assume everyone is a normal user. new Actor(ActorType.USER, getActorIdFromUrn(actorUrn)), "", // No Credentials provided. 
- Collections.emptyMap() - ); + Collections.emptyMap()); } private String getActorIdFromUrn(final String urnStr) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java index 29ec2f73dc688..f33ae5de130da 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/group/GroupService.java @@ -1,5 +1,7 @@ package com.datahub.authentication.group; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -35,15 +37,14 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class GroupService { private final EntityClient _entityClient; private final EntityService _entityService; private final GraphClient _graphClient; - public GroupService(@Nonnull EntityClient entityClient, @Nonnull EntityService entityService, + public GroupService( + @Nonnull EntityClient entityClient, + @Nonnull EntityService entityService, @Nonnull GraphClient graphClient) { Objects.requireNonNull(entityClient, "entityClient must not be null!"); Objects.requireNonNull(entityService, "entityService must not be null!"); @@ -64,7 +65,9 @@ public Origin getGroupOrigin(@Nonnull final Urn groupUrn) { return (Origin) _entityService.getLatestAspect(groupUrn, ORIGIN_ASPECT_NAME); } - public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn groupUrn, + public void addUserToNativeGroup( + @Nonnull final Urn userUrn, + @Nonnull final Urn groupUrn, final Authentication authentication) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(groupUrn, "groupUrn must not be null"); @@ -76,7 +79,8 @@ public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn try { // First, fetch user's group membership aspect. - NativeGroupMembership nativeGroupMembership = getExistingNativeGroupMembership(userUrn, authentication); + NativeGroupMembership nativeGroupMembership = + getExistingNativeGroupMembership(userUrn, authentication); // Handle the duplicate case. 
nativeGroupMembership.getNativeGroups().remove(groupUrn); nativeGroupMembership.getNativeGroups().add(groupUrn); @@ -94,13 +98,18 @@ public void addUserToNativeGroup(@Nonnull final Urn userUrn, @Nonnull final Urn } } - public String createNativeGroup(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, - @Nonnull final String groupDescription, final Authentication authentication) throws Exception { + public String createNativeGroup( + @Nonnull final CorpGroupKey corpGroupKey, + @Nonnull final String groupName, + @Nonnull final String groupDescription, + final Authentication authentication) + throws Exception { Objects.requireNonNull(corpGroupKey, "corpGroupKey must not be null"); Objects.requireNonNull(groupName, "groupName must not be null"); Objects.requireNonNull(groupDescription, "groupDescription must not be null"); - Urn corpGroupUrn = EntityKeyUtils.convertEntityKeyToUrn(corpGroupKey, Constants.CORP_GROUP_ENTITY_NAME); + Urn corpGroupUrn = + EntityKeyUtils.convertEntityKeyToUrn(corpGroupKey, Constants.CORP_GROUP_ENTITY_NAME); if (groupExists(corpGroupUrn)) { throw new IllegalArgumentException("This Group already exists!"); } @@ -110,22 +119,34 @@ public String createNativeGroup(@Nonnull final CorpGroupKey corpGroupKey, @Nonnu return groupInfo; } - public void removeExistingNativeGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List userUrnList, - final Authentication authentication) throws Exception { + public void removeExistingNativeGroupMembers( + @Nonnull final Urn groupUrn, + @Nonnull final List userUrnList, + final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); Objects.requireNonNull(userUrnList, "userUrnList must not be null"); final Set userUrns = new HashSet<>(userUrnList); for (Urn userUrn : userUrns) { - final Map entityResponseMap = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, userUrns, - Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication); + final Map entityResponseMap = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + userUrns, + Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + authentication); EntityResponse entityResponse = entityResponseMap.get(userUrn); if (entityResponse == null) { continue; } - final NativeGroupMembership nativeGroupMembership = new NativeGroupMembership( - entityResponse.getAspects().get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + final NativeGroupMembership nativeGroupMembership = + new NativeGroupMembership( + entityResponse + .getAspects() + .get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME) + .getValue() + .data()); if (nativeGroupMembership.getNativeGroups().remove(groupUrn)) { // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -139,8 +160,9 @@ public void removeExistingNativeGroupMembers(@Nonnull final Urn groupUrn, @Nonnu } } - public void migrateGroupMembershipToNativeGroupMembership(@Nonnull final Urn groupUrn, final String actorUrnStr, - final Authentication authentication) throws Exception { + public void migrateGroupMembershipToNativeGroupMembership( + @Nonnull final Urn groupUrn, final String actorUrnStr, final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Get the existing set of users @@ -153,26 +175,41 @@ public void migrateGroupMembershipToNativeGroupMembership(@Nonnull final Urn gro userUrnList.forEach(userUrn -> addUserToNativeGroup(userUrn, groupUrn, authentication)); } - NativeGroupMembership getExistingNativeGroupMembership(@Nonnull final Urn userUrn, - final Authentication authentication) throws Exception { + NativeGroupMembership getExistingNativeGroupMembership( + @Nonnull final Urn userUrn, final Authentication authentication) throws Exception { final EntityResponse entityResponse = - _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, Collections.singleton(userUrn), - Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), authentication).get(userUrn); + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + Collections.singleton(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + authentication) + .get(userUrn); NativeGroupMembership nativeGroupMembership; - if (entityResponse == null || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { + if (entityResponse == null + || !entityResponse.getAspects().containsKey(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { // If the user doesn't have the NativeGroupMembership aspect, create one. nativeGroupMembership = new NativeGroupMembership(); nativeGroupMembership.setNativeGroups(new UrnArray()); } else { - nativeGroupMembership = new NativeGroupMembership( - entityResponse.getAspects().get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + nativeGroupMembership = + new NativeGroupMembership( + entityResponse + .getAspects() + .get(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME) + .getValue() + .data()); } return nativeGroupMembership; } - String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final String groupName, - @Nonnull final String groupDescription, final Authentication authentication) throws Exception { + String createGroupInfo( + @Nonnull final CorpGroupKey corpGroupKey, + @Nonnull final String groupName, + @Nonnull final String groupDescription, + final Authentication authentication) + throws Exception { Objects.requireNonNull(corpGroupKey, "corpGroupKey must not be null"); Objects.requireNonNull(groupName, "groupName must not be null"); Objects.requireNonNull(groupDescription, "groupDescription must not be null"); @@ -184,7 +221,10 @@ String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final corpGroupInfo.setGroups(new CorpGroupUrnArray()); corpGroupInfo.setMembers(new CorpuserUrnArray()); corpGroupInfo.setAdmins(new CorpuserUrnArray()); - corpGroupInfo.setCreated(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + corpGroupInfo.setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); @@ -196,7 +236,8 @@ String createGroupInfo(@Nonnull final CorpGroupKey corpGroupKey, @Nonnull final return _entityClient.ingestProposal(proposal, authentication); } - void createNativeGroupOrigin(@Nonnull final Urn groupUrn, final Authentication authentication) throws Exception { + void createNativeGroupOrigin(@Nonnull final Urn groupUrn, final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); // Create the Group info. @@ -217,20 +258,33 @@ List getExistingGroupMembers(@Nonnull final Urn groupUrn, final String acto Objects.requireNonNull(groupUrn, "groupUrn must not be null"); final EntityRelationships relationships = - _graphClient.getRelatedEntities(groupUrn.toString(), ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, 0, 500, actorUrnStr); - return relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + _graphClient.getRelatedEntities( + groupUrn.toString(), + ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + actorUrnStr); + return relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); } - void removeExistingGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List userUrnList, - final Authentication authentication) throws Exception { + void removeExistingGroupMembers( + @Nonnull final Urn groupUrn, + @Nonnull final List userUrnList, + final Authentication authentication) + throws Exception { Objects.requireNonNull(groupUrn, "groupUrn must not be null"); Objects.requireNonNull(userUrnList, "userUrnList must not be null"); final Set userUrns = new HashSet<>(userUrnList); for (Urn userUrn : userUrns) { final Map entityResponseMap = - _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, userUrns, Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + userUrns, + Collections.singleton(GROUP_MEMBERSHIP_ASPECT_NAME), authentication); EntityResponse entityResponse = entityResponseMap.get(userUrn); if (entityResponse == null) { @@ -238,7 +292,8 @@ void removeExistingGroupMembers(@Nonnull final Urn groupUrn, @Nonnull final List } final GroupMembership groupMembership = - new GroupMembership(entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); + new GroupMembership( + entityResponse.getAspects().get(GROUP_MEMBERSHIP_ASPECT_NAME).getValue().data()); if (groupMembership.getGroups().remove(groupUrn)) { // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java index 35052810236a0..73add48958f60 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java @@ -1,5 +1,8 @@ package com.datahub.authentication.invite; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class InviteTokenService { @@ -42,26 +41,33 @@ public Urn getInviteTokenUrn(@Nonnull final String inviteTokenStr) throws URISyn return Urn.createFromString(inviteTokenUrnStr); } - public boolean isInviteTokenValid(@Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + public boolean isInviteTokenValid( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) throws RemoteInvocationException { return _entityClient.exists(inviteTokenUrn, authentication); } @Nullable - public Urn getInviteTokenRole(@Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + public Urn getInviteTokenRole( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) throws URISyntaxException, RemoteInvocationException { - final com.linkedin.identity.InviteToken inviteToken = getInviteTokenEntity(inviteTokenUrn, authentication); + final com.linkedin.identity.InviteToken inviteToken = + getInviteTokenEntity(inviteTokenUrn, authentication); return inviteToken.hasRole() ? inviteToken.getRole() : null; } @Nonnull - public String getInviteToken(@Nullable final String roleUrnStr, boolean regenerate, - @Nonnull final Authentication authentication) throws Exception { + public String getInviteToken( + @Nullable final String roleUrnStr, + boolean regenerate, + @Nonnull final Authentication authentication) + throws Exception { final Filter inviteTokenFilter = roleUrnStr == null ? 
createInviteTokenFilter() : createInviteTokenFilter(roleUrnStr); final SearchResult searchResult = - _entityClient.filter(INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); + _entityClient.filter( + INVITE_TOKEN_ENTITY_NAME, inviteTokenFilter, null, 0, 10, authentication); final int numEntities = searchResult.getEntities().size(); // If there is more than one invite token, wipe all of them and generate a fresh one @@ -78,14 +84,19 @@ public String getInviteToken(@Nullable final String roleUrnStr, boolean regenera final SearchEntity searchEntity = searchResult.getEntities().get(0); final Urn inviteTokenUrn = searchEntity.getEntity(); - com.linkedin.identity.InviteToken inviteToken = getInviteTokenEntity(inviteTokenUrn, authentication); + com.linkedin.identity.InviteToken inviteToken = + getInviteTokenEntity(inviteTokenUrn, authentication); return _secretService.decrypt(inviteToken.getToken()); } - private com.linkedin.identity.InviteToken getInviteTokenEntity(@Nonnull final Urn inviteTokenUrn, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { + private com.linkedin.identity.InviteToken getInviteTokenEntity( + @Nonnull final Urn inviteTokenUrn, @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { final EntityResponse inviteTokenEntity = - _entityClient.getV2(INVITE_TOKEN_ENTITY_NAME, inviteTokenUrn, Collections.singleton(INVITE_TOKEN_ASPECT_NAME), + _entityClient.getV2( + INVITE_TOKEN_ENTITY_NAME, + inviteTokenUrn, + Collections.singleton(INVITE_TOKEN_ASPECT_NAME), authentication); if (inviteTokenEntity == null) { @@ -96,9 +107,12 @@ private com.linkedin.identity.InviteToken getInviteTokenEntity(@Nonnull final Ur // If invite token aspect is not present, create a new one. Otherwise, return existing one. 
if (!aspectMap.containsKey(INVITE_TOKEN_ASPECT_NAME)) { throw new RuntimeException( - String.format("Invite token %s does not contain aspect %s", inviteTokenUrn, INVITE_TOKEN_ASPECT_NAME)); + String.format( + "Invite token %s does not contain aspect %s", + inviteTokenUrn, INVITE_TOKEN_ASPECT_NAME)); } - return new com.linkedin.identity.InviteToken(aspectMap.get(INVITE_TOKEN_ASPECT_NAME).getValue().data()); + return new com.linkedin.identity.InviteToken( + aspectMap.get(INVITE_TOKEN_ASPECT_NAME).getValue().data()); } private Filter createInviteTokenFilter() { @@ -140,7 +154,8 @@ private Filter createInviteTokenFilter(@Nonnull final String roleUrnStr) { } @Nonnull - private String createInviteToken(@Nullable final String roleUrnStr, @Nonnull final Authentication authentication) + private String createInviteToken( + @Nullable final String roleUrnStr, @Nonnull final Authentication authentication) throws Exception { String inviteTokenStr = _secretService.generateUrlSafeToken(INVITE_TOKEN_LENGTH); String hashedInviteTokenStr = _secretService.hashString(inviteTokenStr); @@ -155,21 +170,26 @@ private String createInviteToken(@Nullable final String roleUrnStr, @Nonnull fin // Ingest new InviteToken aspect final MetadataChangeProposal proposal = - buildMetadataChangeProposal(INVITE_TOKEN_ENTITY_NAME, inviteTokenKey, INVITE_TOKEN_ASPECT_NAME, - inviteTokenAspect); + buildMetadataChangeProposal( + INVITE_TOKEN_ENTITY_NAME, inviteTokenKey, INVITE_TOKEN_ASPECT_NAME, inviteTokenAspect); _entityClient.ingestProposal(proposal, authentication); return inviteTokenStr; } - private void deleteExistingInviteTokens(@Nonnull final SearchResult searchResult, - @Nonnull final Authentication authentication) { - searchResult.getEntities().forEach(entity -> { - try { - _entityClient.deleteEntity(entity.getEntity(), authentication); - } catch (RemoteInvocationException e) { - log.error(String.format("Failed to delete invite token entity %s", entity.getEntity()), e); - } - }); + private void deleteExistingInviteTokens( + @Nonnull final SearchResult searchResult, @Nonnull final Authentication authentication) { + searchResult + .getEntities() + .forEach( + entity -> { + try { + _entityClient.deleteEntity(entity.getEntity(), authentication); + } catch (RemoteInvocationException e) { + log.error( + String.format("Failed to delete invite token entity %s", entity.getEntity()), + e); + } + }); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java index c3b7c4bcf3be7..ec5d5f1e436b7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/post/PostService.java @@ -1,5 +1,8 @@ package com.datahub.authentication.post; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Media; import com.linkedin.common.MediaType; @@ -20,10 +23,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class PostService { @@ -38,9 +37,14 @@ public Media mapMedia(@Nonnull String type, @Nonnull String location) { } @Nonnull - public PostContent mapPostContent(@Nonnull String contentType, 
@Nonnull String title, @Nullable String description, @Nullable String link, + public PostContent mapPostContent( + @Nonnull String contentType, + @Nonnull String title, + @Nullable String description, + @Nullable String link, @Nullable Media media) { - final PostContent postContent = new PostContent().setType(PostContentType.valueOf(contentType)).setTitle(title); + final PostContent postContent = + new PostContent().setType(PostContentType.valueOf(contentType)).setTitle(title); if (description != null) { postContent.setDescription(description); } @@ -53,15 +57,20 @@ public PostContent mapPostContent(@Nonnull String contentType, @Nonnull String t return postContent; } - public boolean createPost(@Nonnull String postType, @Nonnull PostContent postContent, - @Nonnull Authentication authentication) throws RemoteInvocationException { + public boolean createPost( + @Nonnull String postType, + @Nonnull PostContent postContent, + @Nonnull Authentication authentication) + throws RemoteInvocationException { final String uuid = UUID.randomUUID().toString(); final PostKey postKey = new PostKey().setId(uuid); final long currentTimeMillis = Instant.now().toEpochMilli(); - final PostInfo postInfo = new PostInfo().setType(PostType.valueOf(postType)) - .setContent(postContent) - .setCreated(currentTimeMillis) - .setLastModified(currentTimeMillis); + final PostInfo postInfo = + new PostInfo() + .setType(PostType.valueOf(postType)) + .setContent(postContent) + .setCreated(currentTimeMillis) + .setLastModified(currentTimeMillis); final MetadataChangeProposal proposal = buildMetadataChangeProposal(POST_ENTITY_NAME, postKey, POST_INFO_ASPECT_NAME, postInfo); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java index ea6de3fc7dca0..bb2d5f0efd2c7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolver.java @@ -12,16 +12,15 @@ import java.security.KeyFactory; import java.security.NoSuchAlgorithmException; import java.security.PublicKey; +import java.security.interfaces.RSAPublicKey; import java.security.spec.InvalidKeySpecException; +import java.security.spec.RSAPublicKeySpec; import java.security.spec.X509EncodedKeySpec; import java.util.Base64; import java.util.HashSet; -import java.security.spec.RSAPublicKeySpec; -import java.security.interfaces.RSAPublicKey; import org.json.JSONArray; import org.json.JSONObject; - public class DataHubJwtSigningKeyResolver extends SigningKeyResolverAdapter { public HttpClient client; @@ -38,9 +37,7 @@ public DataHubJwtSigningKeyResolver(HashSet list, String publicKey, Stri client = HttpClient.newHttpClient(); } - /** - * inspect the header or claims, lookup and return the signing key - **/ + /** inspect the header or claims, lookup and return the signing key */ @Override public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) { @@ -66,12 +63,11 @@ public Key resolveSigningKey(JwsHeader jwsHeader, Claims claims) { return key; } - /** - * Get public keys from issuer and filter public key for token signature based on token keyId. - **/ + /** Get public keys from issuer and filter public key for token signature based on token keyId. 
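 *
 * <p>A minimal sketch of the expected payload (field values are invented, not lines from this
 * patch). The certs endpoint queried below is expected to return a JWKS document whose entries
 * carry the fields this resolver reads: "kid" to match the token's key id and, for RSA keys, the
 * "n"/"e" parameters, e.g.:
 *
 * <pre>{@code
 * {"keys": [{"kid": "abc123", "kty": "RSA", "alg": "RS256", "n": "<base64url-modulus>", "e": "AQAB"}]}
 * }</pre>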
*/ private PublicKey loadPublicKey(String issuer, String keyId) throws Exception { - HttpRequest request = HttpRequest.newBuilder().uri(URI.create(issuer + "/protocol/openid-connect/certs")).build(); + HttpRequest request = + HttpRequest.newBuilder().uri(URI.create(issuer + "/protocol/openid-connect/certs")).build(); HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); var body = new JSONObject(response.body()); @@ -87,9 +83,9 @@ private PublicKey loadPublicKey(String issuer, String keyId) throws Exception { } /** - * Generate public key based on token algorithem and public token received from issuer. - * Supported algo RSA - **/ + * Generate a public key based on the token algorithm and the public token received from the + * issuer. Supported algorithm: RSA. + */ private PublicKey getPublicKey(JSONObject token) throws Exception { PublicKey publicKey = null; @@ -97,8 +93,10 @@ private PublicKey getPublicKey(JSONObject token) throws Exception { case "RSA": try { KeyFactory kf = KeyFactory.getInstance("RSA"); - BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); - BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); + BigInteger modulus = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); + BigInteger exponent = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); publicKey = kf.generatePublic(new RSAPublicKeySpec(modulus, exponent)); } catch (InvalidKeySpecException e) { throw new InvalidKeySpecException("Invalid public key", e); @@ -113,10 +111,7 @@ private PublicKey getPublicKey(JSONObject token) throws Exception { return publicKey; } - /** - * Generate public Key based on algorithem and 64 encoded public key. - * Supported algo RSA - **/ + /** Generate a public key based on the algorithm and a base64-encoded public key. Supported algorithm: RSA. */ private PublicKey generatePublicKey(String alg, String key) throws Exception { PublicKey publicKey = null; diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java index 125bba7ec3280..2879f15784370 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatefulTokenService.java @@ -32,10 +32,10 @@ import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang.ArrayUtils; - /** - * Service responsible for generating JWT tokens & managing the associated metadata entities in GMS for use within - * DataHub that are stored in the entity service so that we can list & revoke tokens as needed. + * Service responsible for generating JWT tokens & managing the associated metadata entities in GMS + * for use within DataHub that are stored in the entity service so that we can list & revoke tokens + * as needed. 
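 *
 * <p>A minimal usage sketch (not part of this patch; TokenType.PERSONAL and the
 * entityService/salt wiring are assumed for illustration). A typical mint/validate/revoke round
 * trip with the API below might look like:
 *
 * <pre>{@code
 * StatefulTokenService tokenService =
 *     new StatefulTokenService(signingKey, "HS256", null, entityService, salt);
 * String token =
 *     tokenService.generateAccessToken(
 *         TokenType.PERSONAL,
 *         new Actor(ActorType.USER, "datahub"),
 *         "my-token",
 *         "token used by ingestion scripts",
 *         "urn:li:corpuser:datahub");
 * TokenClaims claims = tokenService.validateAccessToken(token); // throws TokenException if revoked
 * tokenService.revokeAccessToken(tokenService.hash(token));
 * }</pre>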
*/ @Slf4j public class StatefulTokenService extends StatelessTokenService { @@ -44,47 +44,65 @@ public class StatefulTokenService extends StatelessTokenService { private final LoadingCache _revokedTokenCache; private final String salt; - public StatefulTokenService(@Nonnull final String signingKey, @Nonnull final String signingAlgorithm, - @Nullable final String iss, @Nonnull final EntityService entityService, @Nonnull final String salt) { + public StatefulTokenService( + @Nonnull final String signingKey, + @Nonnull final String signingAlgorithm, + @Nullable final String iss, + @Nonnull final EntityService entityService, + @Nonnull final String salt) { super(signingKey, signingAlgorithm, iss); this._entityService = entityService; - this._revokedTokenCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(5, TimeUnit.MINUTES) - .build(new CacheLoader() { - @Override - public Boolean load(final String key) { - final Urn accessUrn = Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); - return !_entityService.exists(accessUrn); - } - }); + this._revokedTokenCache = + CacheBuilder.newBuilder() + .maximumSize(10000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build( + new CacheLoader() { + @Override + public Boolean load(final String key) { + final Urn accessUrn = + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, key); + return !_entityService.exists(accessUrn); + } + }); this.salt = salt; } /** * Generates a JWT for an actor with a default expiration time. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + *
<p>
Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Override public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor) { - throw new UnsupportedOperationException("Please use generateToken(Token, Actor, String, String, String) endpoint " - + "instead. Reason: StatefulTokenService requires that all tokens have a name & ownerUrn specified."); + throw new UnsupportedOperationException( + "Please use generateToken(Token, Actor, String, String, String) endpoint " + + "instead. Reason: StatefulTokenService requires that all tokens have a name & ownerUrn specified."); } @Nonnull - public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor, - @Nonnull final String name, final String description, final String actorUrn) { + public String generateAccessToken( + @Nonnull final TokenType type, + @Nonnull final Actor actor, + @Nonnull final String name, + final String description, + final String actorUrn) { Date date = new Date(); long timeMilli = date.getTime(); - return generateAccessToken(type, actor, DEFAULT_EXPIRES_IN_MS, timeMilli, name, description, actorUrn); + return generateAccessToken( + type, actor, DEFAULT_EXPIRES_IN_MS, timeMilli, name, description, actorUrn); } @Nonnull - public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor, - @Nullable final Long expiresInMs, @Nonnull final long createdAtInMs, @Nonnull final String tokenName, - @Nullable final String tokenDescription, final String actorUrn) { + public String generateAccessToken( + @Nonnull final TokenType type, + @Nonnull final Actor actor, + @Nullable final Long expiresInMs, + @Nonnull final long createdAtInMs, + @Nonnull final String tokenName, + @Nullable final String tokenDescription, + final String actorUrn) { Objects.requireNonNull(type); Objects.requireNonNull(actor); @@ -101,7 +119,8 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final final MetadataChangeProposal proposal = new MetadataChangeProposal(); - // Create the access token key --> use a hashed access token value as a unique id to ensure it's not duplicated. + // Create the access token key --> use a hashed access token value as a unique id to ensure it's + // not duplicated. 
final DataHubAccessTokenKey key = new DataHubAccessTokenKey(); key.setId(tokenHash); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); @@ -124,14 +143,20 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final proposal.setChangeType(ChangeType.UPSERT); log.info("About to ingest access token metadata {}", proposal); - final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); + final AuditStamp auditStamp = + AuditStampUtils.createDefaultAuditStamp().setActor(UrnUtils.getUrn(actorUrn)); - Stream proposalStream = Stream.concat(Stream.of(proposal), + Stream proposalStream = + Stream.concat( + Stream.of(proposal), AspectUtils.getAdditionalChanges(proposal, _entityService).stream()); - _entityService.ingestProposal(AspectsBatchImpl.builder() + _entityService.ingestProposal( + AspectsBatchImpl.builder() .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry()) - .build(), auditStamp, false); + .build(), + auditStamp, + false); return accessToken; } @@ -153,7 +178,8 @@ public TokenClaims validateAccessToken(@Nonnull String accessToken) throws Token this.revokeAccessToken(hash(accessToken)); throw e; } catch (final ExecutionException e) { - throw new TokenException("Failed to validate DataHub token: Unable to load token information from store", e); + throw new TokenException( + "Failed to validate DataHub token: Unable to load token information from store", e); } } @@ -171,9 +197,7 @@ public void revokeAccessToken(@Nonnull String hashedToken) throws TokenException throw new TokenException("Access token no longer exists"); } - /** - * Hashes the input after salting it. - */ + /** Hashes the input after salting it. */ public String hash(String input) { final byte[] saltingKeyBytes = this.salt.getBytes(); final byte[] inputBytes = input.getBytes(); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java index fa8ec8c818734..71f12477a33b2 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/StatelessTokenService.java @@ -21,10 +21,9 @@ import javax.annotation.Nullable; import javax.crypto.spec.SecretKeySpec; - /** - * Service responsible for generating JWT tokens for use within DataHub in stateless way. - * This service is responsible only for generating tokens, it will not do anything else with them. + * Service responsible for generating JWT tokens for use within DataHub in stateless way. This + * service is responsible only for generating tokens, it will not do anything else with them. 
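 *
 * <p>A minimal usage sketch (not part of this patch; TokenType.SESSION is assumed for
 * illustration). Because no state is kept, tokens minted here can be verified but never revoked:
 *
 * <pre>{@code
 * StatelessTokenService service = new StatelessTokenService(signingKey, "HS256");
 * String jwt = service.generateAccessToken(TokenType.SESSION, new Actor(ActorType.USER, "jdoe"));
 * TokenClaims claims = service.validateAccessToken(jwt); // TokenExpiredException once expired
 * }</pre>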
*/ public class StatelessTokenService { @@ -40,17 +39,14 @@ public class StatelessTokenService { private final String iss; public StatelessTokenService( - @Nonnull final String signingKey, - @Nonnull final String signingAlgorithm - ) { + @Nonnull final String signingKey, @Nonnull final String signingAlgorithm) { this(signingKey, signingAlgorithm, null); } public StatelessTokenService( @Nonnull final String signingKey, @Nonnull final String signingAlgorithm, - @Nullable final String iss - ) { + @Nullable final String iss) { this.signingKey = Objects.requireNonNull(signingKey); this.signingAlgorithm = validateAlgorithm(Objects.requireNonNull(signingAlgorithm)); this.iss = iss; @@ -59,8 +55,8 @@ public StatelessTokenService( /** * Generates a JWT for an actor with a default expiration time. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + *
<p>
Note that the caller of this method is expected to authorize the action of generating a + * token. */ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final Actor actor) { return generateAccessToken(type, actor, DEFAULT_EXPIRES_IN_MS); @@ -69,19 +65,19 @@ public String generateAccessToken(@Nonnull final TokenType type, @Nonnull final /** * Generates a JWT for an actor with a specific duration in milliseconds. * - * Note that the caller of this method is expected to authorize the action of generating a token. - * + *
<p>
Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Nonnull public String generateAccessToken( - @Nonnull final TokenType type, - @Nonnull final Actor actor, - @Nullable final Long expiresInMs) { + @Nonnull final TokenType type, @Nonnull final Actor actor, @Nullable final Long expiresInMs) { Objects.requireNonNull(type); Objects.requireNonNull(actor); Map claims = new HashMap<>(); - claims.put(TokenClaims.TOKEN_VERSION_CLAIM_NAME, String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. + claims.put( + TokenClaims.TOKEN_VERSION_CLAIM_NAME, + String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now. claims.put(TokenClaims.TOKEN_TYPE_CLAIM_NAME, type.toString()); claims.put(TokenClaims.ACTOR_TYPE_CLAIM_NAME, actor.getType()); claims.put(TokenClaims.ACTOR_ID_CLAIM_NAME, actor.getId()); @@ -91,7 +87,8 @@ public String generateAccessToken( /** * Generates a JWT for a custom set of claims. * - * Note that the caller of this method is expected to authorize the action of generating a token. + *
<p>
Note that the caller of this method is expected to authorize the action of generating a + * token. */ @Nonnull public String generateAccessToken( @@ -100,10 +97,8 @@ public String generateAccessToken( @Nullable final Long expiresInMs) { Objects.requireNonNull(sub); Objects.requireNonNull(claims); - final JwtBuilder builder = Jwts.builder() - .addClaims(claims) - .setId(UUID.randomUUID().toString()) - .setSubject(sub); + final JwtBuilder builder = + Jwts.builder().addClaims(claims).setId(UUID.randomUUID().toString()).setSubject(sub); if (expiresInMs != null) { builder.setExpiration(new Date(System.currentTimeMillis() + expiresInMs)); @@ -111,7 +106,7 @@ public String generateAccessToken( if (this.iss != null) { builder.setIssuer(this.iss); } - byte [] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); + byte[] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); final Key signingKey = new SecretKeySpec(apiKeySecretBytes, this.signingAlgorithm.getJcaName()); return builder.signWith(signingKey, this.signingAlgorithm).compact(); } @@ -119,18 +114,16 @@ public String generateAccessToken( /** * Validates a JWT issued by this service. * - * Throws an {@link TokenException} in the case that the token cannot be verified. + *
+   * <p>
Throws an {@link TokenException} in the case that the token cannot be verified. */ @Nonnull public TokenClaims validateAccessToken(@Nonnull final String accessToken) throws TokenException { Objects.requireNonNull(accessToken); try { - byte [] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); + byte[] apiKeySecretBytes = this.signingKey.getBytes(StandardCharsets.UTF_8); final String base64Key = Base64.getEncoder().encodeToString(apiKeySecretBytes); - final Jws jws = Jwts.parserBuilder() - .setSigningKey(base64Key) - .build() - .parseClaimsJws(accessToken); + final Jws jws = + Jwts.parserBuilder().setSigningKey(base64Key).build().parseClaimsJws(accessToken); validateTokenAlgorithm(jws.getHeader().getAlgorithm()); final Claims claims = jws.getBody(); final String tokenVersion = claims.get(TokenClaims.TOKEN_VERSION_CLAIM_NAME, String.class); @@ -138,33 +131,37 @@ public TokenClaims validateAccessToken(@Nonnull final String accessToken) throws final String actorId = claims.get(TokenClaims.ACTOR_ID_CLAIM_NAME, String.class); final String actorType = claims.get(TokenClaims.ACTOR_TYPE_CLAIM_NAME, String.class); if (tokenType != null && actorId != null && actorType != null) { - return new TokenClaims( - TokenVersion.fromNumericStringValue(tokenVersion), - TokenType.valueOf(tokenType), - ActorType.valueOf(actorType), - actorId, - claims.getExpiration() == null ? null : claims.getExpiration().getTime()); + return new TokenClaims( + TokenVersion.fromNumericStringValue(tokenVersion), + TokenType.valueOf(tokenType), + ActorType.valueOf(actorType), + actorId, + claims.getExpiration() == null ? null : claims.getExpiration().getTime()); } } catch (io.jsonwebtoken.ExpiredJwtException e) { throw new TokenExpiredException("Failed to validate DataHub token. Token has expired.", e); } catch (Exception e) { throw new TokenException("Failed to validate DataHub token", e); } - throw new TokenException("Failed to validate DataHub token: Found malformed or missing 'actor' claim."); + throw new TokenException( + "Failed to validate DataHub token: Found malformed or missing 'actor' claim."); } private void validateTokenAlgorithm(final String algorithm) throws TokenException { try { validateAlgorithm(algorithm); } catch (UnsupportedOperationException e) { - throw new TokenException(String.format("Failed to validate signing algorithm for provided JWT! Found %s", algorithm)); + throw new TokenException( + String.format( + "Failed to validate signing algorithm for provided JWT! Found %s", algorithm)); } } private SignatureAlgorithm validateAlgorithm(final String algorithm) { if (!SUPPORTED_ALGORITHMS.contains(algorithm)) { throw new UnsupportedOperationException( - String.format("Failed to create Token Service. Unsupported algorithm %s provided", algorithm)); + String.format( + "Failed to create Token Service. 
Unsupported algorithm %s provided", algorithm)); } return SignatureAlgorithm.valueOf(algorithm); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java index 05890cd2181ab..83e23a07918e7 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenClaims.java @@ -8,10 +8,7 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -/** - * Contains strongly-typed claims that appear in all DataHub granted access tokens. - */ +/** Contains strongly-typed claims that appear in all DataHub granted access tokens. */ public class TokenClaims { public static final String TOKEN_VERSION_CLAIM_NAME = "version"; @@ -21,42 +18,40 @@ public class TokenClaims { public static final String EXPIRATION_CLAIM = "exp"; /** - * The type of the access token, e.g. a session token issued by the frontend or a personal access token - * generated for programmatic use. + * The type of the access token, e.g. a session token issued by the frontend or a personal access + * token generated for programmatic use. */ private final TokenVersion tokenVersion; /** - * The type of the access token, e.g. a session token issued by the frontend or a personal access token - * generated for programmatic use. + * The type of the access token, e.g. a session token issued by the frontend or a personal access + * token generated for programmatic use. */ private final TokenType tokenType; /** * The type of an authenticated DataHub actor. * - * E.g. "urn:li:corpuser:johnsmith" is of type USER. + *
+   * <p>
E.g. "urn:li:corpuser:johnsmith" is of type USER. */ private final ActorType actorType; /** * A unique identifier for an actor of a particular type. * - * E.g. "johnsmith" inside urn:li:corpuser:johnsmith. + *
+   * <p>
E.g. "johnsmith" inside urn:li:corpuser:johnsmith. */ private final String actorId; - /** - * The expiration time in milliseconds if one exists, null otherwise. - */ + /** The expiration time in milliseconds if one exists, null otherwise. */ private final Long expirationInMs; public TokenClaims( - @Nonnull TokenVersion tokenVersion, - @Nonnull TokenType tokenType, - @Nonnull final ActorType actorType, - @Nonnull final String actorId, - @Nullable Long expirationInMs) { + @Nonnull TokenVersion tokenVersion, + @Nonnull TokenType tokenType, + @Nonnull final ActorType actorType, + @Nonnull final String actorId, + @Nullable Long expirationInMs) { Objects.requireNonNull(tokenVersion); Objects.requireNonNull(tokenType); Objects.requireNonNull(actorType); @@ -68,51 +63,38 @@ public TokenClaims( this.expirationInMs = expirationInMs; } - /** - * Returns the version of the access token - */ + /** Returns the version of the access token */ public TokenVersion getTokenVersion() { return this.tokenVersion; } - /** - * Returns the type of an authenticated DataHub actor. - */ + /** Returns the type of an authenticated DataHub actor. */ public TokenType getTokenType() { return this.tokenType; } - /** - * Returns the type of an authenticated DataHub actor. - */ + /** Returns the type of an authenticated DataHub actor. */ public ActorType getActorType() { return this.actorType; } - /** - * Returns the expiration time in milliseconds if one exists, null otherwise. - */ + /** Returns the expiration time in milliseconds if one exists, null otherwise. */ public Long getExpirationInMs() { return this.expirationInMs; } - /** - * Returns a unique id associated with a DataHub actor of a particular type. - */ + /** Returns a unique id associated with a DataHub actor of a particular type. */ public String getActorId() { return this.actorId; } - /** - * Returns the claims in the DataHub Access token as a map. - */ + /** Returns the claims in the DataHub Access token as a map. */ public Map asMap() { return ImmutableMap.of( TOKEN_VERSION_CLAIM_NAME, this.tokenVersion.numericValue, TOKEN_TYPE_CLAIM_NAME, this.tokenType.toString(), ACTOR_TYPE_CLAIM_NAME, this.actorType.toString(), ACTOR_ID_CLAIM_NAME, this.actorId, - EXPIRATION_CLAIM, Optional.ofNullable(this.expirationInMs) - ); + EXPIRATION_CLAIM, Optional.ofNullable(this.expirationInMs)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java index 24b6daa830f47..9d239482f85f8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenException.java @@ -1,8 +1,6 @@ package com.datahub.authentication.token; -/** - * A checked exception that is thrown when a DataHub-issued access token cannot be verified. - */ +/** A checked exception that is thrown when a DataHub-issued access token cannot be verified. 
*/ public class TokenException extends Exception { public TokenException(final String message) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java index 6c4e5e037d4da..ae5d2daddcc0e 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenExpiredException.java @@ -1,8 +1,6 @@ package com.datahub.authentication.token; -/** - * A checked exception that is thrown when a DataHub-issued access token cannot be verified. - */ +/** A checked exception that is thrown when a DataHub-issued access token cannot be verified. */ public class TokenExpiredException extends TokenException { public TokenExpiredException(final String message) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java index ca5de37b0fad4..475f79da3805c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenType.java @@ -1,16 +1,10 @@ package com.datahub.authentication.token; -/** - * Represents a type of JWT access token granted by the {@link StatelessTokenService}. - */ +/** Represents a type of JWT access token granted by the {@link StatelessTokenService}. */ public enum TokenType { - /** - * A UI-initiated session token - */ + /** A UI-initiated session token */ SESSION, - /** - * A personal token for programmatic use - */ + /** A personal token for programmatic use */ PERSONAL; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java index 8f9189bf17b95..f1b362b71dfb4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/token/TokenVersion.java @@ -4,19 +4,14 @@ import java.util.Objects; import java.util.Optional; - -/** - * Represents a type of JWT access token granted by the {@link StatelessTokenService}. - */ +/** Represents a type of JWT access token granted by the {@link StatelessTokenService}. */ public enum TokenVersion { - /** - * The first version of the DataHub access token. - */ + /** The first version of the DataHub access token. */ ONE(1), /** - * The second version of the DataHub access token (latest). - * Used to represent tokens that are stateful and are stored within DataHub. + * The second version of the DataHub access token (latest). Used to represent tokens that are + * stateful and are stored within DataHub. */ TWO(2); @@ -26,37 +21,35 @@ public enum TokenVersion { this.numericValue = numericValue; } - /** - * Returns the numeric representation of the version - */ + /** Returns the numeric representation of the version */ public int getNumericValue() { return this.numericValue; } - /** - * Returns a {@link TokenVersion} provided a numeric token version. - */ + /** Returns a {@link TokenVersion} provided a numeric token version. 
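   * <p>Editor's note: e.g. {@code TokenVersion.fromNumericValue(1)} yields {@code ONE},
   * {@code fromNumericValue(2)} yields {@code TWO}, and any unmapped value throws an
   * {@code IllegalArgumentException}.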
*/ public static TokenVersion fromNumericValue(int num) { - Optional maybeVersion = Arrays.stream(TokenVersion.values()) - .filter(version -> num == version.getNumericValue()) - .findFirst(); + Optional maybeVersion = + Arrays.stream(TokenVersion.values()) + .filter(version -> num == version.getNumericValue()) + .findFirst(); if (maybeVersion.isPresent()) { return maybeVersion.get(); } - throw new IllegalArgumentException(String.format("Failed to find DataHubAccessTokenVersion %s", num)); + throw new IllegalArgumentException( + String.format("Failed to find DataHubAccessTokenVersion %s", num)); } - /** - * Returns a {@link TokenVersion} provided a stringified numeric token version. - */ + /** Returns a {@link TokenVersion} provided a stringified numeric token version. */ public static TokenVersion fromNumericStringValue(String num) { Objects.requireNonNull(num); - Optional maybeVersion = Arrays.stream(TokenVersion.values()) - .filter(version -> Integer.parseInt(num) == version.getNumericValue()) - .findFirst(); + Optional maybeVersion = + Arrays.stream(TokenVersion.values()) + .filter(version -> Integer.parseInt(num) == version.getNumericValue()) + .findFirst(); if (maybeVersion.isPresent()) { return maybeVersion.get(); } - throw new IllegalArgumentException(String.format("Failed to find DataHubAccessTokenVersion %s", num)); + throw new IllegalArgumentException( + String.format("Failed to find DataHubAccessTokenVersion %s", num)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java index bff675ddd9cb2..741d176f98c1b 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -1,5 +1,7 @@ package com.datahub.authentication.user; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationConfiguration; import com.linkedin.common.AuditStamp; @@ -22,12 +24,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - -/** - * Service responsible for creating, updating and authenticating native DataHub users. - */ +/** Service responsible for creating, updating and authenticating native DataHub users. 
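 *
 * <p>Illustrative usage (editor's sketch, not part of this patch; {@code systemAuthentication}
 * is an assumed variable, and the argument values are examples only):
 *
 * <pre>
 * nativeUserService.createNativeUser(
 *     "urn:li:corpuser:jane",   // user URN
 *     "Jane Doe",               // full name
 *     "jane@example.com",       // email
 *     "Engineer",               // title
 *     "s3cr3t-password",        // plaintext password; salted and hashed internally
 *     systemAuthentication);    // authentication performing the change
 * </pre>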
*/ @Slf4j @RequiredArgsConstructor public class NativeUserService { @@ -38,8 +35,14 @@ public class NativeUserService { private final SecretService _secretService; private final AuthenticationConfiguration _authConfig; - public void createNativeUser(@Nonnull String userUrnString, @Nonnull String fullName, @Nonnull String email, - @Nonnull String title, @Nonnull String password, @Nonnull Authentication authentication) throws Exception { + public void createNativeUser( + @Nonnull String userUrnString, + @Nonnull String fullName, + @Nonnull String email, + @Nonnull String title, + @Nonnull String password, + @Nonnull Authentication authentication) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnSting must not be null!"); Objects.requireNonNull(fullName, "fullName must not be null!"); Objects.requireNonNull(email, "email must not be null!"); @@ -49,7 +52,8 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full final Urn userUrn = Urn.createFromString(userUrnString); if (_entityService.exists(userUrn) - // Should never fail these due to Controller level check, but just in case more usages get put in + // Should never fail these due to Controller level check, but just in case more usages get + // put in || userUrn.toString().equals(SYSTEM_ACTOR) || userUrn.toString().equals(new CorpuserUrn(_authConfig.getSystemClientId()).toString()) || userUrn.toString().equals(DATAHUB_ACTOR) @@ -61,8 +65,13 @@ public void createNativeUser(@Nonnull String userUrnString, @Nonnull String full updateCorpUserCredentials(userUrn, password, authentication); } - void updateCorpUserInfo(@Nonnull Urn userUrn, @Nonnull String fullName, @Nonnull String email, @Nonnull String title, - Authentication authentication) throws Exception { + void updateCorpUserInfo( + @Nonnull Urn userUrn, + @Nonnull String fullName, + @Nonnull String email, + @Nonnull String title, + Authentication authentication) + throws Exception { // Construct corpUserInfo final CorpUserInfo corpUserInfo = new CorpUserInfo(); corpUserInfo.setFullName(fullName); @@ -86,7 +95,9 @@ void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) t CorpUserStatus corpUserStatus = new CorpUserStatus(); corpUserStatus.setStatus(CORP_USER_STATUS_ACTIVE); corpUserStatus.setLastModified( - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis())); + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis())); // Ingest corpUserStatus MCP final MetadataChangeProposal corpUserStatusProposal = new MetadataChangeProposal(); @@ -98,7 +109,8 @@ void updateCorpUserStatus(@Nonnull Urn userUrn, Authentication authentication) t _entityClient.ingestProposal(corpUserStatusProposal, authentication); } - void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, @Nonnull Authentication authentication) + void updateCorpUserCredentials( + @Nonnull Urn userUrn, @Nonnull String password, @Nonnull Authentication authentication) throws Exception { // Construct corpUserCredentials CorpUserCredentials corpUserCredentials = new CorpUserCredentials(); @@ -118,15 +130,18 @@ void updateCorpUserCredentials(@Nonnull Urn userUrn, @Nonnull String password, @ _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); } - public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString, Authentication authentication) - throws Exception { + public String generateNativeUserPasswordResetToken( + @Nonnull 
String userUrnString, Authentication authentication) throws Exception { Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { throw new RuntimeException("User does not exist or is a non-native user!"); } // Add reset token to CorpUserCredentials @@ -148,8 +163,12 @@ public String generateNativeUserPasswordResetToken(@Nonnull String userUrnString return passwordResetToken; } - public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull String password, - @Nonnull String resetToken, Authentication authentication) throws Exception { + public void resetCorpUserCredentials( + @Nonnull String userUrnString, + @Nonnull String password, + @Nonnull String resetToken, + Authentication authentication) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnString must not be null!"); Objects.requireNonNull(password, "password must not be null!"); Objects.requireNonNull(resetToken, "resetToken must not be null!"); @@ -157,24 +176,30 @@ public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull Str Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { throw new RuntimeException("User does not exist!"); } - if (!corpUserCredentials.hasPasswordResetToken() || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() + if (!corpUserCredentials.hasPasswordResetToken() + || !corpUserCredentials.hasPasswordResetTokenExpirationTimeMillis() || corpUserCredentials.getPasswordResetTokenExpirationTimeMillis() == null) { throw new RuntimeException("User has not generated a password reset token!"); } if (!_secretService.decrypt(corpUserCredentials.getPasswordResetToken()).equals(resetToken)) { - throw new RuntimeException("Invalid reset token. Please ask your administrator to send you an updated link!"); + throw new RuntimeException( + "Invalid reset token. Please ask your administrator to send you an updated link!"); } long currentTimeMillis = Instant.now().toEpochMilli(); if (currentTimeMillis > corpUserCredentials.getPasswordResetTokenExpirationTimeMillis()) { - throw new RuntimeException("Reset token has expired! Please ask your administrator to create a new one"); + throw new RuntimeException( + "Reset token has expired! 
Please ask your administrator to create a new one"); } // Construct corpUserCredentials @@ -194,14 +219,18 @@ public void resetCorpUserCredentials(@Nonnull String userUrnString, @Nonnull Str _entityClient.ingestProposal(corpUserCredentialsProposal, authentication); } - public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) throws Exception { + public boolean doesPasswordMatch(@Nonnull String userUrnString, @Nonnull String password) + throws Exception { Objects.requireNonNull(userUrnString, "userUrnSting must not be null!"); Objects.requireNonNull(password, "Password must not be null!"); Urn userUrn = Urn.createFromString(userUrnString); CorpUserCredentials corpUserCredentials = - (CorpUserCredentials) _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); - if (corpUserCredentials == null || !corpUserCredentials.hasSalt() || !corpUserCredentials.hasHashedPassword()) { + (CorpUserCredentials) + _entityService.getLatestAspect(userUrn, CORP_USER_CREDENTIALS_ASPECT_NAME); + if (corpUserCredentials == null + || !corpUserCredentials.hasSalt() + || !corpUserCredentials.hasHashedPassword()) { return false; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java index 7e7a1de176f06..9e8c1928c9de0 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/AuthorizerChain.java @@ -14,12 +14,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * A configurable chain of {@link Authorizer}s executed in series to attempt to authenticate an inbound request. + * A configurable chain of {@link Authorizer}s executed in series to attempt to authenticate an + * inbound request. * - * Individual {@link Authorizer}s are registered with the chain using {@link #register(Authorizer)}. - * The chain can be executed by invoking {@link #authorize(AuthorizationRequest)}. + *
+ * <p>
Individual {@link Authorizer}s are registered with the chain using {@link + * #register(Authorizer)}. The chain can be executed by invoking {@link + * #authorize(AuthorizationRequest)}. */ @Slf4j public class AuthorizerChain implements Authorizer { @@ -41,7 +42,7 @@ public void init(@Nonnull Map authorizerConfig, @Nonnull Authori /** * Executes a set of {@link Authorizer}s and returns the first successful authentication result. * - * Returns an instance of {@link AuthorizationResult}. + *
+   * <p>
Returns an instance of {@link AuthorizationResult}. */ @Nullable public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request) { @@ -51,10 +52,13 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request for (final Authorizer authorizer : this.authorizers) { try { - log.debug("Executing Authorizer with class name {}", authorizer.getClass().getCanonicalName()); + log.debug( + "Executing Authorizer with class name {}", authorizer.getClass().getCanonicalName()); log.debug("Authorization Request: {}", request.toString()); - // The library came with plugin can use the contextClassLoader to load the classes. For example apache-ranger library does this. - // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class loading request from plugin's home directory, + // The library came with plugin can use the contextClassLoader to load the classes. For + // example apache-ranger library does this. + // Here we need to set our IsolatedClassLoader as contextClassLoader to resolve such class + // loading request from plugin's home directory, // otherwise plugin's internal library wouldn't be able to find their dependent classes Thread.currentThread().setContextClassLoader(authorizer.getClass().getClassLoader()); AuthorizationResult result = authorizer.authorize(request); @@ -67,12 +71,16 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request return result; } else { - log.debug("Received DENY result from Authorizer with class name {}. message: {}", - authorizer.getClass().getCanonicalName(), result.getMessage()); + log.debug( + "Received DENY result from Authorizer with class name {}. message: {}", + authorizer.getClass().getCanonicalName(), + result.getMessage()); } } catch (Exception e) { - log.error("Caught exception while attempting to authorize request using Authorizer {}. Skipping authorizer.", - authorizer.getClass().getCanonicalName(), e); + log.error( + "Caught exception while attempting to authorize request using Authorizer {}. 
Skipping authorizer.", + authorizer.getClass().getCanonicalName(), + e); } finally { Thread.currentThread().setContextClassLoader(contextClassLoader); } @@ -87,16 +95,19 @@ public AuthorizedActors authorizedActors(String privilege, Optional return null; } - AuthorizedActors finalAuthorizedActors = this.authorizers.get(0).authorizedActors(privilege, resourceSpec); + AuthorizedActors finalAuthorizedActors = + this.authorizers.get(0).authorizedActors(privilege, resourceSpec); for (int i = 1; i < this.authorizers.size(); i++) { - finalAuthorizedActors = mergeAuthorizedActors(finalAuthorizedActors, - this.authorizers.get(i).authorizedActors(privilege, resourceSpec)); + finalAuthorizedActors = + mergeAuthorizedActors( + finalAuthorizedActors, + this.authorizers.get(i).authorizedActors(privilege, resourceSpec)); } return finalAuthorizedActors; } - private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors original, - @Nullable AuthorizedActors other) { + private AuthorizedActors mergeAuthorizedActors( + @Nullable AuthorizedActors original, @Nullable AuthorizedActors other) { if (original == null) { return other; } @@ -139,10 +150,8 @@ private AuthorizedActors mergeAuthorizedActors(@Nullable AuthorizedActors origin .build(); } - /** - * Returns an instance of default {@link DataHubAuthorizer} - */ + /** Returns an instance of default {@link DataHubAuthorizer} */ public DataHubAuthorizer getDefaultAuthorizer() { return (DataHubAuthorizer) defaultAuthorizer; } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index 956d635c7901a..9ae95bd4e92b6 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -8,7 +8,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.policy.DataHubPolicyInfo; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashMap; @@ -26,26 +25,23 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * The Authorizer is a singleton class responsible for authorizing - * operations on the DataHub platform via DataHub Policies. + * The Authorizer is a singleton class responsible for authorizing operations on the DataHub + * platform via DataHub Policies. * - * Currently, the authorizer is implemented as a spring-instantiated Singleton - * which manages its own thread-pool used for resolving policy predicates. + *
+ * <p>
Currently, the authorizer is implemented as a spring-instantiated Singleton which manages its + * own thread-pool used for resolving policy predicates. */ // TODO: Decouple this from all Rest.li objects if possible. @Slf4j public class DataHubAuthorizer implements Authorizer { public enum AuthorizationMode { - /** - * Default mode simply means that authorization is enforced, with a DENY result returned - */ + /** Default mode simply means that authorization is enforced, with a DENY result returned */ DEFAULT, /** - * Allow all means that the DataHubAuthorizer will allow all actions. This is used as an override to disable the - * policies feature. + * Allow all means that the DataHubAuthorizer will allow all actions. This is used as an + * override to disable the policies feature. */ ALLOW_ALL } @@ -55,11 +51,13 @@ public enum AuthorizationMode { // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. - private final Map> _policyCache = new HashMap<>(); // Shared Policy Cache. + private final Map> _policyCache = + new HashMap<>(); // Shared Policy Cache. private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); private final Lock readLock = readWriteLock.readLock(); - private final ScheduledExecutorService _refreshExecutorService = Executors.newScheduledThreadPool(1); + private final ScheduledExecutorService _refreshExecutorService = + Executors.newScheduledThreadPool(1); private final PolicyRefreshRunnable _policyRefreshRunnable; private final PolicyEngine _policyEngine; private EntitySpecResolver _entitySpecResolver; @@ -77,9 +75,15 @@ public DataHubAuthorizer( _systemAuthentication = Objects.requireNonNull(systemAuthentication); _mode = Objects.requireNonNull(mode); _policyEngine = new PolicyEngine(systemAuthentication, Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = new PolicyRefreshRunnable(systemAuthentication, new PolicyFetcher(entityClient), _policyCache, - readWriteLock.writeLock(), policyFetchSize); - _refreshExecutorService.scheduleAtFixedRate(_policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); + _policyRefreshRunnable = + new PolicyRefreshRunnable( + systemAuthentication, + new PolicyFetcher(entityClient), + _policyCache, + readWriteLock.writeLock(), + policyFetchSize); + _refreshExecutorService.scheduleAtFixedRate( + _policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); } @Override @@ -95,41 +99,48 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, null); } - Optional resolvedResourceSpec = request.getResourceSpec().map(_entitySpecResolver::resolve); + Optional resolvedResourceSpec = + request.getResourceSpec().map(_entitySpecResolver::resolve); // 1. Fetch the policies relevant to the requested privilege. - final List policiesToEvaluate = getOrDefault(request.getPrivilege(), new ArrayList<>()); + final List policiesToEvaluate = + getOrDefault(request.getPrivilege(), new ArrayList<>()); // 2. Evaluate each policy. for (DataHubPolicyInfo policy : policiesToEvaluate) { if (isRequestGranted(policy, request, resolvedResourceSpec)) { // Short circuit if policy has granted privileges to this actor. 
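      // Editor's sketch (illustrative, not part of this patch): this loop is first-ALLOW-wins;
      // the first policy granting the privilege produces ALLOW, otherwise DENY is returned after
      // the loop. A hypothetical caller looks roughly like this, where the three-argument
      // AuthorizationRequest shape is an assumption inferred from the accessors used here:
      //
      //   AuthorizationRequest request =
      //       new AuthorizationRequest(
      //           "urn:li:corpuser:johnsmith",   // actor URN
      //           "EDIT_ENTITY_TAGS",            // privilege to check
      //           Optional.of(new EntitySpec("dataset", "urn:li:dataset:example")));
      //   AuthorizationResult result = dataHubAuthorizer.authorize(request);
      //   boolean allowed = AuthorizationResult.Type.ALLOW.equals(result.getType());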
- return new AuthorizationResult(request, AuthorizationResult.Type.ALLOW, + return new AuthorizationResult( + request, + AuthorizationResult.Type.ALLOW, String.format("Granted by policy with type: %s", policy.getType())); } } - return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null); + return new AuthorizationResult(request, AuthorizationResult.Type.DENY, null); } - public List getGrantedPrivileges(final String actor, final Optional resourceSpec) { + public List getGrantedPrivileges( + final String actor, final Optional resourceSpec) { // 1. Fetch all policies final List policiesToEvaluate = getOrDefault(ALL, new ArrayList<>()); Urn actorUrn = UrnUtils.getUrn(actor); - final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); + final ResolvedEntitySpec resolvedActorSpec = + _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); - Optional resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); + Optional resolvedResourceSpec = + resourceSpec.map(_entitySpecResolver::resolve); - return _policyEngine.getGrantedPrivileges(policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); + return _policyEngine.getGrantedPrivileges( + policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); } /** - * Retrieves the current list of actors authorized to for a particular privilege against - * an optional resource + * Retrieves the current list of actors authorized to for a particular privilege against an + * optional resource */ public AuthorizedActors authorizedActors( - final String privilege, - final Optional resourceSpec) { + final String privilege, final Optional resourceSpec) { final List authorizedUsers = new ArrayList<>(); final List authorizedGroups = new ArrayList<>(); @@ -140,7 +151,8 @@ public AuthorizedActors authorizedActors( // Step 1: Find policies granting the privilege. final List policiesToEvaluate = getOrDefault(privilege, new ArrayList<>()); - Optional resolvedResourceSpec = resourceSpec.map(_entitySpecResolver::resolve); + Optional resolvedResourceSpec = + resourceSpec.map(_entitySpecResolver::resolve); // Step 2: For each policy, determine whether the resource is a match. for (DataHubPolicyInfo policy : policiesToEvaluate) { @@ -149,7 +161,8 @@ public AuthorizedActors authorizedActors( continue; } - final PolicyEngine.PolicyActors matchingActors = _policyEngine.getMatchingActors(policy, resolvedResourceSpec); + final PolicyEngine.PolicyActors matchingActors = + _policyEngine.getMatchingActors(policy, resolvedResourceSpec); // Step 3: For each matching policy, add actors that are authorized. authorizedUsers.addAll(matchingActors.getUsers()); @@ -164,12 +177,13 @@ public AuthorizedActors authorizedActors( } // Step 4: Return all authorized users and groups. - return new AuthorizedActors(privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups); + return new AuthorizedActors( + privilege, authorizedUsers, authorizedGroups, authorizedRoles, allUsers, allGroups); } /** - * Invalidates the policy cache and fires off a refresh thread. Should be invoked - * when a policy is created, modified, or deleted. + * Invalidates the policy cache and fires off a refresh thread. Should be invoked when a policy is + * created, modified, or deleted. 
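   * <p>Editor's note: concretely this re-submits {@code _policyRefreshRunnable} to the
   * single-threaded {@code _refreshExecutorService}, so an invalidation-triggered refresh is
   * serialized with the scheduled periodic refreshes rather than racing them.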
*/ public void invalidateCache() { _refreshExecutorService.execute(_policyRefreshRunnable); @@ -184,17 +198,19 @@ public void setMode(final AuthorizationMode mode) { } /** - * Returns true if the request's is coming from the system itself, in which cases - * the action is always authorized. + * Returns true if the request's is coming from the system itself, in which cases the action is + * always authorized. */ - private boolean isSystemRequest(final AuthorizationRequest request, final Authentication systemAuthentication) { + private boolean isSystemRequest( + final AuthorizationRequest request, final Authentication systemAuthentication) { return systemAuthentication.getActor().toUrnStr().equals(request.getActorUrn()); } - /** - * Returns true if a policy grants the requested privilege for a given actor and resource. - */ - private boolean isRequestGranted(final DataHubPolicyInfo policy, final AuthorizationRequest request, final Optional resourceSpec) { + /** Returns true if a policy grants the requested privilege for a given actor and resource. */ + private boolean isRequestGranted( + final DataHubPolicyInfo policy, + final AuthorizationRequest request, + final Optional resourceSpec) { if (AuthorizationMode.ALLOW_ALL.equals(mode())) { return true; } @@ -204,14 +220,12 @@ private boolean isRequestGranted(final DataHubPolicyInfo policy, final Authoriza return false; } - final ResolvedEntitySpec resolvedActorSpec = _entitySpecResolver.resolve( + final ResolvedEntitySpec resolvedActorSpec = + _entitySpecResolver.resolve( new EntitySpec(actorUrn.get().getEntityType(), request.getActorUrn())); - final PolicyEngine.PolicyEvaluationResult result = _policyEngine.evaluatePolicy( - policy, - resolvedActorSpec, - request.getPrivilege(), - resourceSpec - ); + final PolicyEngine.PolicyEvaluationResult result = + _policyEngine.evaluatePolicy( + policy, resolvedActorSpec, request.getPrivilege(), resourceSpec); return result.isGranted(); } @@ -219,7 +233,10 @@ private Optional getUrnFromRequestActor(String actor) { try { return Optional.of(Urn.createFromString(actor)); } catch (URISyntaxException e) { - log.error(String.format("Failed to bind actor %s to an URN. Actors must be URNs. Denying the authorization request", actor)); + log.error( + String.format( + "Failed to bind actor %s to an URN. Actors must be URNs. Denying the authorization request", + actor)); return Optional.empty(); } } @@ -237,8 +254,8 @@ private List getOrDefault(String key, List /** * A {@link Runnable} used to periodically fetch a new instance of the policies Cache. * - * Currently, the refresh logic is not very smart. When the cache is invalidated, we simply re-fetch the - * entire cache using Policies stored in the backend. + *
+   * <p>
Currently, the refresh logic is not very smart. When the cache is invalidated, we simply + * re-fetch the entire cache using Policies stored in the backend. */ @VisibleForTesting @RequiredArgsConstructor @@ -260,8 +277,8 @@ public void run() { while (total == null || scrollId != null) { try { - final PolicyFetcher.PolicyFetchResult - policyFetchResult = _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); + final PolicyFetcher.PolicyFetchResult policyFetchResult = + _policyFetcher.fetchPolicies(count, scrollId, _systemAuthentication); addPoliciesToCache(newCache, policyFetchResult.getPolicies()); @@ -269,7 +286,10 @@ public void run() { scrollId = policyFetchResult.getScrollId(); } catch (Exception e) { log.error( - "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", count, scrollId, e); + "Failed to retrieve policy urns! Skipping updating policy cache until next refresh. count: {}, scrollId: {}", + count, + scrollId, + e); return; } } @@ -285,23 +305,31 @@ public void run() { log.debug(String.format("Successfully fetched %s policies.", total)); } catch (Exception e) { - log.error("Caught exception while loading Policy cache. Will retry on next scheduled attempt.", e); + log.error( + "Caught exception while loading Policy cache. Will retry on next scheduled attempt.", + e); } } - private void addPoliciesToCache(final Map> cache, + private void addPoliciesToCache( + final Map> cache, final List policies) { policies.forEach(policy -> addPolicyToCache(cache, policy.getPolicyInfo())); } - private void addPolicyToCache(final Map> cache, final DataHubPolicyInfo policy) { + private void addPolicyToCache( + final Map> cache, final DataHubPolicyInfo policy) { final List privileges = policy.getPrivileges(); for (String privilege : privileges) { - List existingPolicies = cache.containsKey(privilege) ? new ArrayList<>(cache.get(privilege)) : new ArrayList<>(); + List existingPolicies = + cache.containsKey(privilege) + ? new ArrayList<>(cache.get(privilege)) + : new ArrayList<>(); existingPolicies.add(policy); cache.put(privilege, existingPolicies); } - List existingPolicies = cache.containsKey(ALL) ? new ArrayList<>(cache.get(ALL)) : new ArrayList<>(); + List existingPolicies = + cache.containsKey(ALL) ? 
new ArrayList<>(cache.get(ALL)) : new ArrayList<>(); existingPolicies.add(policy); cache.put(ALL, existingPolicies); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java index 65b0329a9c4f2..c2d9c42693311 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DefaultEntitySpecResolver.java @@ -15,13 +15,14 @@ import java.util.Map; import java.util.stream.Collectors; - public class DefaultEntitySpecResolver implements EntitySpecResolver { private final List _entityFieldResolverProviders; public DefaultEntitySpecResolver(Authentication systemAuthentication, EntityClient entityClient) { _entityFieldResolverProviders = - ImmutableList.of(new EntityTypeFieldResolverProvider(), new EntityUrnFieldResolverProvider(), + ImmutableList.of( + new EntityTypeFieldResolverProvider(), + new EntityUrnFieldResolverProvider(), new DomainFieldResolverProvider(entityClient, systemAuthentication), new OwnerFieldResolverProvider(entityClient, systemAuthentication), new DataPlatformInstanceFieldResolverProvider(entityClient, systemAuthentication), @@ -35,7 +36,10 @@ public ResolvedEntitySpec resolve(EntitySpec entitySpec) { private Map getFieldResolvers(EntitySpec entitySpec) { return _entityFieldResolverProviders.stream() - .flatMap(resolver -> resolver.getFieldTypes().stream().map(fieldType -> Pair.of(fieldType, resolver))) - .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().getFieldResolver(entitySpec))); + .flatMap( + resolver -> + resolver.getFieldTypes().stream().map(fieldType -> Pair.of(fieldType, resolver))) + .collect( + Collectors.toMap(Pair::getKey, pair -> pair.getValue().getFieldResolver(entitySpec))); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java index 0dbb9cd132f8a..e4f6b483e09f8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/FilterUtils.java @@ -13,55 +13,56 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class FilterUtils { public static final PolicyMatchFilter EMPTY_FILTER = new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray()); - private FilterUtils() { - } + private FilterUtils() {} - /** - * Creates new PolicyMatchCriterion with field and value, using EQUAL PolicyMatchCondition. - */ + /** Creates new PolicyMatchCriterion with field and value, using EQUAL PolicyMatchCondition. */ @Nonnull - public static PolicyMatchCriterion newCriterion(@Nonnull EntityFieldType field, @Nonnull List values) { + public static PolicyMatchCriterion newCriterion( + @Nonnull EntityFieldType field, @Nonnull List values) { return newCriterion(field, values, PolicyMatchCondition.EQUALS); } - /** - * Creates new PolicyMatchCriterion with field, value and PolicyMatchCondition. - */ + /** Creates new PolicyMatchCriterion with field, value and PolicyMatchCondition. 
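   *
   * <p>Editor's sketch, e.g. a criterion matching resources whose type equals "dataset"
   * (the values are illustrative):
   *
   * <pre>
   * PolicyMatchCriterion criterion =
   *     newCriterion(
   *         EntityFieldType.TYPE,
   *         Collections.singletonList("dataset"),
   *         PolicyMatchCondition.EQUALS);
   * </pre>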
*/ @Nonnull - public static PolicyMatchCriterion newCriterion(@Nonnull EntityFieldType field, @Nonnull List values, + public static PolicyMatchCriterion newCriterion( + @Nonnull EntityFieldType field, + @Nonnull List values, @Nonnull PolicyMatchCondition policyMatchCondition) { - return new PolicyMatchCriterion().setField(field.name()) + return new PolicyMatchCriterion() + .setField(field.name()) .setValues(new StringArray(values)) .setCondition(policyMatchCondition); } /** - * Creates new PolicyMatchFilter from a map of Criteria by removing null-valued Criteria and using EQUAL PolicyMatchCondition (default). + * Creates new PolicyMatchFilter from a map of Criteria by removing null-valued Criteria and using + * EQUAL PolicyMatchCondition (default). */ @Nonnull public static PolicyMatchFilter newFilter(@Nullable Map> params) { if (params == null) { return EMPTY_FILTER; } - PolicyMatchCriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue())) - .collect(Collectors.toCollection(PolicyMatchCriterionArray::new)); + PolicyMatchCriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue())) + .collect(Collectors.toCollection(PolicyMatchCriterionArray::new)); return new PolicyMatchFilter().setCriteria(criteria); } /** - * Creates new PolicyMatchFilter from a single PolicyMatchCriterion with EQUAL PolicyMatchCondition (default). + * Creates new PolicyMatchFilter from a single PolicyMatchCriterion with EQUAL + * PolicyMatchCondition (default). */ @Nonnull - public static PolicyMatchFilter newFilter(@Nonnull EntityFieldType field, @Nonnull List values) { + public static PolicyMatchFilter newFilter( + @Nonnull EntityFieldType field, @Nonnull List values) { return newFilter(Collections.singletonMap(field, values)); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index da0ae26f2b1da..123e5f3c55932 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -1,5 +1,7 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Owner; import com.linkedin.common.Ownership; @@ -20,7 +22,6 @@ import com.linkedin.policy.PolicyMatchCriterion; import com.linkedin.policy.PolicyMatchCriterionArray; import com.linkedin.policy.PolicyMatchFilter; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -31,16 +32,12 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nullable; - import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class PolicyEngine { @@ -59,13 +56,19 @@ public PolicyEvaluationResult evaluatePolicy( // If the privilege is not in scope, deny the request. 
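    // Editor's sketch (illustrative, not part of this patch): evaluation is deny-by-default.
    // A minimal policy that passes this first gate for privilege "EDIT_ENTITY_OWNERS" could be
    // built as below; the chained setters mirror the generated-record style used elsewhere in
    // this file, and the concrete values are assumptions:
    //
    //   DataHubPolicyInfo policy =
    //       new DataHubPolicyInfo()
    //           .setType("METADATA")
    //           .setState("ACTIVE")
    //           .setPrivileges(
    //               new StringArray(Collections.singletonList("EDIT_ENTITY_OWNERS")));
    //   // isPrivilegeMatch("EDIT_ENTITY_OWNERS", policy.getPrivileges()) -> true;
    //   // evaluation then falls through to the applicability checks below.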
if (!isPrivilegeMatch(privilege, policy.getPrivileges())) { - log.debug("Policy denied based on irrelevant privileges {} for {}", policy.getPrivileges(), privilege); + log.debug( + "Policy denied based on irrelevant privileges {} for {}", + policy.getPrivileges(), + privilege); return PolicyEvaluationResult.DENIED; } // If policy is not applicable, deny the request if (!isPolicyApplicable(policy, resolvedActorSpec, resource, context)) { - log.debug("Policy does not applicable for actor {} and resource {}", resolvedActorSpec.getSpec().getEntity(), resource); + log.debug( + "Policy does not applicable for actor {} and resource {}", + resolvedActorSpec.getSpec().getEntity(), + resource); return PolicyEvaluationResult.DENIED; } @@ -74,8 +77,7 @@ public PolicyEvaluationResult evaluatePolicy( } public PolicyActors getMatchingActors( - final DataHubPolicyInfo policy, - final Optional resource) { + final DataHubPolicyInfo policy, final Optional resource) { final List users = new ArrayList<>(); final List groups = new ArrayList<>(); final List roles = new ArrayList<>(); @@ -118,8 +120,7 @@ private boolean isPolicyApplicable( final DataHubPolicyInfo policy, final ResolvedEntitySpec resolvedActorSpec, final Optional resource, - final PolicyEvaluationContext context - ) { + final PolicyEvaluationContext context) { // If policy is inactive, simply return DENY. if (PoliciesConfig.INACTIVE_POLICY_STATE.equals(policy.getState())) { @@ -150,25 +151,27 @@ public List getGrantedPrivileges( /** * Returns true if the policy matches the resource spec, false otherwise. * - * If the policy is of type "PLATFORM", the resource will always match (since there's no resource). - * If the policy is of type "METADATA", the resourceSpec parameter will be matched against the - * resource filter defined on the policy. + *
+   * <p>
If the policy is of type "PLATFORM", the resource will always match (since there's no + * resource). If the policy is of type "METADATA", the resourceSpec parameter will be matched + * against the resource filter defined on the policy. */ - public Boolean policyMatchesResource(final DataHubPolicyInfo policy, final Optional resourceSpec) { + public Boolean policyMatchesResource( + final DataHubPolicyInfo policy, final Optional resourceSpec) { return isResourceMatch(policy.getType(), policy.getResources(), resourceSpec); } /** - * Returns true if the privilege portion of a DataHub policy matches a the privilege being evaluated, false otherwise. + * Returns true if the privilege portion of a DataHub policy matches a the privilege being + * evaluated, false otherwise. */ private boolean isPrivilegeMatch( - final String requestPrivilege, - final List policyPrivileges) { + final String requestPrivilege, final List policyPrivileges) { return policyPrivileges.contains(requestPrivilege); } /** - * Returns true if the resource portion of a DataHub policy matches a the resource being evaluated, false otherwise. + * Returns true if the resource portion of a DataHub policy matches a the resource being + * evaluated, false otherwise. */ private boolean isResourceMatch( final String policyType, @@ -192,8 +195,8 @@ private boolean isResourceMatch( } /** - * Get filter object from policy resource filter. Make sure it is backward compatible by constructing PolicyMatchFilter object - * from other fields if the filter field is not set + * Get filter object from policy resource filter. Make sure it is backward compatible by + * constructing PolicyMatchFilter object from other fields if the filter field is not set */ private PolicyMatchFilter getFilter(DataHubResourceFilter policyResourceFilter) { if (policyResourceFilter.hasFilter()) { @@ -201,13 +204,19 @@ private PolicyMatchFilter getFilter(DataHubResourceFilter policyResourceFilter) } PolicyMatchCriterionArray criteria = new PolicyMatchCriterionArray(); if (policyResourceFilter.hasType()) { - criteria.add(new PolicyMatchCriterion().setField(EntityFieldType.TYPE.name()) - .setValues(new StringArray(Collections.singletonList(policyResourceFilter.getType())))); + criteria.add( + new PolicyMatchCriterion() + .setField(EntityFieldType.TYPE.name()) + .setValues( + new StringArray(Collections.singletonList(policyResourceFilter.getType())))); } - if (policyResourceFilter.hasType() && policyResourceFilter.hasResources() + if (policyResourceFilter.hasType() + && policyResourceFilter.hasResources() && !policyResourceFilter.isAllResources()) { criteria.add( - new PolicyMatchCriterion().setField(EntityFieldType.URN.name()).setValues(policyResourceFilter.getResources())); + new PolicyMatchCriterion() + .setField(EntityFieldType.URN.name()) + .setValues(policyResourceFilter.getResources())); } return new PolicyMatchFilter().setCriteria(criteria); } @@ -216,7 +225,8 @@ private boolean checkFilter(final PolicyMatchFilter filter, final ResolvedEntity return filter.getCriteria().stream().allMatch(criterion -> checkCriterion(criterion, resource)); } - private boolean checkCriterion(final PolicyMatchCriterion criterion, final ResolvedEntitySpec resource) { + private boolean checkCriterion( + final PolicyMatchCriterion criterion, final ResolvedEntitySpec resource) { EntityFieldType entityFieldType; try { entityFieldType = EntityFieldType.valueOf(criterion.getField().toUpperCase()); @@ -226,12 +236,13 @@ private boolean checkCriterion(final PolicyMatchCriterion criterion, final 
Resol } Set fieldValues = resource.getFieldValues(entityFieldType); - return criterion.getValues() - .stream() - .anyMatch(filterValue -> checkCondition(fieldValues, filterValue, criterion.getCondition())); + return criterion.getValues().stream() + .anyMatch( + filterValue -> checkCondition(fieldValues, filterValue, criterion.getCondition())); } - private boolean checkCondition(Set fieldValues, String filterValue, PolicyMatchCondition condition) { + private boolean checkCondition( + Set fieldValues, String filterValue, PolicyMatchCondition condition) { if (condition == PolicyMatchCondition.EQUALS) { return fieldValues.contains(filterValue); } @@ -240,8 +251,9 @@ private boolean checkCondition(Set fieldValues, String filterValue, Poli } /** - * Returns true if the actor portion of a DataHub policy matches a the actor being evaluated, false otherwise. - * Returns true if the actor portion of a DataHub policy matches a the actor being evaluated, false otherwise. + * Returns true if the actor portion of a DataHub policy matches a the actor being evaluated, + * false otherwise. Returns true if the actor portion of a DataHub policy matches a the actor + * being evaluated, false otherwise. */ private boolean isActorMatch( final ResolvedEntitySpec resolvedActorSpec, @@ -259,7 +271,8 @@ private boolean isActorMatch( return true; } - // 3. If the actor is the owner, either directly or indirectly via a group, return true immediately. + // 3. If the actor is the owner, either directly or indirectly via a group, return true + // immediately. if (isOwnerMatch(resolvedActorSpec, actorFilter, resourceSpec, context)) { return true; } @@ -268,11 +281,14 @@ private boolean isActorMatch( return isRoleMatch(resolvedActorSpec, actorFilter, context); } - private boolean isUserMatch(final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter) { + private boolean isUserMatch( + final ResolvedEntitySpec resolvedActorSpec, final DataHubActorFilter actorFilter) { // If the actor is a matching "User" in the actor filter, return true immediately. - return actorFilter.isAllUsers() || (actorFilter.hasUsers() && Objects.requireNonNull(actorFilter.getUsers()) - .stream().map(Urn::toString) - .anyMatch(user -> user.equals(resolvedActorSpec.getSpec().getEntity()))); + return actorFilter.isAllUsers() + || (actorFilter.hasUsers() + && Objects.requireNonNull(actorFilter.getUsers()).stream() + .map(Urn::toString) + .anyMatch(user -> user.equals(resolvedActorSpec.getSpec().getEntity()))); } private boolean isGroupMatch( @@ -283,9 +299,10 @@ private boolean isGroupMatch( if (actorFilter.isAllGroups() || actorFilter.hasGroups()) { final Set groups = resolveGroups(resolvedActorSpec, context); return (actorFilter.isAllGroups() && !groups.isEmpty()) - || (actorFilter.hasGroups() && Objects.requireNonNull(actorFilter.getGroups()) - .stream().map(Urn::toString) - .anyMatch(groups::contains)); + || (actorFilter.hasGroups() + && Objects.requireNonNull(actorFilter.getGroups()).stream() + .map(Urn::toString) + .anyMatch(groups::contains)); } // If there are no groups on the policy, return false for the group match. return false; @@ -296,7 +313,8 @@ private boolean isOwnerMatch( final DataHubActorFilter actorFilter, final Optional requestResource, final PolicyEvaluationContext context) { - // If the policy does not apply to owners, or there is no resource to own, return false immediately. + // If the policy does not apply to owners, or there is no resource to own, return false + // immediately. 
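    // Editor's note (illustrative): if the resource's Ownership aspect lists, say,
    // [urn:li:corpuser:johnsmith, urn:li:corpGroup:eng], then johnsmith matches as a direct
    // user-owner, while another user matches only if resolveGroups(...) resolves their
    // membership to urn:li:corpGroup:eng; when the policy restricts ownership types,
    // getOwnersForType(...) first narrows the owner set to those types.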
if (!actorFilter.isResourceOwners() || requestResource.isEmpty()) { return false; } @@ -308,8 +326,12 @@ private Set getOwnersForType(EntitySpec resourceSpec, List ownershi Urn entityUrn = UrnUtils.getUrn(resourceSpec.getEntity()); EnvelopedAspect ownershipAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { return Collections.emptySet(); } @@ -328,7 +350,8 @@ private Set getOwnersForType(EntitySpec resourceSpec, List ownershi private boolean isActorOwner( final ResolvedEntitySpec resolvedActorSpec, - ResolvedEntitySpec resourceSpec, List ownershipTypes, + ResolvedEntitySpec resourceSpec, + List ownershipTypes, PolicyEvaluationContext context) { Set owners = this.getOwnersForType(resourceSpec.getSpec(), ownershipTypes); if (isUserOwner(resolvedActorSpec, owners)) { @@ -357,12 +380,11 @@ private boolean isRoleMatch( } // If the actor has a matching "Role" in the actor filter, return true immediately. Set actorRoles = resolveRoles(resolvedActorSpec, context); - return Objects.requireNonNull(actorFilter.getRoles()) - .stream() - .anyMatch(actorRoles::contains); + return Objects.requireNonNull(actorFilter.getRoles()).stream().anyMatch(actorRoles::contains); } - private Set resolveRoles(final ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { + private Set resolveRoles( + final ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.roles != null) { return context.roles; } @@ -374,14 +396,21 @@ private Set resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy try { Urn actorUrn = Urn.createFromString(actor); - final EntityResponse corpUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, Collections.singleton(actorUrn), - Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), _systemAuthentication).get(actorUrn); + final EntityResponse corpUser = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(actorUrn), + Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME), + _systemAuthentication) + .get(actorUrn); if (corpUser == null || !corpUser.hasAspects()) { return roles; } aspectMap = corpUser.getAspects(); } catch (Exception e) { - log.error(String.format("Failed to fetch %s for urn %s", ROLE_MEMBERSHIP_ASPECT_NAME, actor), e); + log.error( + String.format("Failed to fetch %s for urn %s", ROLE_MEMBERSHIP_ASPECT_NAME, actor), e); return roles; } @@ -389,7 +418,8 @@ private Set resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy return roles; } - RoleMembership roleMembership = new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); + RoleMembership roleMembership = + new RoleMembership(aspectMap.get(ROLE_MEMBERSHIP_ASPECT_NAME).getValue().data()); if (roleMembership.hasRoles()) { roles.addAll(roleMembership.getRoles()); context.setRoles(roles); @@ -397,7 +427,8 @@ private Set resolveRoles(final ResolvedEntitySpec resolvedActorSpec, Policy return roles; } - private Set resolveGroups(ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { + private Set resolveGroups( + ResolvedEntitySpec resolvedActorSpec, PolicyEvaluationContext context) { if (context.groups != null) 
{ return context.groups; } @@ -408,9 +439,7 @@ private Set resolveGroups(ResolvedEntitySpec resolvedActorSpec, PolicyEv return groups; } - /** - * Class used to store state across a single Policy evaluation. - */ + /** Class used to store state across a single Policy evaluation. */ static class PolicyEvaluationContext { private Set groups; private Set roles; @@ -424,9 +453,7 @@ public void setRoles(Set roles) { } } - /** - * Class used to represent the result of a Policy evaluation - */ + /** Class used to represent the result of a Policy evaluation */ static class PolicyEvaluationResult { public static final PolicyEvaluationResult GRANTED = new PolicyEvaluationResult(true); public static final PolicyEvaluationResult DENIED = new PolicyEvaluationResult(false); @@ -442,9 +469,7 @@ public boolean isGranted() { } } - /** - * Class used to represent all valid users of a policy. - */ + /** Class used to represent all valid users of a policy. */ @Value @AllArgsConstructor(access = AccessLevel.PUBLIC) public static class PolicyActors { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java index c06da4d245f91..9c5950985eea4 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyFetcher.java @@ -1,5 +1,8 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -20,19 +23,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nullable; - -import static com.linkedin.metadata.Constants.DATAHUB_POLICY_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; - - -/** - * Wrapper around entity client to fetch policies in a paged manner - */ +/** Wrapper around entity client to fetch policies in a paged manner */ @Slf4j @RequiredArgsConstructor public class PolicyFetcher { @@ -42,49 +38,66 @@ public class PolicyFetcher { new SortCriterion().setField("lastUpdatedTimestamp").setOrder(SortOrder.DESCENDING); /** - * This is to provide a scroll implementation using the start/count api. It is not efficient - * and the scroll native functions should be used instead. This does fix a failure to fetch - * policies when deep pagination happens where there are >10k policies. - * Exists primarily to prevent breaking change to the graphql api. + * This is to provide a scroll implementation using the start/count api. It is not efficient and + * the scroll native functions should be used instead. This does fix a failure to fetch policies + * when deep pagination happens where there are >10k policies. Exists primarily to prevent + * breaking change to the graphql api. 
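+   *
+   * <p>Concretely, pages of size {@code count} are scrolled through and discarded until the
+   * running total of fetched policies passes {@code start}; the page that crosses the offset is
+   * then returned whole. For example, with start=25 and count=10 the first two pages are
+   * discarded and the third page (policies 21-30) is returned, at a cost of three scroll calls.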
*/ @Deprecated - public CompletableFuture fetchPolicies(int start, String query, int count, Authentication authentication) { - return CompletableFuture.supplyAsync(() -> { - try { - PolicyFetchResult result = PolicyFetchResult.EMPTY; - String scrollId = ""; - int fetchedResults = 0; - - while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) { - PolicyFetchResult tmpResult = fetchPolicies(query, count, scrollId.isEmpty() ? null : scrollId, authentication); - fetchedResults += tmpResult.getPolicies().size(); - scrollId = tmpResult.getScrollId(); - if (fetchedResults > start) { - result = tmpResult; + public CompletableFuture fetchPolicies( + int start, String query, int count, Authentication authentication) { + return CompletableFuture.supplyAsync( + () -> { + try { + PolicyFetchResult result = PolicyFetchResult.EMPTY; + String scrollId = ""; + int fetchedResults = 0; + + while (PolicyFetchResult.EMPTY.equals(result) && scrollId != null) { + PolicyFetchResult tmpResult = + fetchPolicies(query, count, scrollId.isEmpty() ? null : scrollId, authentication); + fetchedResults += tmpResult.getPolicies().size(); + scrollId = tmpResult.getScrollId(); + if (fetchedResults > start) { + result = tmpResult; + } + } + + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list policies", e); } - } - - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list policies", e); - } - }); + }); } - public PolicyFetchResult fetchPolicies(int count, @Nullable String scrollId, Authentication authentication) - throws RemoteInvocationException, URISyntaxException { + public PolicyFetchResult fetchPolicies( + int count, @Nullable String scrollId, Authentication authentication) + throws RemoteInvocationException, URISyntaxException { return fetchPolicies("", count, scrollId, authentication); } - public PolicyFetchResult fetchPolicies(String query, int count, @Nullable String scrollId, Authentication authentication) + public PolicyFetchResult fetchPolicies( + String query, int count, @Nullable String scrollId, Authentication authentication) throws RemoteInvocationException, URISyntaxException { log.debug(String.format("Batch fetching policies. 
count: %s, scroll: %s", count, scrollId)); // First fetch all policy urns - ScrollResult result = _entityClient.scrollAcrossEntities(List.of(POLICY_ENTITY_NAME), query, null, scrollId, - null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) - .setSkipHighlighting(true).setFulltext(true), authentication); - List policyUrns = result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + ScrollResult result = + _entityClient.scrollAcrossEntities( + List.of(POLICY_ENTITY_NAME), + query, + null, + scrollId, + null, + count, + new SearchFlags() + .setSkipCache(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setFulltext(true), + authentication); + List policyUrns = + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); if (policyUrns.isEmpty()) { return PolicyFetchResult.EMPTY; @@ -92,23 +105,29 @@ null, count, new SearchFlags().setSkipCache(true).setSkipAggregates(true) // Fetch DataHubPolicyInfo aspects for each urn final Map policyEntities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); - return new PolicyFetchResult(policyUrns.stream() - .map(policyEntities::get) - .filter(Objects::nonNull) - .map(this::extractPolicy) - .filter(Objects::nonNull) - .collect(Collectors.toList()), result.getNumEntities(), result.getScrollId()); + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, new HashSet<>(policyUrns), null, authentication); + return new PolicyFetchResult( + policyUrns.stream() + .map(policyEntities::get) + .filter(Objects::nonNull) + .map(this::extractPolicy) + .filter(Objects::nonNull) + .collect(Collectors.toList()), + result.getNumEntities(), + result.getScrollId()); } private Policy extractPolicy(EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); if (!aspectMap.containsKey(DATAHUB_POLICY_INFO_ASPECT_NAME)) { - // Right after deleting the policy, there could be a small time frame where search and local db is not consistent. + // Right after deleting the policy, there could be a small time frame where search and local + // db is not consistent. 
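+      // Callers are expected to drop these nulls; fetchPolicies above does so via
+      // .map(this::extractPolicy).filter(Objects::nonNull).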
// Simply return null in that case return null; } - return new Policy(entityResponse.getUrn(), + return new Policy( + entityResponse.getUrn(), new DataHubPolicyInfo(aspectMap.get(DATAHUB_POLICY_INFO_ASPECT_NAME).getValue().data())); } @@ -116,10 +135,10 @@ private Policy extractPolicy(EntityResponse entityResponse) { public static class PolicyFetchResult { List policies; int total; - @Nullable - String scrollId; + @Nullable String scrollId; - public static final PolicyFetchResult EMPTY = new PolicyFetchResult(Collections.emptyList(), 0, null); + public static final PolicyFetchResult EMPTY = + new PolicyFetchResult(Collections.emptyList(), 0, null); } @Value diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java index cbb237654e969..c24c65725830f 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProvider.java @@ -1,5 +1,7 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; @@ -16,11 +18,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - -/** - * Provides field resolver for domain given resourceSpec - */ +/** Provides field resolver for domain given resourceSpec */ @Slf4j @RequiredArgsConstructor public class DataPlatformInstanceFieldResolverProvider implements EntityFieldResolverProvider { @@ -40,7 +38,8 @@ public FieldResolver getFieldResolver(EntitySpec entitySpec) { private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) { Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity()); - // In the case that the entity is a platform instance, the associated platform instance entity is the instance itself + // In the case that the entity is a platform instance, the associated platform instance entity + // is the instance itself if (entityUrn.getEntityType().equals(DATA_PLATFORM_INSTANCE_ENTITY_NAME)) { return FieldResolver.FieldValue.builder() .values(Collections.singleton(entityUrn.toString())) @@ -49,9 +48,14 @@ private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) EnvelopedAspect dataPlatformInstanceAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME), _systemAuthentication); - if (response == null || !response.getAspects().containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME), + _systemAuthentication); + if (response == null + || !response.getAspects().containsKey(DATA_PLATFORM_INSTANCE_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } dataPlatformInstanceAspect = response.getAspects().get(DATA_PLATFORM_INSTANCE_ASPECT_NAME); @@ -59,12 +63,15 @@ private FieldResolver.FieldValue getDataPlatformInstance(EntitySpec entitySpec) log.error("Error while 
retrieving platform instance aspect for urn {}", entityUrn, e); return FieldResolver.emptyFieldValue(); } - DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataPlatformInstanceAspect.getValue().data()); + DataPlatformInstance dataPlatformInstance = + new DataPlatformInstance(dataPlatformInstanceAspect.getValue().data()); if (dataPlatformInstance.getInstance() == null) { return FieldResolver.emptyFieldValue(); } return FieldResolver.FieldValue.builder() - .values(Collections.singleton(Objects.requireNonNull(dataPlatformInstance.getInstance()).toString())) + .values( + Collections.singleton( + Objects.requireNonNull(dataPlatformInstance.getInstance()).toString())) .build(); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java index 15d821b75c0bd..e99e13ce00145 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/DomainFieldResolverProvider.java @@ -1,9 +1,11 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.domain.DomainProperties; @@ -11,25 +13,17 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; - import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - - -/** - * Provides field resolver for domain given entitySpec - */ +/** Provides field resolver for domain given entitySpec */ @Slf4j @RequiredArgsConstructor public class DomainFieldResolverProvider implements EntityFieldResolverProvider { @@ -51,29 +45,35 @@ private Set getBatchedParentDomains(@Nonnull final Set urns) { final Set parentUrns = new HashSet<>(); try { - final Map batchResponse = _entityClient.batchGetV2( - DOMAIN_ENTITY_NAME, - urns, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - _systemAuthentication - ); - - batchResponse.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); - if (properties.hasParentDomain()) { - parentUrns.add(properties.getParentDomain()); - } - } - }); + final Map batchResponse = + _entityClient.batchGetV2( + DOMAIN_ENTITY_NAME, + urns, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + _systemAuthentication); + + batchResponse.forEach( + (urn, entityResponse) -> { + if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties 
= + new DomainProperties( + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data()); + if (properties.hasParentDomain()) { + parentUrns.add(properties.getParentDomain()); + } + } + }); } catch (Exception e) { log.error( "Error while retrieving parent domains for {} urns including \"{}\"", urns.size(), urns.stream().findFirst().map(Urn::toString).orElse(""), - e - ); + e); } return parentUrns; @@ -90,8 +90,12 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { final EnvelopedAspect domainsAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(DOMAINS_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(DOMAINS_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(DOMAINS_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } @@ -106,7 +110,8 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { * To avoid cycles we remove any parents we've already visited to prevent an infinite loop cycle. */ - final Set domainUrns = new HashSet<>(new Domains(domainsAspect.getValue().data()).getDomains()); + final Set domainUrns = + new HashSet<>(new Domains(domainsAspect.getValue().data()).getDomains()); Set batchedParentUrns = getBatchedParentDomains(domainUrns); batchedParentUrns.removeAll(domainUrns); @@ -116,9 +121,8 @@ private FieldResolver.FieldValue getDomains(EntitySpec entitySpec) { batchedParentUrns.removeAll(domainUrns); } - return FieldResolver.FieldValue.builder().values(domainUrns - .stream() - .map(Object::toString) - .collect(Collectors.toSet())).build(); + return FieldResolver.FieldValue.builder() + .values(domainUrns.stream().map(Object::toString).collect(Collectors.toSet())) + .build(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java index 227d403a9cd1d..8cb612515e626 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityFieldResolverProvider.java @@ -1,24 +1,20 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import java.util.List; - -/** - * Base class for defining a class that provides the field resolver for the given field type - */ +/** Base class for defining a class that provides the field resolver for the given field type */ public interface EntityFieldResolverProvider { /** * List of fields that this hydrator is hydrating. 
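   * Each resolver implementation in this package typically declares a single field type here
   * (the owner, domain, entity-type, entity-urn, group-membership, and data-platform-instance
   * providers each cover their own field).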
+ * * @return */ List getFieldTypes(); - /** - * Return resolver for fetching the field values given the entity - */ + /** Return resolver for fetching the field values given the entity */ FieldResolver getFieldResolver(EntitySpec entitySpec); } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java index addac84c68b18..d4dbf86172954 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityTypeFieldResolverProvider.java @@ -1,16 +1,13 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.List; - -/** - * Provides field resolver for entity type given entitySpec - */ +/** Provides field resolver for entity type given entitySpec */ public class EntityTypeFieldResolverProvider implements EntityFieldResolverProvider { @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java index 32960de687839..c4d27d959e023 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/EntityUrnFieldResolverProvider.java @@ -1,16 +1,13 @@ package com.datahub.authorization.fieldresolverprovider; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.datastax.oss.driver.shaded.guava.common.collect.ImmutableList; import java.util.Collections; import java.util.List; - -/** - * Provides field resolver for entity urn given entitySpec - */ +/** Provides field resolver for entity urn given entitySpec */ public class EntityUrnFieldResolverProvider implements EntityFieldResolverProvider { @Override diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java index b1202d9f4bbd3..a64dc3a8b5db8 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProvider.java @@ -1,33 +1,29 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME; + import com.datahub.authentication.Authentication; -import 
com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.GroupMembership; import com.linkedin.identity.NativeGroupMembership; import com.linkedin.metadata.Constants; -import com.linkedin.identity.GroupMembership; -import java.util.Collections; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; -import static com.linkedin.metadata.Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME; - - -/** - * Provides field resolver for owners given entitySpec - */ +/** Provides field resolver for owners given entitySpec */ @Slf4j @RequiredArgsConstructor public class GroupMembershipFieldResolverProvider implements EntityFieldResolverProvider { @@ -51,21 +47,30 @@ private FieldResolver.FieldValue getGroupMembership(EntitySpec entitySpec) { EnvelopedAspect nativeGroupMembershipAspect; List groups = new ArrayList<>(); try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null - || !(response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME) - || response.getAspects().containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME))) { + || !(response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME) + || response + .getAspects() + .containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME))) { return FieldResolver.emptyFieldValue(); } if (response.getAspects().containsKey(Constants.GROUP_MEMBERSHIP_ASPECT_NAME)) { groupMembershipAspect = response.getAspects().get(Constants.GROUP_MEMBERSHIP_ASPECT_NAME); - GroupMembership groupMembership = new GroupMembership(groupMembershipAspect.getValue().data()); + GroupMembership groupMembership = + new GroupMembership(groupMembershipAspect.getValue().data()); groups.addAll(groupMembership.getGroups()); } if (response.getAspects().containsKey(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)) { - nativeGroupMembershipAspect = response.getAspects().get(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME); - NativeGroupMembership nativeGroupMembership = new NativeGroupMembership(nativeGroupMembershipAspect.getValue().data()); + nativeGroupMembershipAspect = + response.getAspects().get(Constants.NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME); + NativeGroupMembership nativeGroupMembership = + new NativeGroupMembership(nativeGroupMembershipAspect.getValue().data()); groups.addAll(nativeGroupMembership.getNativeGroups()); } } catch (Exception e) { diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java 
b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java index 3c27f9e6ce8d7..d26082bab6d63 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/fieldresolverprovider/OwnerFieldResolverProvider.java @@ -1,9 +1,9 @@ package com.datahub.authorization.fieldresolverprovider; import com.datahub.authentication.Authentication; -import com.datahub.authorization.FieldResolver; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; +import com.datahub.authorization.FieldResolver; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -17,10 +17,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - -/** - * Provides field resolver for owners given entitySpec - */ +/** Provides field resolver for owners given entitySpec */ @Slf4j @RequiredArgsConstructor public class OwnerFieldResolverProvider implements EntityFieldResolverProvider { @@ -42,8 +39,12 @@ private FieldResolver.FieldValue getOwners(EntitySpec entitySpec) { Urn entityUrn = UrnUtils.getUrn(entitySpec.getEntity()); EnvelopedAspect ownershipAspect; try { - EntityResponse response = _entityClient.getV2(entityUrn.getEntityType(), entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), _systemAuthentication); + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { return FieldResolver.emptyFieldValue(); } @@ -54,7 +55,10 @@ private FieldResolver.FieldValue getOwners(EntitySpec entitySpec) { } Ownership ownership = new Ownership(ownershipAspect.getValue().data()); return FieldResolver.FieldValue.builder() - .values(ownership.getOwners().stream().map(owner -> owner.getOwner().toString()).collect(Collectors.toSet())) + .values( + ownership.getOwners().stream() + .map(owner -> owner.getOwner().toString()) + .collect(Collectors.toSet())) .build(); } } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java index cd7ae5c3bffc4..51a700a935274 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/role/RoleService.java @@ -1,5 +1,8 @@ package com.datahub.authorization.role; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; @@ -14,35 +17,45 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor public class RoleService { private final EntityClient _entityClient; - public void batchAssignRoleToActors(@Nonnull final List actors, @Nullable final Urn roleUrn, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public void batchAssignRoleToActors( + @Nonnull final 
List actors, + @Nullable final Urn roleUrn, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { if (roleUrn != null && !_entityClient.exists(roleUrn, authentication)) { - throw new RuntimeException(String.format("Role %s does not exist. Skipping batch role assignment", roleUrn)); + throw new RuntimeException( + String.format("Role %s does not exist. Skipping batch role assignment", roleUrn)); } - actors.forEach(actor -> { - try { - assignRoleToActor(actor, roleUrn, authentication); - } catch (Exception e) { - log.warn(String.format("Failed to assign role %s to actor %s. Skipping actor assignment", roleUrn, actor), e); - } - }); + actors.forEach( + actor -> { + try { + assignRoleToActor(actor, roleUrn, authentication); + } catch (Exception e) { + log.warn( + String.format( + "Failed to assign role %s to actor %s. Skipping actor assignment", + roleUrn, actor), + e); + } + }); } - private void assignRoleToActor(@Nonnull final String actor, @Nullable final Urn roleUrn, - @Nonnull final Authentication authentication) throws URISyntaxException, RemoteInvocationException { + private void assignRoleToActor( + @Nonnull final String actor, + @Nullable final Urn roleUrn, + @Nonnull final Authentication authentication) + throws URISyntaxException, RemoteInvocationException { final Urn actorUrn = Urn.createFromString(actor); if (!_entityClient.exists(actorUrn, authentication)) { - log.warn(String.format("Failed to assign role %s to actor %s, actor does not exist. Skipping actor assignment", - roleUrn, actor)); + log.warn( + String.format( + "Failed to assign role %s to actor %s, actor does not exist. Skipping actor assignment", + roleUrn, actor)); return; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java index ac27e1a16c8b7..dc63b5e4a2897 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java @@ -1,5 +1,7 @@ package com.datahub.telemetry; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; @@ -27,9 +29,6 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class TrackingService { @@ -56,11 +55,29 @@ public class TrackingService { private static final String INTERVAL_FIELD = "interval"; private static final String VIEW_TYPE_FIELD = "viewType"; - private static final Set ALLOWED_EVENT_FIELDS = new HashSet<>( - ImmutableList.of(EVENT_TYPE_FIELD, ENTITY_TYPE_FIELD, ENTITY_TYPE_FILTER_FIELD, - PAGE_NUMBER_FIELD, PAGE_FIELD, TOTAL_FIELD, INDEX_FIELD, RESULT_TYPE_FIELD, RENDER_ID_FIELD, MODULE_ID_FIELD, - RENDER_TYPE_FIELD, SCENARIO_TYPE_FIELD, SECTION_FIELD, ACCESS_TOKEN_TYPE_FIELD, DURATION_FIELD, - ROLE_URN_FIELD, POLICY_URN_FIELD, SOURCE_TYPE_FIELD, INTERVAL_FIELD, VIEW_TYPE_FIELD)); + private static final Set ALLOWED_EVENT_FIELDS = + new HashSet<>( + ImmutableList.of( + EVENT_TYPE_FIELD, + ENTITY_TYPE_FIELD, + ENTITY_TYPE_FILTER_FIELD, + PAGE_NUMBER_FIELD, + PAGE_FIELD, + TOTAL_FIELD, + INDEX_FIELD, + RESULT_TYPE_FIELD, + RENDER_ID_FIELD, + MODULE_ID_FIELD, + RENDER_TYPE_FIELD, + SCENARIO_TYPE_FIELD, + SECTION_FIELD, + ACCESS_TOKEN_TYPE_FIELD, + DURATION_FIELD, 
+ ROLE_URN_FIELD, + POLICY_URN_FIELD, + SOURCE_TYPE_FIELD, + INTERVAL_FIELD, + VIEW_TYPE_FIELD)); private static final String ACTOR_URN_FIELD = "actorUrn"; private static final String ORIGIN_FIELD = "origin"; @@ -72,9 +89,20 @@ public class TrackingService { private static final String USER_URN_FIELD = "userUrn"; private static final String USER_URNS_FIELD = "userUrns"; private static final String PARENT_NODE_URN_FIELD = "parentNodeUrn"; - private static final Set ALLOWED_OBFUSCATED_EVENT_FIELDS = new HashSet<>( - ImmutableList.of(ACTOR_URN_FIELD, ORIGIN_FIELD, ENTITY_URN_FIELD, ENTITY_URNS_FIELD, GROUP_NAME_FIELD, - SECTION_FIELD, ENTITY_PAGE_FILTER_FIELD, PATH_FIELD, USER_URN_FIELD, USER_URNS_FIELD, PARENT_NODE_URN_FIELD)); + private static final Set ALLOWED_OBFUSCATED_EVENT_FIELDS = + new HashSet<>( + ImmutableList.of( + ACTOR_URN_FIELD, + ORIGIN_FIELD, + ENTITY_URN_FIELD, + ENTITY_URNS_FIELD, + GROUP_NAME_FIELD, + SECTION_FIELD, + ENTITY_PAGE_FILTER_FIELD, + PATH_FIELD, + USER_URN_FIELD, + USER_URNS_FIELD, + PARENT_NODE_URN_FIELD)); private final MixpanelAPI _mixpanelAPI; private final MessageBuilder _mixpanelMessageBuilder; @@ -100,9 +128,11 @@ public void emitAnalyticsEvent(@Nonnull final JsonNode event) { } try { - _mixpanelAPI.sendMessage(_mixpanelMessageBuilder.event(getClientId(), eventType, sanitizedEvent)); + _mixpanelAPI.sendMessage( + _mixpanelMessageBuilder.event(getClientId(), eventType, sanitizedEvent)); } catch (IOException e) { - log.info("Failed to send event to Mixpanel; this does not affect the functionality of the application"); + log.info( + "Failed to send event to Mixpanel; this does not affect the functionality of the application"); log.debug("Failed to send event to Mixpanel", e); } } @@ -134,7 +164,8 @@ JSONObject sanitizeEvent(@Nonnull final JsonNode event) { final JSONObject unsanitizedEventObj; try { - unsanitizedEventObj = new JSONObject(_objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(event)); + unsanitizedEventObj = + new JSONObject(_objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(event)); } catch (Exception e) { log.warn("Failed to serialize event", e); return createFailedEvent(); @@ -145,18 +176,25 @@ JSONObject sanitizeEvent(@Nonnull final JsonNode event) { return createFailedEvent(); } - unsanitizedEventObj.keys().forEachRemaining(key -> { - String keyString = (String) key; - try { - if (ALLOWED_EVENT_FIELDS.contains(keyString)) { - sanitizedEventObj.put(keyString, unsanitizedEventObj.get(keyString).toString()); - } else if (ALLOWED_OBFUSCATED_EVENT_FIELDS.contains(keyString)) { - sanitizedEventObj.put(keyString, _secretService.hashString(unsanitizedEventObj.get(keyString).toString())); - } - } catch (JSONException e) { - log.warn(String.format("Failed to sanitize field %s. Skipping this field.", keyString), e); - } - }); + unsanitizedEventObj + .keys() + .forEachRemaining( + key -> { + String keyString = (String) key; + try { + if (ALLOWED_EVENT_FIELDS.contains(keyString)) { + sanitizedEventObj.put(keyString, unsanitizedEventObj.get(keyString).toString()); + } else if (ALLOWED_OBFUSCATED_EVENT_FIELDS.contains(keyString)) { + sanitizedEventObj.put( + keyString, + _secretService.hashString(unsanitizedEventObj.get(keyString).toString())); + } + } catch (JSONException e) { + log.warn( + String.format("Failed to sanitize field %s. 
Skipping this field.", keyString), + e); + } + }); return transformObjectNodeToJSONObject(sanitizedEventObj); } @@ -189,8 +227,8 @@ private static String createClientIdIfNotPresent(@Nonnull final EntityService en final AuditStamp clientIdStamp = new AuditStamp(); clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); clientIdStamp.setTime(System.currentTimeMillis()); - entityService.ingestAspectIfNotPresent(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, - null); + entityService.ingestAspectIfNotPresent( + UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); return uuid; } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java index 2e25493133b43..5b5702de4381a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/AuthenticatorChainTest.java @@ -1,18 +1,17 @@ package com.datahub.authentication.authenticator; -import com.datahub.authentication.Authentication; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; +import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; import com.datahub.authentication.AuthenticationExpiredException; -import com.datahub.plugins.auth.authentication.Authenticator; import com.datahub.authentication.AuthenticationRequest; +import com.datahub.plugins.auth.authentication.Authenticator; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - public class AuthenticatorChainTest { @Test @@ -23,7 +22,8 @@ public void testAuthenticateSuccess() throws Exception { final Authentication mockAuthentication = Mockito.mock(Authentication.class); Mockito.when(mockAuthenticator1.authenticate(Mockito.any())).thenReturn(mockAuthentication); - Mockito.when(mockAuthenticator2.authenticate(Mockito.any())).thenThrow(new AuthenticationException("Failed to authenticate")); + Mockito.when(mockAuthenticator2.authenticate(Mockito.any())) + .thenThrow(new AuthenticationException("Failed to authenticate")); authenticatorChain.register(mockAuthenticator1); authenticatorChain.register(mockAuthenticator2); @@ -40,13 +40,13 @@ public void testAuthenticateSuccess() throws Exception { verify(mockAuthenticator2, times(0)).authenticate(any()); } - @Test public void testAuthenticateFailure() throws Exception { final AuthenticatorChain authenticatorChain = new AuthenticatorChain(); final Authenticator mockAuthenticator = Mockito.mock(Authenticator.class); final Authentication mockAuthentication = Mockito.mock(Authentication.class); - Mockito.when(mockAuthenticator.authenticate(Mockito.any())).thenThrow(new AuthenticationException("Failed to authenticate")); + Mockito.when(mockAuthenticator.authenticate(Mockito.any())) + .thenThrow(new AuthenticationException("Failed to authenticate")); authenticatorChain.register(mockAuthenticator); @@ -55,7 +55,8 @@ public void testAuthenticateFailure() throws Exception { Authentication result = authenticatorChain.authenticate(mockContext, false); - // If the authenticator throws, verify that null is returned to 
indicate failure to authenticate. + // If the authenticator throws, verify that null is returned to indicate failure to + // authenticate. assertNull(result); } @@ -64,13 +65,16 @@ public void testAuthenticateThrows() throws Exception { final AuthenticatorChain authenticatorChain = new AuthenticatorChain(); final Authenticator mockAuthenticator = Mockito.mock(Authenticator.class); final Authentication mockAuthentication = Mockito.mock(Authentication.class); - Mockito.when(mockAuthenticator.authenticate(Mockito.any())).thenThrow(new AuthenticationExpiredException("Failed to authenticate, token has expired")); + Mockito.when(mockAuthenticator.authenticate(Mockito.any())) + .thenThrow(new AuthenticationExpiredException("Failed to authenticate, token has expired")); authenticatorChain.register(mockAuthenticator); // Verify that the mock authentication is returned on Authenticate. final AuthenticationRequest mockContext = Mockito.mock(AuthenticationRequest.class); - assertThrows(AuthenticationExpiredException.class, () -> authenticatorChain.authenticate(mockContext, false)); + assertThrows( + AuthenticationExpiredException.class, + () -> authenticatorChain.authenticate(mockContext, false)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java index 759ecaa8f3a4d..62395c77e3847 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubJwtTokenAuthenticatorTest.java @@ -1,9 +1,12 @@ package com.datahub.authentication.authenticator; +import static org.mockito.Mockito.*; +import static org.testng.AssertJUnit.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; -import com.datahub.authentication.AuthenticationRequest; import com.datahub.authentication.AuthenticationException; +import com.datahub.authentication.AuthenticationRequest; import com.google.common.collect.ImmutableMap; import java.util.HashMap; import java.util.HashSet; @@ -11,10 +14,6 @@ import java.util.Map; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.AssertJUnit.*; - - public class DataHubJwtTokenAuthenticatorTest { @Test @@ -28,14 +27,16 @@ void testPublicAuthentication() throws Exception { HashSet set = new HashSet<>(); set.add("https://test.com/realm/domain"); - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); Map config = new HashMap<>(); config.put("userIdClaim", "username"); config.put("trustedIssuers", getTrustedIssuer()); - config.put("publicKey", + config.put( + "publicKey", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu1SU1LfVLPHCozMxH2Mo4lgOEePzNm0tRgeLezV6ffAt0gunVTLw7onLRnrq0/" + "IzW7yWR7QkrmBL7jTKEn5u+qKhbwKfBstIs+bMY2Zkp18gnTxKLxoS2tFczGkPLPgizskuemMghRniWaoLcyehkd3qqGElvW/VDL5AaWTg0nLVkjRo9z+40RQzuVaE" + "8AkAFmxZzow3x+VJYKdjykkJ0iT9wCS0DRTXu269V264Vf/3jvredZiKRkgwlL9xNAwxXFg0x/XFw005UWVRIkdgcKWTjpBP2dPwVZ4WWC+9aGVd+Gyn1o0CLelf" @@ -59,7 +60,8 @@ void 
testInvalidToken() throws Exception { + "L5lrwEO-rTXYNamy8gJOBoM8n7gHDOo6JDd25go4MsLbjHbQ-WNq5SErgaNOMfZdkg2jqKVldZvjW33v8aupx08fzONnuzaYIJBQpONhGzDkYZKkk" + "rewdrYYVl_naNRWsKt8uSVu83G3mLhMPazkxNT5CWfNR7sdXfladz8U6ruLFOGUJJ5KDjEVAReRpEbxaKOIY6oFio1TeUQsi" + "6vppLXB0RupTBmE5dr7rxdL4j9eDY94M2uowBDuOsEGA"; - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); @@ -84,14 +86,16 @@ void testUserClaim() throws Exception { HashSet set = new HashSet<>(); set.add("https://test.com/realm/domain"); - final AuthenticationRequest context = new AuthenticationRequest(ImmutableMap.of("Authorization", token)); + final AuthenticationRequest context = + new AuthenticationRequest(ImmutableMap.of("Authorization", token)); DataHubJwtTokenAuthenticator mock = mock(DataHubJwtTokenAuthenticator.class); when(mock.authenticate(context)).thenCallRealMethod(); Map config = new HashMap<>(); config.put("userId", "username"); config.put("trustedIssuers", getTrustedIssuer()); - config.put("publicKey", + config.put( + "publicKey", "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAu1SU1LfVLPHCozMxH2Mo4lgOEePzNm0tRgeLezV6" + "ffAt0gunVTLw7onLRnrq0/IzW7yWR7QkrmBL7jTKEn5u+qKhbwKfBstIs+bMY2Zkp18gnTxKLxoS2tFczGkPLPgizskuemM" + "ghRniWaoLcyehkd3qqGElvW/VDL5AaWTg0nLVkjRo9z+40RQzuVaE8AkAFmxZzow3x+VJYKdjykkJ0iT9wCS0DRTXu269V26" diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java index 72b2fd5769715..819caa80d3417 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubSystemAuthenticatorTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; @@ -8,9 +11,6 @@ import java.util.Collections; import org.testng.annotations.Test; -import static com.datahub.authentication.AuthenticationConstants.*; -import static org.testng.Assert.*; - public class DataHubSystemAuthenticatorTest { private static final String TEST_CLIENT_ID = "clientId"; @@ -21,17 +21,33 @@ public void testInit() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); assertThrows(() -> authenticator.init(null, null)); assertThrows(() -> authenticator.init(Collections.emptyMap(), null)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID), null)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null)); + assertThrows( + () -> authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID), null)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of(SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null)); // Correct configs provided. 
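     // Both the client-id and client-secret entries are required; each assertThrows case above
     // omits at least one of them.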
- authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); } @Test public void testAuthenticateFailureMissingAuthorizationHeader() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap()); assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); @@ -40,22 +56,39 @@ public void testAuthenticateFailureMissingAuthorizationHeader() { @Test public void testAuthenticateFailureMissingBasicCredentials() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer something") // Missing basic authentication. - ); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of( + AUTHORIZATION_HEADER_NAME, "Bearer something") // Missing basic authentication. + ); assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); } @Test public void testAuthenticateFailureMismatchingCredentials() { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic incorrectId:incorrectSecret") // Incorrect authentication - ); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of( + AUTHORIZATION_HEADER_NAME, + "Basic incorrectId:incorrectSecret") // Incorrect authentication + ); assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); } @@ -63,12 +96,19 @@ public void testAuthenticateFailureMismatchingCredentials() { public void testAuthenticateSuccessNoDelegatedActor() throws Exception { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final String authorizationHeaderValue = String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue) - ); + authenticator.init( + ImmutableMap.of( + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final String 
authorizationHeaderValue = + String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue)); final Authentication authentication = authenticator.authenticate(context); @@ -84,13 +124,23 @@ public void testAuthenticateSuccessNoDelegatedActor() throws Exception { public void testAuthenticateSuccessDelegatedActor() throws Exception { final DataHubSystemAuthenticator authenticator = new DataHubSystemAuthenticator(); - authenticator.init(ImmutableMap.of(SYSTEM_CLIENT_ID_CONFIG, TEST_CLIENT_ID, SYSTEM_CLIENT_SECRET_CONFIG, TEST_CLIENT_SECRET), null); - - final String authorizationHeaderValue = String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); - final AuthenticationRequest context = new AuthenticationRequest( + authenticator.init( ImmutableMap.of( - AUTHORIZATION_HEADER_NAME, authorizationHeaderValue, LEGACY_X_DATAHUB_ACTOR_HEADER, "urn:li:corpuser:datahub") - ); + SYSTEM_CLIENT_ID_CONFIG, + TEST_CLIENT_ID, + SYSTEM_CLIENT_SECRET_CONFIG, + TEST_CLIENT_SECRET), + null); + + final String authorizationHeaderValue = + String.format("Basic %s:%s", TEST_CLIENT_ID, TEST_CLIENT_SECRET); + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of( + AUTHORIZATION_HEADER_NAME, + authorizationHeaderValue, + LEGACY_X_DATAHUB_ACTOR_HEADER, + "urn:li:corpuser:datahub")); final Authentication authentication = authenticator.authenticate(context); @@ -101,4 +151,4 @@ public void testAuthenticateSuccessDelegatedActor() throws Exception { assertEquals(authentication.getCredentials(), authorizationHeaderValue); assertEquals(authentication.getClaims(), Collections.emptyMap()); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java index f5ce938c411c6..5bd273f3bacf8 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/authenticator/DataHubTokenAuthenticatorTest.java @@ -1,5 +1,17 @@ package com.datahub.authentication.authenticator; +import static com.datahub.authentication.AuthenticationConstants.*; +import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SALT_CONFIG_NAME; +import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_ALG_CONFIG_NAME; +import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_KEY_CONFIG_NAME; +import static com.datahub.authentication.token.TokenClaims.ACTOR_ID_CLAIM_NAME; +import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME; +import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME; +import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,130 +27,167 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.AspectSpec; import 
com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import org.mockito.Mockito; -import org.testng.annotations.Test; import java.util.Collections; import java.util.Map; - -import static com.datahub.authentication.AuthenticationConstants.*; -import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SALT_CONFIG_NAME; -import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_ALG_CONFIG_NAME; -import static com.datahub.authentication.authenticator.DataHubTokenAuthenticator.SIGNING_KEY_CONFIG_NAME; -import static com.datahub.authentication.token.TokenClaims.ACTOR_ID_CLAIM_NAME; -import static com.datahub.authentication.token.TokenClaims.ACTOR_TYPE_CLAIM_NAME; -import static com.datahub.authentication.token.TokenClaims.TOKEN_TYPE_CLAIM_NAME; -import static com.datahub.authentication.token.TokenClaims.TOKEN_VERSION_CLAIM_NAME; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertThrows; - +import org.mockito.Mockito; +import org.testng.annotations.Test; public class DataHubTokenAuthenticatorTest { - private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; - private static final String TEST_SALT = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI93="; - - final EntityService mockService = Mockito.mock(EntityService.class); - final StatefulTokenService statefulTokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALT); - - @Test - public void testInit() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - AuthenticatorContext authenticatorContext = - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)); - assertThrows(() -> authenticator.init(null, authenticatorContext)); - assertThrows(() -> authenticator.init(Collections.emptyMap(), authenticatorContext)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, - SIGNING_ALG_CONFIG_NAME, "UNSUPPORTED_ALG"), authenticatorContext)); - assertThrows(() -> authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, - SIGNING_ALG_CONFIG_NAME, "HS256"), null)); - - // Correct configs provided. 
- authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), authenticatorContext); - } - - @Test - public void testAuthenticateFailureMissingAuthorizationHeader() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap()); - assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); - } - - @Test - public void testAuthenticateFailureMissingBearerCredentials() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic username:password") - ); - assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); - } - - @Test - public void testAuthenticateFailureInvalidToken() { - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer someRandomToken") - ); - assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); - } - - @Test - public void testAuthenticateSuccess() throws Exception { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - final ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); - final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); - Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); - Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); - Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); - - final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); - authenticator.init(ImmutableMap.of(SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SALT_CONFIG_NAME, - TEST_SALT, SIGNING_ALG_CONFIG_NAME, "HS256"), - new AuthenticatorContext(ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); - - final Actor datahub = new Actor(ActorType.USER, "datahub"); - final String validToken = authenticator._statefulTokenService.generateAccessToken( - TokenType.PERSONAL, - datahub, - "some token", - "A token description", - datahub.toUrnStr() - ); - - final String authorizationHeaderValue = String.format("Bearer 
%s", validToken); - final AuthenticationRequest context = new AuthenticationRequest( - ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue) - ); - - final Authentication authentication = authenticator.authenticate(context); - - // Validate the resulting authentication object - assertNotNull(authentication); - assertEquals(authentication.getActor().getType(), ActorType.USER); - assertEquals(authentication.getActor().getId(), "datahub"); - assertEquals(authentication.getCredentials(), authorizationHeaderValue); - - Map claimsMap = authentication.getClaims(); - assertEquals(claimsMap.get(TOKEN_VERSION_CLAIM_NAME), 2); - assertEquals(claimsMap.get(TOKEN_TYPE_CLAIM_NAME), "PERSONAL"); - assertEquals(claimsMap.get(ACTOR_TYPE_CLAIM_NAME), "USER"); - assertEquals(claimsMap.get(ACTOR_ID_CLAIM_NAME), "datahub"); - } + private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; + private static final String TEST_SALT = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI93="; + + final EntityService mockService = Mockito.mock(EntityService.class); + final StatefulTokenService statefulTokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALT); + + @Test + public void testInit() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + AuthenticatorContext authenticatorContext = + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService)); + assertThrows(() -> authenticator.init(null, authenticatorContext)); + assertThrows(() -> authenticator.init(Collections.emptyMap(), authenticatorContext)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SIGNING_ALG_CONFIG_NAME, + "UNSUPPORTED_ALG"), + authenticatorContext)); + assertThrows( + () -> + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, TEST_SIGNING_KEY, SIGNING_ALG_CONFIG_NAME, "HS256"), + null)); + + // Correct configs provided. 
+ authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + authenticatorContext); + } + + @Test + public void testAuthenticateFailureMissingAuthorizationHeader() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final AuthenticationRequest context = new AuthenticationRequest(Collections.emptyMap()); + assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); + } + + @Test + public void testAuthenticateFailureMissingBearerCredentials() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Basic username:password")); + assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); + } + + @Test + public void testAuthenticateFailureInvalidToken() { + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final AuthenticationRequest context = + new AuthenticationRequest( + ImmutableMap.of(AUTHORIZATION_HEADER_NAME, "Bearer someRandomToken")); + assertThrows(AuthenticationException.class, () -> authenticator.authenticate(context)); + } + + @Test + public void testAuthenticateSuccess() throws Exception { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + final ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + DataHubTokenAuthenticatorTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + final AspectSpec keyAspectSpec = + configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); + Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) + .thenReturn(keyAspectSpec); + Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); + Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); + + final DataHubTokenAuthenticator authenticator = new DataHubTokenAuthenticator(); + authenticator.init( + ImmutableMap.of( + SIGNING_KEY_CONFIG_NAME, + TEST_SIGNING_KEY, + SALT_CONFIG_NAME, + TEST_SALT, + SIGNING_ALG_CONFIG_NAME, + "HS256"), + new AuthenticatorContext( + ImmutableMap.of(ENTITY_SERVICE, mockService, TOKEN_SERVICE, statefulTokenService))); + + final Actor datahub = new Actor(ActorType.USER, "datahub"); + final String validToken = + authenticator._statefulTokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", 
datahub.toUrnStr());
+
+    final String authorizationHeaderValue = String.format("Bearer %s", validToken);
+    final AuthenticationRequest context =
+        new AuthenticationRequest(
+            ImmutableMap.of(AUTHORIZATION_HEADER_NAME, authorizationHeaderValue));
+
+    final Authentication authentication = authenticator.authenticate(context);
+
+    // Validate the resulting authentication object
+    assertNotNull(authentication);
+    assertEquals(authentication.getActor().getType(), ActorType.USER);
+    assertEquals(authentication.getActor().getId(), "datahub");
+    assertEquals(authentication.getCredentials(), authorizationHeaderValue);
+
+    Map<String, Object> claimsMap = authentication.getClaims();
+    assertEquals(claimsMap.get(TOKEN_VERSION_CLAIM_NAME), 2);
+    assertEquals(claimsMap.get(TOKEN_TYPE_CLAIM_NAME), "PERSONAL");
+    assertEquals(claimsMap.get(ACTOR_TYPE_CLAIM_NAME), "USER");
+    assertEquals(claimsMap.get(ACTOR_ID_CLAIM_NAME), "datahub");
+  }
 }
diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java
index 81cf94d3bfe02..6d0678d4f3558 100644
--- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java
+++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/group/GroupServiceTest.java
@@ -1,5 +1,9 @@
 package com.datahub.authentication.group;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+import static org.testng.Assert.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -30,11 +34,6 @@
 import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-import static org.testng.Assert.*;
-
-
 public class GroupServiceTest {
 
   private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system";
@@ -67,20 +66,36 @@ public void setupTest() throws Exception {
     _groupKey.setName(GROUP_ID);
 
     NativeGroupMembership nativeGroupMembership = new NativeGroupMembership();
-    nativeGroupMembership.setNativeGroups(new UrnArray(Urn.createFromString(NATIVE_GROUP_URN_STRING)));
+    nativeGroupMembership.setNativeGroups(
+        new UrnArray(Urn.createFromString(NATIVE_GROUP_URN_STRING)));
     GroupMembership groupMembership = new GroupMembership();
     groupMembership.setGroups(new UrnArray(Urn.createFromString(EXTERNAL_GROUP_URN_STRING)));
 
-    _entityResponseMap = ImmutableMap.of(USER_URN, new EntityResponse().setEntityName(CORP_USER_ENTITY_NAME)
-        .setUrn(USER_URN)
-        .setAspects(new EnvelopedAspectMap(ImmutableMap.of(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data())), GROUP_MEMBERSHIP_ASPECT_NAME,
-            new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))))));
-
-    _entityRelationships = new EntityRelationships().setStart(0)
-        .setCount(1)
-        .setTotal(1)
-        .setRelationships(new EntityRelationshipArray(ImmutableList.of(
-            new EntityRelationship().setEntity(USER_URN).setType(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME))));
+    _entityResponseMap =
+        ImmutableMap.of(
+            USER_URN,
+            new EntityResponse()
+                .setEntityName(CORP_USER_ENTITY_NAME)
+                .setUrn(USER_URN)
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME,
+                            new EnvelopedAspect()
+                                .setValue(new Aspect(nativeGroupMembership.data())),
+                            GROUP_MEMBERSHIP_ASPECT_NAME,
+                            
new EnvelopedAspect().setValue(new Aspect(groupMembership.data())))))); + + _entityRelationships = + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(USER_URN) + .setType(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)))); _entityClient = mock(EntityClient.class); _entityService = mock(EntityService.class); @@ -118,7 +133,8 @@ public void testGetGroupOriginNullArguments() { @Test public void testGetGroupOriginPasses() { Origin groupOrigin = mock(Origin.class); - when(_entityService.getLatestAspect(eq(_groupUrn), eq(ORIGIN_ASPECT_NAME))).thenReturn(groupOrigin); + when(_entityService.getLatestAspect(eq(_groupUrn), eq(ORIGIN_ASPECT_NAME))) + .thenReturn(groupOrigin); assertEquals(groupOrigin, _groupService.getGroupOrigin(_groupUrn)); } @@ -132,8 +148,9 @@ public void testAddUserToNativeGroupNullArguments() { @Test public void testAddUserToNativeGroupPasses() throws Exception { when(_entityService.exists(USER_URN)).thenReturn(true); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); _groupService.addUserToNativeGroup(USER_URN, _groupUrn, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); @@ -141,68 +158,101 @@ public void testAddUserToNativeGroupPasses() throws Exception { @Test public void testCreateNativeGroupNullArguments() { - assertThrows(() -> _groupService.createNativeGroup(null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createNativeGroup(_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createNativeGroup(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createNativeGroup( + null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createNativeGroup( + _groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.createNativeGroup(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); } @Test public void testCreateNativeGroupPasses() throws Exception { - _groupService.createNativeGroup(_groupKey, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION); + _groupService.createNativeGroup( + _groupKey, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION); verify(_entityClient, times(2)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingNativeGroupMembersNullArguments() { - assertThrows(() -> _groupService.removeExistingNativeGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.removeExistingNativeGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.removeExistingNativeGroupMembers( + null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.removeExistingNativeGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); } @Test - public void testRemoveExistingNativeGroupMembersGroupNotInNativeGroupMembership() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); - - 
_groupService.removeExistingNativeGroupMembers(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + public void testRemoveExistingNativeGroupMembersGroupNotInNativeGroupMembership() + throws Exception { + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); + + _groupService.removeExistingNativeGroupMembers( + Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingNativeGroupMembersPasses() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); - _groupService.removeExistingNativeGroupMembers(Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + _groupService.removeExistingNativeGroupMembers( + Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testMigrateGroupMembershipToNativeGroupMembershipNullArguments() { - assertThrows(() -> _groupService.migrateGroupMembershipToNativeGroupMembership(null, USER_URN.toString(), - SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.migrateGroupMembershipToNativeGroupMembership( + null, USER_URN.toString(), SYSTEM_AUTHENTICATION)); } @Test public void testMigrateGroupMembershipToNativeGroupMembershipPasses() throws Exception { - when(_graphClient.getRelatedEntities(eq(EXTERNAL_GROUP_URN_STRING), - eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), eq(RelationshipDirection.INCOMING), anyInt(), - anyInt(), any())).thenReturn(_entityRelationships); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_graphClient.getRelatedEntities( + eq(EXTERNAL_GROUP_URN_STRING), + eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), + eq(RelationshipDirection.INCOMING), + anyInt(), + anyInt(), + any())) + .thenReturn(_entityRelationships); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); when(_entityService.exists(USER_URN)).thenReturn(true); - _groupService.migrateGroupMembershipToNativeGroupMembership(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), - USER_URN.toString(), SYSTEM_AUTHENTICATION); + _groupService.migrateGroupMembershipToNativeGroupMembership( + Urn.createFromString(EXTERNAL_GROUP_URN_STRING), + USER_URN.toString(), + SYSTEM_AUTHENTICATION); verify(_entityClient, times(3)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testCreateGroupInfoNullArguments() { - assertThrows(() -> _groupService.createGroupInfo(null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createGroupInfo(_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.createGroupInfo(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createGroupInfo( + null, GROUP_NAME, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _groupService.createGroupInfo( + 
_groupKey, null, GROUP_DESCRIPTION, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.createGroupInfo(_groupKey, GROUP_NAME, null, SYSTEM_AUTHENTICATION)); } @Test @@ -229,36 +279,46 @@ public void testGetExistingGroupMembersNullArguments() { @Test public void testGetExistingGroupMembersPasses() { - when(_graphClient.getRelatedEntities(eq(GROUP_URN_STRING), - eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), eq(RelationshipDirection.INCOMING), anyInt(), - anyInt(), any())).thenReturn(_entityRelationships); - - assertEquals(USER_URN_LIST, _groupService.getExistingGroupMembers(_groupUrn, USER_URN.toString())); + when(_graphClient.getRelatedEntities( + eq(GROUP_URN_STRING), + eq(ImmutableList.of(IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME)), + eq(RelationshipDirection.INCOMING), + anyInt(), + anyInt(), + any())) + .thenReturn(_entityRelationships); + + assertEquals( + USER_URN_LIST, _groupService.getExistingGroupMembers(_groupUrn, USER_URN.toString())); } @Test public void testRemoveExistingGroupMembersNullArguments() { - assertThrows(() -> _groupService.removeExistingGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _groupService.removeExistingGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.removeExistingGroupMembers(null, USER_URN_LIST, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _groupService.removeExistingGroupMembers(_groupUrn, null, SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingGroupMembersGroupNotInGroupMembership() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); - _groupService.removeExistingGroupMembers(Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + _groupService.removeExistingGroupMembers( + Urn.createFromString(NATIVE_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } @Test public void testRemoveExistingGroupMembersPasses() throws Exception { - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))).thenReturn( - _entityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), any(), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(_entityResponseMap); - _groupService.removeExistingGroupMembers(Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, - SYSTEM_AUTHENTICATION); + _groupService.removeExistingGroupMembers( + Urn.createFromString(EXTERNAL_GROUP_URN_STRING), USER_URN_LIST, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java index 2eed108b40300..cd9d5972103c1 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.authentication.invite; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; 
+ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -17,11 +21,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class InviteTokenServiceTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -68,20 +67,24 @@ public void testIsInviteTokenValidTrue() throws Exception { @Test public void testGetInviteTokenRoleNullEntity() throws Exception { - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(null); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(null); - assertThrows(() -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); } @Test public void testGetInviteTokenRoleEmptyAspectMap() throws Exception { final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap()); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); - assertThrows(() -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION)); } @Test @@ -89,11 +92,14 @@ public void testGetInviteTokenRoleNoRole() throws Exception { final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING); - aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); + aspectMap.put( + INVITE_TOKEN_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION); assertNull(roleUrn); @@ -103,12 +109,16 @@ public void testGetInviteTokenRoleNoRole() throws Exception { public void testGetInviteTokenRole() throws Exception { final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); - aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); + final InviteToken inviteTokenAspect = + new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); + aspectMap.put( + INVITE_TOKEN_ASPECT_NAME, + new EnvelopedAspect().setValue(new 
Aspect(inviteTokenAspect.data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, SYSTEM_AUTHENTICATION); assertNotNull(roleUrn); @@ -119,15 +129,22 @@ public void testGetInviteTokenRole() throws Exception { public void getInviteTokenRoleUrnDoesNotExist() throws Exception { when(_entityClient.exists(eq(roleUrn), eq(SYSTEM_AUTHENTICATION))).thenReturn(false); - assertThrows(() -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _inviteTokenService.getInviteToken(roleUrn.toString(), false, SYSTEM_AUTHENTICATION)); } @Test public void getInviteTokenRegenerate() throws Exception { final SearchResult searchResult = new SearchResult(); searchResult.setEntities(new SearchEntityArray()); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING); when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); @@ -140,8 +157,14 @@ public void getInviteTokenRegenerate() throws Exception { public void getInviteTokenEmptySearchResult() throws Exception { final SearchResult searchResult = new SearchResult(); searchResult.setEntities(new SearchEntityArray()); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); when(_secretService.generateUrlSafeToken(anyInt())).thenReturn(INVITE_TOKEN_STRING); when(_secretService.hashString(anyString())).thenReturn(HASHED_INVITE_TOKEN_STRING); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); @@ -157,10 +180,17 @@ public void getInviteTokenNullEntity() throws Exception { final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(null); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(null); assertThrows(() -> _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION)); } @@ -172,12 +202,19 @@ public void getInviteTokenNoInviteTokenAspect() throws Exception { final SearchEntity searchEntity = new 
SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); final EntityResponse entityResponse = new EntityResponse().setAspects(new EnvelopedAspectMap()); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); when(_secretService.encrypt(anyString())).thenReturn(ENCRYPTED_INVITE_TOKEN_STRING); @@ -191,19 +228,31 @@ public void getInviteToken() throws Exception { final SearchEntity searchEntity = new SearchEntity().setEntity(inviteTokenUrn); searchEntityArray.add(searchEntity); searchResult.setEntities(searchEntityArray); - when(_entityClient.filter(eq(INVITE_TOKEN_ENTITY_NAME), any(), any(), anyInt(), anyInt(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(searchResult); + when(_entityClient.filter( + eq(INVITE_TOKEN_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + eq(SYSTEM_AUTHENTICATION))) + .thenReturn(searchResult); final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - final InviteToken inviteTokenAspect = new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); - aspectMap.put(INVITE_TOKEN_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); + final InviteToken inviteTokenAspect = + new InviteToken().setToken(ENCRYPTED_INVITE_TOKEN_STRING).setRole(roleUrn); + aspectMap.put( + INVITE_TOKEN_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inviteTokenAspect.data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), - eq(SYSTEM_AUTHENTICATION))).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(INVITE_TOKEN_ENTITY_NAME), eq(inviteTokenUrn), any(), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(entityResponse); when(_secretService.decrypt(eq(ENCRYPTED_INVITE_TOKEN_STRING))).thenReturn(INVITE_TOKEN_STRING); - assertEquals(_inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION), INVITE_TOKEN_STRING); + assertEquals( + _inviteTokenService.getInviteToken(null, false, SYSTEM_AUTHENTICATION), + INVITE_TOKEN_STRING); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java index 4c78ab13c9cda..d8a0716937525 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/post/PostServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.post; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -16,10 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static 
org.testng.Assert.*; - - public class PostServiceTest { private static final Urn POST_URN = UrnUtils.getUrn("urn:li:post:123"); private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; @@ -29,12 +28,15 @@ public class PostServiceTest { private static final String POST_TITLE = "title"; private static final String POST_DESCRIPTION = "description"; private static final String POST_LINK = "https://datahubproject.io"; - private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); - private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(MEDIA); + private static final Media MEDIA = + new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = + new PostContent() + .setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; private static final Authentication SYSTEM_AUTHENTICATION = @@ -57,7 +59,8 @@ public void testMapMedia() { @Test public void testMapPostContent() { PostContent postContent = - _postService.mapPostContent(POST_CONTENT_TYPE.toString(), POST_TITLE, POST_DESCRIPTION, POST_LINK, MEDIA); + _postService.mapPostContent( + POST_CONTENT_TYPE.toString(), POST_TITLE, POST_DESCRIPTION, POST_LINK, MEDIA); assertEquals(POST_CONTENT, postContent); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java index 811bdaaa0fcf8..155f1314a0190 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/DataHubJwtSigningKeyResolverTest.java @@ -1,5 +1,7 @@ package com.datahub.authentication.token; +import static org.testng.AssertJUnit.*; + import io.jsonwebtoken.Claims; import io.jsonwebtoken.JwsHeader; import java.math.BigInteger; @@ -20,13 +22,9 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.*; - - public class DataHubJwtSigningKeyResolverTest { - @InjectMocks - private DataHubJwtSigningKeyResolver resolver; + @InjectMocks private DataHubJwtSigningKeyResolver resolver; @Test public void testResolveSigningKeyWithPublicKey() throws Exception { @@ -55,11 +53,12 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { HttpResponse httpResponse = Mockito.mock(HttpResponse.class); Mockito.when(httpResponse.statusCode()).thenReturn(200); - JSONObject token = new JSONObject( - "{\"kty\": \"RSA\", \"kid\": \"test_key\", \"n\": \"ueXyoaxgWhMTLwkowaskhiV85rbN9n_nLft8CxFUY3nbMpNybAWsWuhJ4SYLT4U-GbKdL-h-NYgBXKn" - + "GK1ieG6qSC25T3hWXTb3cNe73ZQUcZSivAV2tZouPYcb1XKSyKd-PsK8NsCpq1NHsJsrXSKq-7YCaf4MxIUaFXSZTE7ZNC0fPVqYH71jnyOU9FA_KJm0IC-x_Bs2g" - + "Ak3Eq1_6pZ_0VeYpczv82LACAUzi1vuU1gbbZLNHHl4DHwWb98eI1aCbWHNMux70Ba4aREOdKOWrxZ066W_NKUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH" - + "0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}"); + JSONObject token = + new JSONObject( + "{\"kty\": \"RSA\", \"kid\": \"test_key\", \"n\": 
\"ueXyoaxgWhMTLwkowaskhiV85rbN9n_nLft8CxFUY3nbMpNybAWsWuhJ4SYLT4U-GbKdL-h-NYgBXKn" + + "GK1ieG6qSC25T3hWXTb3cNe73ZQUcZSivAV2tZouPYcb1XKSyKd-PsK8NsCpq1NHsJsrXSKq-7YCaf4MxIUaFXSZTE7ZNC0fPVqYH71jnyOU9FA_KJm0IC-x_Bs2g" + + "Ak3Eq1_6pZ_0VeYpczv82LACAUzi1vuU1gbbZLNHHl4DHwWb98eI1aCbWHNMux70Ba4aREOdKOWrxZ066W_NKUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH" + + "0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}"); PublicKey expectedKey = getPublicKey(token); String responseJson = @@ -69,11 +68,14 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { + "KUVtPY_njW66NvgBujxqHD2EQUc87KPAL6rYOH0hWWPEzencGdYj2w\", \"e\": \"AQAB\"}]}"; Mockito.when(httpResponse.body()).thenReturn(responseJson); - Mockito.when(httpClient.send(Mockito.any(HttpRequest.class), Mockito.any(HttpResponse.BodyHandler.class))) + Mockito.when( + httpClient.send( + Mockito.any(HttpRequest.class), Mockito.any(HttpResponse.BodyHandler.class))) .thenReturn(httpResponse); HashSet trustedIssuers = new HashSet<>(); trustedIssuers.add("https://example.com"); - DataHubJwtSigningKeyResolver resolver = new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); + DataHubJwtSigningKeyResolver resolver = + new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); resolver.client = httpClient; JwsHeader mockJwsHeader = Mockito.mock(JwsHeader.class); Mockito.when(mockJwsHeader.getKeyId()).thenReturn("test_key"); @@ -88,7 +90,8 @@ void testResolveSigningKeyWithRemotePublicKey() throws Exception { void testInvalidIssuer() throws Exception { HashSet trustedIssuers = new HashSet<>(); - DataHubJwtSigningKeyResolver resolver = new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); + DataHubJwtSigningKeyResolver resolver = + new DataHubJwtSigningKeyResolver(trustedIssuers, null, "RSA"); JwsHeader mockJwsHeader = Mockito.mock(JwsHeader.class); Claims mockClaims = Mockito.mock(Claims.class); resolver.resolveSigningKey(mockJwsHeader, mockClaims); @@ -120,8 +123,10 @@ private PublicKey getPublicKey(JSONObject token) throws Exception { if (token.get("kty").toString().equals("RSA")) { try { KeyFactory kf = KeyFactory.getInstance("RSA"); - BigInteger modulus = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); - BigInteger exponent = new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); + BigInteger modulus = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("n").toString())); + BigInteger exponent = + new BigInteger(1, Base64.getUrlDecoder().decode(token.get("e").toString())); publicKey = kf.generatePublic(new RSAPublicKeySpec(modulus, exponent)); } catch (InvalidKeySpecException e) { throw new InvalidKeySpecException("Invalid public key", e); diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java index 1c46e864a559e..ed10022632a56 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatefulTokenServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.token; +import static com.datahub.authentication.token.TokenClaims.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.authenticator.DataHubTokenAuthenticatorTest; @@ -13,14 +16,9 @@ import 
com.linkedin.metadata.models.registry.ConfigEntityRegistry; import java.util.Date; import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.datahub.authentication.token.TokenClaims.*; -import static org.testng.Assert.*; - - public class StatefulTokenServiceTest { private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; @@ -32,7 +30,8 @@ public class StatefulTokenServiceTest { public void testConstructor() { assertThrows(() -> new StatefulTokenService(null, null, null, null, null)); assertThrows(() -> new StatefulTokenService(TEST_SIGNING_KEY, null, null, null, null)); - assertThrows(() -> new StatefulTokenService(TEST_SIGNING_KEY, "UNSUPPORTED_ALG", null, null, null)); + assertThrows( + () -> new StatefulTokenService(TEST_SIGNING_KEY, "UNSUPPORTED_ALG", null, null, null)); // Succeeds: new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); @@ -40,11 +39,12 @@ public void testConstructor() { @Test public void testGenerateAccessTokenPersonalToken() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); // Verify token claims @@ -65,10 +65,15 @@ public void testGenerateAccessTokenPersonalToken() throws Exception { @Test public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, - null, System.currentTimeMillis(), + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, + datahub, + null, + System.currentTimeMillis(), "some token", "A token description", datahub.toUrnStr()); @@ -92,11 +97,12 @@ public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { @Test public void testGenerateAccessTokenSessionToken() throws Exception { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.SESSION, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.SESSION, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); @@ -118,14 +124,21 @@ public void testGenerateAccessTokenSessionToken() throws Exception { @Test public void testValidateAccessTokenFailsDueToExpiration() { - StatefulTokenService - tokenService = new StatefulTokenService(TEST_SIGNING_KEY, 
"HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); // Generate token that expires immediately. Date date = new Date(); - //This method returns the time in millis + // This method returns the time in millis long createdAtInMs = date.getTime(); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L, - createdAtInMs, "token", "", "urn:li:corpuser:datahub"); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, + new Actor(ActorType.USER, "datahub"), + 0L, + createdAtInMs, + "token", + "", + "urn:li:corpuser:datahub"); assertNotNull(token); // Validation should fail. @@ -134,12 +147,13 @@ public void testValidateAccessTokenFailsDueToExpiration() { @Test public void testValidateAccessTokenFailsDueToManipulation() { - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); assertNotNull(token); // Change single character @@ -152,23 +166,30 @@ public void testValidateAccessTokenFailsDueToManipulation() { @Test public void generateRevokeToken() throws TokenException { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - final ConfigEntityRegistry configEntityRegistry = new ConfigEntityRegistry( - DataHubTokenAuthenticatorTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); - final AspectSpec keyAspectSpec = configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + final ConfigEntityRegistry configEntityRegistry = + new ConfigEntityRegistry( + DataHubTokenAuthenticatorTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); + final AspectSpec keyAspectSpec = + configEntityRegistry.getEntitySpec(Constants.ACCESS_TOKEN_ENTITY_NAME).getKeyAspectSpec(); Mockito.when(mockService.getEntityRegistry()).thenReturn(configEntityRegistry); - Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))).thenReturn(keyAspectSpec); + Mockito.when(mockService.getKeyAspectSpec(Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME))) + .thenReturn(keyAspectSpec); Mockito.when(mockService.exists(Mockito.any(Urn.class))).thenReturn(true); final RollbackRunResult result = new RollbackRunResult(ImmutableList.of(), 0); Mockito.when(mockService.deleteUrn(Mockito.any(Urn.class))).thenReturn(result); - StatefulTokenService tokenService = new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); + StatefulTokenService tokenService = + new StatefulTokenService(TEST_SIGNING_KEY, "HS256", null, mockService, TEST_SALTING_KEY); Actor datahub = new Actor(ActorType.USER, "datahub"); - String token = 
tokenService.generateAccessToken(TokenType.PERSONAL, datahub, "some token", - "A token description", - datahub.toUrnStr()); + String token = + tokenService.generateAccessToken( + TokenType.PERSONAL, datahub, "some token", "A token description", datahub.toUrnStr()); // Revoke token tokenService.revokeAccessToken(tokenService.hash(token)); @@ -177,7 +198,5 @@ public void generateRevokeToken() throws TokenException { assertThrows(TokenException.class, () -> tokenService.validateAccessToken(token)); } - private void mockStateful() { - - } + private void mockStateful() {} } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java index 4268521a07c0c..841308441569d 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/token/StatelessTokenServiceTest.java @@ -1,5 +1,8 @@ package com.datahub.authentication.token; +import static com.datahub.authentication.token.TokenClaims.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.authenticator.DataHubTokenAuthenticator; @@ -15,10 +18,6 @@ import javax.crypto.spec.SecretKeySpec; import org.testng.annotations.Test; -import static com.datahub.authentication.token.TokenClaims.*; -import static org.testng.Assert.*; - - public class StatelessTokenServiceTest { private static final String TEST_SIGNING_KEY = "WnEdIeTG/VVCLQqGwC/BAkqyY0k+H8NEAtWGejrBI94="; @@ -37,8 +36,11 @@ public void testConstructor() { @Test public void testGenerateAccessTokenPersonalToken() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub")); assertNotNull(token); // Verify token claims @@ -59,10 +61,11 @@ public void testGenerateAccessTokenPersonalToken() throws Exception { @Test public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, - new Actor(ActorType.USER, "datahub"), - null); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + statelessTokenService.generateAccessToken( + TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), null); assertNotNull(token); // Verify token claims @@ -83,8 +86,11 @@ public void testGenerateAccessTokenPersonalTokenEternal() throws Exception { @Test public void testGenerateAccessTokenSessionToken() throws Exception { - StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); - String token = statelessTokenService.generateAccessToken(TokenType.SESSION, new Actor(ActorType.USER, "datahub")); + StatelessTokenService statelessTokenService = + new StatelessTokenService(TEST_SIGNING_KEY, "HS256"); + String token = + 
statelessTokenService.generateAccessToken(
+            TokenType.SESSION, new Actor(ActorType.USER, "datahub"));
     assertNotNull(token);
 
     // Verify token claims
@@ -105,26 +111,34 @@
   @Test
   public void testValidateAccessTokenFailsDueToExpiration() {
-    StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
+    StatelessTokenService statelessTokenService =
+        new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
     // Generate token that expires immediately.
-    String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L);
+    String token =
+        statelessTokenService.generateAccessToken(
+            TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"), 0L);
     assertNotNull(token);
 
     // Validation should fail.
-    assertThrows(TokenExpiredException.class, () -> statelessTokenService.validateAccessToken(token));
+    assertThrows(
+        TokenExpiredException.class, () -> statelessTokenService.validateAccessToken(token));
   }
 
   @Test
   public void testValidateAccessTokenFailsDueToManipulation() {
-    StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
-    String token = statelessTokenService.generateAccessToken(TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"));
+    StatelessTokenService statelessTokenService =
+        new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
+    String token =
+        statelessTokenService.generateAccessToken(
+            TokenType.PERSONAL, new Actor(ActorType.USER, "datahub"));
     assertNotNull(token);
 
     // Change single character
     String changedToken = token.substring(1);
 
     // Validation should fail.
-    assertThrows(TokenException.class, () -> statelessTokenService.validateAccessToken(changedToken));
+    assertThrows(
+        TokenException.class, () -> statelessTokenService.validateAccessToken(changedToken));
   }
 
   @Test
@@ -134,31 +148,37 @@ public void testValidateAccessTokenFailsDueToNoneAlgorithm() {
         "eyJhbGciOiJub25lIn0.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6Il9fZGF0YWh1Yl9zeXN0ZW0iL"
             + "CJ0eXBlIjoiU0VTU0lPTiIsInZlcnNpb24iOiIxIiwianRpIjoiN2VmOTkzYjQtMjBiOC00Y2Y5LTljNm"
             + "YtMTE2NjNjZWVmOTQzIiwic3ViIjoiZGF0YWh1YiIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9.";
-    StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
+    StatelessTokenService statelessTokenService =
+        new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
     // Validation should fail.
     assertThrows(TokenException.class, () -> statelessTokenService.validateAccessToken(badToken));
   }
 
   @Test
   public void testValidateAccessTokenFailsDueToUnsupportedSigningAlgorithm() throws Exception {
-    StatelessTokenService statelessTokenService = new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
+    StatelessTokenService statelessTokenService =
+        new StatelessTokenService(TEST_SIGNING_KEY, "HS256");
 
     Map<String, Object> claims = new HashMap<>();
-    claims.put(TOKEN_VERSION_CLAIM_NAME, String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now.
+    claims.put(
+        TOKEN_VERSION_CLAIM_NAME,
+        String.valueOf(TokenVersion.ONE.numericValue)); // Hardcode version 1 for now.
claims.put(TOKEN_TYPE_CLAIM_NAME, "SESSION"); claims.put(ACTOR_TYPE_CLAIM_NAME, "USER"); claims.put(ACTOR_ID_CLAIM_NAME, "__datahub_system"); - final JwtBuilder builder = Jwts.builder() - .addClaims(claims) - .setId(UUID.randomUUID().toString()) - .setIssuer("datahub-metadata-service") - .setSubject("datahub"); - builder.setExpiration(new Date(System.currentTimeMillis() + 60)); + final JwtBuilder builder = + Jwts.builder() + .addClaims(claims) + .setId(UUID.randomUUID().toString()) + .setIssuer("datahub-metadata-service") + .setSubject("datahub"); + builder.setExpiration(new Date(System.currentTimeMillis() + 60)); final String testSigningKey = "TLHLdPSivAwIjXP4MT4TtlitsEGkOKjQGNnqsprisfghpU8g"; - byte [] apiKeySecretBytes = testSigningKey.getBytes(StandardCharsets.UTF_8); - final Key signingKey = new SecretKeySpec(apiKeySecretBytes, SignatureAlgorithm.HS384.getJcaName()); + byte[] apiKeySecretBytes = testSigningKey.getBytes(StandardCharsets.UTF_8); + final Key signingKey = + new SecretKeySpec(apiKeySecretBytes, SignatureAlgorithm.HS384.getJcaName()); final String badToken = builder.signWith(signingKey, SignatureAlgorithm.HS384).compact(); // Validation should fail. diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index 0102311ff3b61..9cb5d5cb697cc 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.authentication.user; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class NativeUserServiceTest { private static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; @@ -52,39 +51,60 @@ public void setupTest() throws Exception { AuthenticationConfiguration authenticationConfiguration = new AuthenticationConfiguration(); authenticationConfiguration.setSystemClientId("someCustomId"); - _nativeUserService = new NativeUserService(_entityService, _entityClient, _secretService, authenticationConfiguration); + _nativeUserService = + new NativeUserService( + _entityService, _entityClient, _secretService, authenticationConfiguration); } @Test public void testCreateNativeUserNullArguments() { assertThrows( - () -> _nativeUserService.createNativeUser(null, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, - SYSTEM_AUTHENTICATION)); - assertThrows(() -> _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, - SYSTEM_AUTHENTICATION)); - } - - @Test(expectedExceptions = RuntimeException.class, 
expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + () -> + _nativeUserService.createNativeUser( + null, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, null, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, null, TITLE, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, null, PASSWORD, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, null, SYSTEM_AUTHENTICATION)); + } + + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserAlreadyExists() throws Exception { // The user already exists when(_entityService.exists(any())).thenReturn(true); - _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") public void testCreateNativeUserUserDatahub() throws Exception { - _nativeUserService.createNativeUser(DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + DATAHUB_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "This user already exists! Cannot create a new user.") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "This user already exists! 
Cannot create a new user.") public void testCreateNativeUserUserSystemUser() throws Exception { - _nativeUserService.createNativeUser(SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + SYSTEM_ACTOR, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } @Test @@ -94,7 +114,8 @@ public void testCreateNativeUserPasses() throws Exception { when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); when(_secretService.getHashedPassword(any(), any())).thenReturn(HASHED_PASSWORD); - _nativeUserService.createNativeUser(USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); + _nativeUserService.createNativeUser( + USER_URN_STRING, FULL_NAME, EMAIL, TITLE, PASSWORD, SYSTEM_AUTHENTICATION); } @Test @@ -121,13 +142,17 @@ public void testUpdateCorpUserCredentialsPasses() throws Exception { @Test public void testGenerateNativeUserResetTokenNullArguments() { - assertThrows(() -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> _nativeUserService.generateNativeUserPasswordResetToken(null, SYSTEM_AUTHENTICATION)); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "User does not exist or is a non-native user!") public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { // Nonexistent corpUserCredentials - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(null); _nativeUserService.generateNativeUserPasswordResetToken(USER_URN_STRING, SYSTEM_AUTHENTICATION); } @@ -135,8 +160,8 @@ public void testGenerateNativeUserResetTokenNotNativeUser() throws Exception { @Test public void testGenerateNativeUserResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); @@ -148,80 +173,101 @@ public void testGenerateNativeUserResetToken() throws Exception { @Test public void testResetCorpUserCredentialsNullArguments() { - assertThrows(() -> _nativeUserService.resetCorpUserCredentials(null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); assertThrows( - () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + () -> + _nativeUserService.resetCorpUserCredentials( + null, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION)); + assertThrows( + () -> + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, null, RESET_TOKEN, SYSTEM_AUTHENTICATION)); assertThrows( - () -> _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); + () -> + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, null, SYSTEM_AUTHENTICATION)); } - @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "User has not generated a password reset token!") + 
@Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = "User has not generated a password reset token!") public void testResetCorpUserCredentialsNoPasswordResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); // No password reset token when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(false); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, - expectedExceptionsMessageRegExp = "Invalid reset token. Please ask your administrator to send you an updated link!") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = + "Invalid reset token. Please ask your administrator to send you an updated link!") public void testResetCorpUserCredentialsBadResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( - Instant.now().toEpochMilli()); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()) + .thenReturn(Instant.now().toEpochMilli()); // Reset token won't match when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn("badResetToken"); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } - @Test(expectedExceptions = RuntimeException.class, - expectedExceptionsMessageRegExp = "Reset token has expired! Please ask your administrator to create a new one") + @Test( + expectedExceptions = RuntimeException.class, + expectedExceptionsMessageRegExp = + "Reset token has expired! 
Please ask your administrator to create a new one") public void testResetCorpUserCredentialsExpiredResetToken() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - // Reset token expiration time will be before the system time when we run resetCorpUserCredentials + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + // Reset token expiration time will be before the system time when we run + // resetCorpUserCredentials when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn(0L); when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); } @Test public void testResetCorpUserCredentialsPasses() throws Exception { CorpUserCredentials mockCorpUserCredentialsAspect = mock(CorpUserCredentials.class); - when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn( - mockCorpUserCredentialsAspect); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(mockCorpUserCredentialsAspect); when(mockCorpUserCredentialsAspect.hasSalt()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasHashedPassword()).thenReturn(true); when(mockCorpUserCredentialsAspect.hasPasswordResetToken()).thenReturn(true); when(mockCorpUserCredentialsAspect.getPasswordResetToken()).thenReturn(ENCRYPTED_RESET_TOKEN); - when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()).thenReturn(true); - when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()).thenReturn( - Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); + when(mockCorpUserCredentialsAspect.hasPasswordResetTokenExpirationTimeMillis()) + .thenReturn(true); + when(mockCorpUserCredentialsAspect.getPasswordResetTokenExpirationTimeMillis()) + .thenReturn(Instant.now().plusMillis(ONE_DAY_MILLIS).toEpochMilli()); when(_secretService.decrypt(eq(ENCRYPTED_RESET_TOKEN))).thenReturn(RESET_TOKEN); when(_secretService.generateSalt(anyInt())).thenReturn(SALT); when(_secretService.encrypt(any())).thenReturn(ENCRYPTED_SALT); - _nativeUserService.resetCorpUserCredentials(USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); + _nativeUserService.resetCorpUserCredentials( + USER_URN_STRING, PASSWORD, RESET_TOKEN, SYSTEM_AUTHENTICATION); verify(_entityClient).ingestProposal(any(), any()); } @@ -233,7 +279,8 @@ public void testDoesPasswordMatchNullArguments() { @Test public void testDoesPasswordMatchNoCorpUserCredentials() throws Exception { - when(_entityService.getLatestAspect(any(), 
eq(CORP_USER_CREDENTIALS_ASPECT_NAME))).thenReturn(null); + when(_entityService.getLatestAspect(any(), eq(CORP_USER_CREDENTIALS_ASPECT_NAME))) + .thenReturn(null); assertFalse(_nativeUserService.doesPasswordMatch(USER_URN_STRING, PASSWORD)); } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index b0b206001209c..ffee378a363c7 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -1,4 +1,19 @@ package com.datahub.authorization; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE; +import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE; +import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE; +import static org.mockito.ArgumentMatchers.isNull; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -30,7 +45,6 @@ import com.linkedin.policy.DataHubActorFilter; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.policy.DataHubResourceFilter; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -38,34 +52,18 @@ import java.util.Map; import java.util.Optional; import java.util.Set; - +import javax.annotation.Nullable; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import javax.annotation.Nullable; - -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.ACTIVE_POLICY_STATE; -import static com.linkedin.metadata.authorization.PoliciesConfig.INACTIVE_POLICY_STATE; -import static com.linkedin.metadata.authorization.PoliciesConfig.METADATA_POLICY_TYPE; -import static org.mockito.ArgumentMatchers.isNull; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; -import static org.testng.Assert.assertFalse; - - public class DataHubAuthorizerTest { public static final String DATAHUB_SYSTEM_CLIENT_ID = "__datahub_system"; private static final Urn PARENT_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:parent"); private static final Urn CHILD_DOMAIN_URN = UrnUtils.getUrn("urn:li:domain:child"); - private static final Urn USER_WITH_ADMIN_ROLE = UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); + private static final Urn USER_WITH_ADMIN_ROLE = + UrnUtils.getUrn("urn:li:corpuser:user-with-admin"); private EntityClient _entityClient; private DataHubAuthorizer _dataHubAuthorizer; @@ -76,102 +74,158 @@ public void setupTest() throws Exception { // Init mocks. 
final Urn activePolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:0"); - final DataHubPolicyInfo activePolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_TAGS"), null); + final DataHubPolicyInfo activePolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_TAGS"), null); final EnvelopedAspectMap activeAspectMap = new EnvelopedAspectMap(); - activeAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(activePolicy.data()))); + activeAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(activePolicy.data()))); final Urn inactivePolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:1"); - final DataHubPolicyInfo inactivePolicy = createDataHubPolicyInfo(false, ImmutableList.of("EDIT_ENTITY_OWNERS"), null); + final DataHubPolicyInfo inactivePolicy = + createDataHubPolicyInfo(false, ImmutableList.of("EDIT_ENTITY_OWNERS"), null); final EnvelopedAspectMap inactiveAspectMap = new EnvelopedAspectMap(); - inactiveAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(inactivePolicy.data()))); + inactiveAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(inactivePolicy.data()))); final Urn parentDomainPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:2"); - final DataHubPolicyInfo parentDomainPolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_DOCS"), PARENT_DOMAIN_URN); + final DataHubPolicyInfo parentDomainPolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_DOCS"), PARENT_DOMAIN_URN); final EnvelopedAspectMap parentDomainPolicyAspectMap = new EnvelopedAspectMap(); - parentDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomainPolicy.data()))); + parentDomainPolicyAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomainPolicy.data()))); final Urn childDomainPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:3"); - final DataHubPolicyInfo childDomainPolicy = createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_STATUS"), CHILD_DOMAIN_URN); + final DataHubPolicyInfo childDomainPolicy = + createDataHubPolicyInfo(true, ImmutableList.of("EDIT_ENTITY_STATUS"), CHILD_DOMAIN_URN); final EnvelopedAspectMap childDomainPolicyAspectMap = new EnvelopedAspectMap(); - childDomainPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); + childDomainPolicyAspectMap.put( + DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(childDomainPolicy.data()))); final Urn adminPolicyUrn = Urn.createFromString("urn:li:dataHubPolicy:4"); final DataHubActorFilter actorFilter = new DataHubActorFilter(); - actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); - final DataHubPolicyInfo adminPolicy = createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); + actorFilter.setRoles( + new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Admin")))); + final DataHubPolicyInfo adminPolicy = + createDataHubPolicyInfoFor(true, ImmutableList.of("EDIT_USER_PROFILE"), null, actorFilter); final EnvelopedAspectMap adminPolicyAspectMap = new EnvelopedAspectMap(); - adminPolicyAspectMap.put(DATAHUB_POLICY_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); + adminPolicyAspectMap.put( + 
DATAHUB_POLICY_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(adminPolicy.data()))); - final ScrollResult policySearchResult1 = new ScrollResult() + final ScrollResult policySearchResult1 = + new ScrollResult() .setScrollId("1") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(activePolicyUrn)))); - final ScrollResult policySearchResult2 = new ScrollResult() + final ScrollResult policySearchResult2 = + new ScrollResult() .setScrollId("2") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(inactivePolicyUrn)))); - final ScrollResult policySearchResult3 = new ScrollResult() + final ScrollResult policySearchResult3 = + new ScrollResult() .setScrollId("3") .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(parentDomainPolicyUrn)))); - final ScrollResult policySearchResult4 = new ScrollResult() - .setScrollId("4") - .setNumEntities(5) + final ScrollResult policySearchResult4 = + new ScrollResult() + .setScrollId("4") + .setNumEntities(5) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(childDomainPolicyUrn)))); + + final ScrollResult policySearchResult5 = + new ScrollResult() + .setNumEntities(5) .setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(childDomainPolicyUrn)))); - - final ScrollResult policySearchResult5 = new ScrollResult() - .setNumEntities(5) - .setEntities( - new SearchEntityArray( - ImmutableList.of( - new SearchEntity().setEntity(adminPolicyUrn)))); - - when(_entityClient.scrollAcrossEntities(eq(List.of("dataHubPolicy")), eq(""), isNull(), any(), isNull(), - anyInt(), eq(new SearchFlags().setFulltext(true).setSkipAggregates(true).setSkipHighlighting(true).setSkipCache(true)), any())) - .thenReturn(policySearchResult1) - .thenReturn(policySearchResult2) - .thenReturn(policySearchResult3) - .thenReturn(policySearchResult4) - .thenReturn(policySearchResult5); - - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())).thenAnswer(args -> { - Set<Urn> inputUrns = args.getArgument(1); - Urn urn = inputUrns.stream().findFirst().get(); - - switch (urn.toString()) { - case "urn:li:dataHubPolicy:0": - return Map.of(activePolicyUrn, new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); - case "urn:li:dataHubPolicy:1": - return Map.of(inactivePolicyUrn, new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); - case "urn:li:dataHubPolicy:2": - return Map.of(parentDomainPolicyUrn, new EntityResponse().setUrn(parentDomainPolicyUrn).setAspects(parentDomainPolicyAspectMap)); - case "urn:li:dataHubPolicy:3": - return Map.of(childDomainPolicyUrn, new EntityResponse().setUrn(childDomainPolicyUrn).setAspects(childDomainPolicyAspectMap)); - case "urn:li:dataHubPolicy:4": - return Map.of(adminPolicyUrn, new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); - default: - throw new IllegalStateException(); - } - }); - - final List<Urn> userUrns = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user3"), Urn.createFromString("urn:li:corpuser:user4")); - final List<Urn> groupUrns = 
ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group3"), Urn.createFromString("urn:li:corpGroup:group4")); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(adminPolicyUrn)))); + + when(_entityClient.scrollAcrossEntities( + eq(List.of("dataHubPolicy")), + eq(""), + isNull(), + any(), + isNull(), + anyInt(), + eq( + new SearchFlags() + .setFulltext(true) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setSkipCache(true)), + any())) + .thenReturn(policySearchResult1) + .thenReturn(policySearchResult2) + .thenReturn(policySearchResult3) + .thenReturn(policySearchResult4) + .thenReturn(policySearchResult5); + + when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), any(), eq(null), any())) + .thenAnswer( + args -> { + Set<Urn> inputUrns = args.getArgument(1); + Urn urn = inputUrns.stream().findFirst().get(); + + switch (urn.toString()) { + case "urn:li:dataHubPolicy:0": + return Map.of( + activePolicyUrn, + new EntityResponse().setUrn(activePolicyUrn).setAspects(activeAspectMap)); + case "urn:li:dataHubPolicy:1": + return Map.of( + inactivePolicyUrn, + new EntityResponse().setUrn(inactivePolicyUrn).setAspects(inactiveAspectMap)); + case "urn:li:dataHubPolicy:2": + return Map.of( + parentDomainPolicyUrn, + new EntityResponse() + .setUrn(parentDomainPolicyUrn) + .setAspects(parentDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:3": + return Map.of( + childDomainPolicyUrn, + new EntityResponse() + .setUrn(childDomainPolicyUrn) + .setAspects(childDomainPolicyAspectMap)); + case "urn:li:dataHubPolicy:4": + return Map.of( + adminPolicyUrn, + new EntityResponse().setUrn(adminPolicyUrn).setAspects(adminPolicyAspectMap)); + default: + throw new IllegalStateException(); + } + }); + + final List<Urn> userUrns = + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user3"), + Urn.createFromString("urn:li:corpuser:user4")); + final List<Urn> groupUrns = + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group3"), + Urn.createFromString("urn:li:corpGroup:group4")); EntityResponse ownershipResponse = new EntityResponse(); EnvelopedAspectMap ownershipAspectMap = new EnvelopedAspectMap(); - ownershipAspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(userUrns, groupUrns).data()))); + ownershipAspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new com.linkedin.entity.Aspect(createOwnershipAspect(userUrns, groupUrns).data()))); ownershipResponse.setAspects(ownershipAspectMap); when(_entityClient.getV2(any(), any(), eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())) .thenReturn(ownershipResponse); @@ -181,31 +235,45 @@ public void setupTest() throws Exception { .thenReturn(createDomainsResponse(CHILD_DOMAIN_URN)); // Mocks to get parent domains on a domain - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(CHILD_DOMAIN_URN)), eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), any())) + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(CHILD_DOMAIN_URN)), + eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + any())) .thenReturn(createDomainPropertiesBatchResponse(PARENT_DOMAIN_URN)); // Mocks to reach the stopping point on domain parents - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(PARENT_DOMAIN_URN)), + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(PARENT_DOMAIN_URN)), + 
eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + any())) .thenReturn(createDomainPropertiesBatchResponse(null)); // Mocks to reach role membership for a user urn - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any()) - ).thenReturn(createUserRoleMembershipBatchResponse(USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); - - final Authentication systemAuthentication = new Authentication( - new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), - "" - ); - - _dataHubAuthorizer = new DataHubAuthorizer( - systemAuthentication, - _entityClient, - 10, - 10, - DataHubAuthorizer.AuthorizationMode.DEFAULT, - 1 // force pagination logic - ); - _dataHubAuthorizer.init(Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(USER_WITH_ADMIN_ROLE)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn( + createUserRoleMembershipBatchResponse( + USER_WITH_ADMIN_ROLE, UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + + final Authentication systemAuthentication = + new Authentication(new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID), ""); + + _dataHubAuthorizer = + new DataHubAuthorizer( + systemAuthentication, + _entityClient, + 10, + 10, + DataHubAuthorizer.AuthorizationMode.DEFAULT, + 1 // force pagination logic + ); + _dataHubAuthorizer.init( + Collections.emptyMap(), createAuthorizerContext(systemAuthentication, _entityClient)); _dataHubAuthorizer.invalidateCache(); Thread.sleep(500); // Sleep so the runnable can execute. (not ideal) } @@ -217,11 +285,11 @@ public void testSystemAuthentication() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID).toUrnStr(), - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + new Actor(ActorType.USER, DATAHUB_SYSTEM_CLIENT_ID).toUrnStr(), + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -231,11 +299,9 @@ public void testAuthorizeGranted() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -246,11 +312,9 @@ public void testAuthorizeNotGranted() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); // Policy for this privilege is inactive. 
- AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_OWNERS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } @@ -263,11 +327,9 @@ public void testAllowAllMode() throws Exception { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); // Policy for this privilege is inactive. - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_OWNERS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -278,11 +340,9 @@ public void testInvalidateCache() throws Exception { // First make sure that the default policies are as expected. EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); @@ -291,64 +351,78 @@ public void testInvalidateCache() throws Exception { emptyResult.setNumEntities(0); emptyResult.setEntities(new SearchEntityArray()); - when(_entityClient.search(eq("dataHubPolicy"), eq(""), isNull(), any(), anyInt(), anyInt(), any(), - eq(new SearchFlags().setFulltext(true)))).thenReturn(emptyResult); - when(_entityClient.batchGetV2(eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())).thenReturn( - Collections.emptyMap()); + when(_entityClient.search( + eq("dataHubPolicy"), + eq(""), + isNull(), + any(), + anyInt(), + anyInt(), + any(), + eq(new SearchFlags().setFulltext(true)))) + .thenReturn(emptyResult); + when(_entityClient.batchGetV2( + eq(POLICY_ENTITY_NAME), eq(Collections.emptySet()), eq(null), any())) + .thenReturn(Collections.emptyMap()); // Invalidate Cache. _dataHubAuthorizer.invalidateCache(); Thread.sleep(500); // Sleep so the runnable can execute. (not ideal) - // Now verify that invalidating the cache updates the policies by running the same authorization request. + // Now verify that invalidating the cache updates the policies by running the same authorization + // request. assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } @Test public void testAuthorizedActorsActivePolicy() throws Exception { final AuthorizedActors actors = - _dataHubAuthorizer.authorizedActors("EDIT_ENTITY_TAGS", // Should be inside the active policy. + _dataHubAuthorizer.authorizedActors( + "EDIT_ENTITY_TAGS", // Should be inside the active policy. 
Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); assertTrue(actors.isAllUsers()); assertTrue(actors.isAllGroups()); - assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of( - Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2"), - Urn.createFromString("urn:li:corpuser:user3"), - Urn.createFromString("urn:li:corpuser:user4") - )); - - assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of( - Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2"), - Urn.createFromString("urn:li:corpGroup:group3"), - Urn.createFromString("urn:li:corpGroup:group4") - )); + assertEquals( + new HashSet<>(actors.getUsers()), + ImmutableSet.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2"), + Urn.createFromString("urn:li:corpuser:user3"), + Urn.createFromString("urn:li:corpuser:user4"))); + + assertEquals( + new HashSet<>(actors.getGroups()), + ImmutableSet.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2"), + Urn.createFromString("urn:li:corpGroup:group3"), + Urn.createFromString("urn:li:corpGroup:group4"))); } @Test public void testAuthorizedRoleActivePolicy() throws Exception { final AuthorizedActors actors = - _dataHubAuthorizer.authorizedActors("EDIT_USER_PROFILE", // Should be inside the active policy. + _dataHubAuthorizer.authorizedActors( + "EDIT_USER_PROFILE", // Should be inside the active policy. Optional.of(new EntitySpec("dataset", "urn:li:dataset:1"))); assertFalse(actors.isAllUsers()); assertFalse(actors.isAllGroups()); assertEquals(new HashSet<>(actors.getUsers()), ImmutableSet.of()); assertEquals(new HashSet<>(actors.getGroups()), ImmutableSet.of()); - assertEquals(new HashSet<>(actors.getRoles()), ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); + assertEquals( + new HashSet<>(actors.getRoles()), + ImmutableSet.of(UrnUtils.getUrn("urn:li:dataHubRole:Admin"))); } @Test public void testAuthorizationBasedOnRoleIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - USER_WITH_ADMIN_ROLE.toString(), - "EDIT_USER_PROFILE", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + USER_WITH_ADMIN_ROLE.toString(), "EDIT_USER_PROFILE", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -357,11 +431,9 @@ public void testAuthorizationBasedOnRoleIsAllowed() { public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_STATUS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_STATUS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -370,11 +442,9 @@ public void testAuthorizationOnDomainWithPrivilegeIsAllowed() { public void testAuthorizationOnDomainWithParentPrivilegeIsAllowed() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_DOCS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new 
AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_DOCS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.ALLOW); } @@ -383,19 +453,24 @@ public void testAuthorizationOnDomainWithParentPrivilegeIsAllowed() { public void testAuthorizationOnDomainWithoutPrivilegeIsDenied() { EntitySpec resourceSpec = new EntitySpec("dataset", "urn:li:dataset:test"); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:test", - "EDIT_ENTITY_DOC_LINKS", - Optional.of(resourceSpec) - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:test", "EDIT_ENTITY_DOC_LINKS", Optional.of(resourceSpec)); assertEquals(_dataHubAuthorizer.authorize(request).getType(), AuthorizationResult.Type.DENY); } - private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception { + private DataHubPolicyInfo createDataHubPolicyInfo( + boolean active, List<String> privileges, @Nullable final Urn domain) throws Exception { - List<Urn> users = ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2")); - List<Urn> groups = ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), Urn.createFromString("urn:li:corpGroup:group2")); + List<Urn> users = + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")); + List<Urn> groups = + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")); final DataHubActorFilter actorFilter = new DataHubActorFilter(); actorFilter.setResourceOwners(true); @@ -407,8 +482,12 @@ private DataHubPolicyInfo createDataHubPolicyInfo(boolean active, List<String> p return createDataHubPolicyInfoFor(active, privileges, domain, actorFilter); } - private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String> privileges, - @Nullable final Urn domain, DataHubActorFilter actorFilter) throws Exception { + private DataHubPolicyInfo createDataHubPolicyInfoFor( + boolean active, + List<String> privileges, + @Nullable final Urn domain, + DataHubActorFilter actorFilter) + throws Exception { final DataHubPolicyInfo dataHubPolicyInfo = new DataHubPolicyInfo(); dataHubPolicyInfo.setType(METADATA_POLICY_TYPE); dataHubPolicyInfo.setState(active ? 
ACTIVE_POLICY_STATE : INACTIVE_POLICY_STATE); @@ -424,7 +503,10 @@ private DataHubPolicyInfo createDataHubPolicyInfoFor(boolean active, List<String> privileges, - private Ownership createOwnershipAspect(final List<Urn> userOwners, final List<Urn> groupOwners) throws Exception { + private Ownership createOwnershipAspect(final List<Urn> userOwners, final List<Urn> groupOwners) + throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); if (userOwners != null) { - userOwners.forEach(userUrn -> { - final Owner userOwner = new Owner(); - userOwner.setOwner(userUrn); - userOwner.setType(OwnershipType.DATAOWNER); - owners.add(userOwner); - } - ); + userOwners.forEach( + userUrn -> { + final Owner userOwner = new Owner(); + userOwner.setOwner(userUrn); + userOwner.setType(OwnershipType.DATAOWNER); + owners.add(userOwner); + }); } if (groupOwners != null) { - groupOwners.forEach(groupUrn -> { - final Owner groupOwner = new Owner(); - groupOwner.setOwner(groupUrn); - groupOwner.setType(OwnershipType.DATAOWNER); - owners.add(groupOwner); - }); + groupOwners.forEach( + groupUrn -> { + final Owner groupOwner = new Owner(); + groupOwner.setOwner(groupUrn); + groupOwner.setType(OwnershipType.DATAOWNER); + owners.add(groupOwner); + }); } ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -466,13 +551,15 @@ private EntityResponse createDomainsResponse(final Urn domainUrn) { EnvelopedAspectMap domainsAspectMap = new EnvelopedAspectMap(); final Domains domains = new Domains(); domains.setDomains(new UrnArray(domainUrns)); - domainsAspectMap.put(DOMAINS_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(domains.data()))); + domainsAspectMap.put( + DOMAINS_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(domains.data()))); domainsResponse.setAspects(domainsAspectMap); return domainsResponse; } - private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(@Nullable final Urn parentDomainUrn) { + private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse( + @Nullable final Urn parentDomainUrn) { final Map<Urn, EntityResponse> batchResponse = new HashMap<>(); final EntityResponse response = new EntityResponse(); EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); @@ -480,14 +567,16 @@ private Map<Urn, EntityResponse> createDomainPropertiesBatchResponse(@Nullable f if (parentDomainUrn != null) { properties.setParentDomain(parentDomainUrn); } - aspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect() - .setValue(new com.linkedin.entity.Aspect(properties.data()))); + aspectMap.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(properties.data()))); response.setAspects(aspectMap); batchResponse.put(parentDomainUrn, response); return batchResponse; } - private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(final Urn userUrn, @Nullable final Urn roleUrn) { + private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse( + final Urn userUrn, @Nullable final Urn roleUrn) { final Map<Urn, EntityResponse> batchResponse = new HashMap<>(); final EntityResponse response = new EntityResponse(); EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); @@ -495,14 +584,17 @@ private Map<Urn, EntityResponse> createUserRoleMembershipBatchResponse(final Urn if (roleUrn != null) { membership.setRoles(new UrnArray(roleUrn)); } - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect() - .setValue(new 
com.linkedin.entity.Aspect(membership.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(membership.data()))); response.setAspects(aspectMap); batchResponse.put(userUrn, response); return batchResponse; } - private AuthorizerContext createAuthorizerContext(final Authentication systemAuthentication, final EntityClient entityClient) { - return new AuthorizerContext(Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); + private AuthorizerContext createAuthorizerContext( + final Authentication systemAuthentication, final EntityClient entityClient) { + return new AuthorizerContext( + Collections.emptyMap(), new DefaultEntitySpecResolver(systemAuthentication, entityClient)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java index 2790c16ba75e6..08ec91d5287dc 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/PolicyEngineTest.java @@ -1,5 +1,10 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.PoliciesConfig.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -32,12 +37,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class PolicyEngineTest { private static final String AUTHORIZED_PRINCIPAL = "urn:li:corpuser:datahub"; @@ -46,7 +45,8 @@ public class PolicyEngineTest { private static final String RESOURCE_URN = "urn:li:dataset:test"; private static final String DOMAIN_URN = "urn:li:domain:domain1"; private static final String OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__technical_owner"; - private static final String OTHER_OWNERSHIP_TYPE_URN = "urn:li:ownershipType:__system__data_steward"; + private static final String OTHER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__data_steward"; private EntityClient _entityClient; private PolicyEngine _policyEngine; @@ -63,10 +63,16 @@ public void setupTest() throws Exception { _policyEngine = new PolicyEngine(Mockito.mock(Authentication.class), _entityClient); authorizedUserUrn = Urn.createFromString(AUTHORIZED_PRINCIPAL); - resolvedAuthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, AUTHORIZED_PRINCIPAL, - Collections.emptySet(), Collections.emptySet(), Collections.singleton(AUTHORIZED_GROUP)); + resolvedAuthorizedUserSpec = + buildEntityResolvers( + CORP_USER_ENTITY_NAME, + AUTHORIZED_PRINCIPAL, + Collections.emptySet(), + Collections.emptySet(), + Collections.singleton(AUTHORIZED_GROUP)); unauthorizedUserUrn = Urn.createFromString(UNAUTHORIZED_PRINCIPAL); - resolvedUnauthorizedUserSpec = buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); + resolvedUnauthorizedUserSpec = + buildEntityResolvers(CORP_USER_ENTITY_NAME, UNAUTHORIZED_PRINCIPAL); resourceUrn = Urn.createFromString(RESOURCE_URN); // Init role membership mocks. 
@@ -74,25 +80,39 @@ public void setupTest() throws Exception { authorizedEntityResponse.setUrn(authorizedUserUrn); Map<Urn, EntityResponse> authorizedEntityResponseMap = Collections.singletonMap(authorizedUserUrn, authorizedEntityResponse); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any())).thenReturn(authorizedEntityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn(authorizedEntityResponseMap); EntityResponse unauthorizedEntityResponse = createUnauthorizedEntityResponse(); unauthorizedEntityResponse.setUrn(unauthorizedUserUrn); Map<Urn, EntityResponse> unauthorizedEntityResponseMap = Collections.singletonMap(unauthorizedUserUrn, unauthorizedEntityResponse); - when(_entityClient.batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(unauthorizedUserUrn)), - eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), any())).thenReturn(unauthorizedEntityResponseMap); + when(_entityClient.batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(unauthorizedUserUrn)), + eq(Collections.singleton(ROLE_MEMBERSHIP_ASPECT_NAME)), + any())) + .thenReturn(unauthorizedEntityResponseMap); // Init ownership type mocks. EntityResponse entityResponse = new EntityResponse(); EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data()))); + envelopedAspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new com.linkedin.entity.Aspect(createOwnershipAspect(true, true).data()))); entityResponse.setAspects(envelopedAspectMap); Map<Urn, EntityResponse> mockMap = mock(Map.class); - when(_entityClient.batchGetV2(any(), eq(Collections.singleton(resourceUrn)), - eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), any())).thenReturn(mockMap); + when(_entityClient.batchGetV2( + any(), + eq(Collections.singleton(resourceUrn)), + eq(Collections.singleton(OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(mockMap); when(mockMap.get(eq(resourceUrn))).thenReturn(entityResponse); } @@ -121,7 +141,10 @@ public void testEvaluatePolicyInactivePolicyState() { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -150,7 +173,10 @@ public void testEvaluatePolicyPrivilegeFilterNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_OWNERS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_OWNERS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -175,8 +201,8 @@ public void testEvaluatePlatformPolicyPrivilegeFilterMatch() throws Exception { dataHubPolicyInfo.setActors(actorFilter); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, 
"MANAGE_POLICIES", - Optional.empty()); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, resolvedAuthorizedUserSpec, "MANAGE_POLICIES", Optional.empty()); assertTrue(result.isGranted()); // Verify no network calls @@ -211,7 +237,10 @@ public void testEvaluatePolicyActorFilterUserMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert Authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -248,7 +277,10 @@ public void testEvaluatePolicyActorFilterUserNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, buildEntityResolvers(CORP_USER_ENTITY_NAME, "urn:li:corpuser:test"), "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + buildEntityResolvers(CORP_USER_ENTITY_NAME, "urn:li:corpuser:test"), + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -285,7 +317,10 @@ public void testEvaluatePolicyActorFilterGroupMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -321,7 +356,10 @@ public void testEvaluatePolicyActorFilterGroupNoMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -358,14 +396,18 @@ public void testEvaluatePolicyActorFilterRoleMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags. PolicyEngine.PolicyEvaluationResult authorizedResult = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(authorizedResult.isGranted()); // Verify we are only calling for roles during these requests. 
- verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), - eq(Collections.singleton(authorizedUserUrn)), any(), any()); + verify(_entityClient, times(1)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), any(), any()); } @Test @@ -397,14 +439,21 @@ public void testEvaluatePolicyActorFilterNoRoleMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult unauthorizedResult = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(unauthorizedResult.isGranted()); // Verify we are only calling for roles during these requests. - verify(_entityClient, times(1)).batchGetV2(eq(CORP_USER_ENTITY_NAME), - eq(Collections.singleton(unauthorizedUserUrn)), any(), any()); + verify(_entityClient, times(1)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(unauthorizedUserUrn)), + any(), + any()); } @Test @@ -432,13 +481,19 @@ public void testEvaluatePolicyActorFilterAllUsersMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because the policy applies to all users. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Assert unauthorized user can also edit entity tags, because the policy applies to all users. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result2.isGranted()); @@ -471,13 +526,19 @@ public void testEvaluatePolicyActorFilterAllGroupsMatch() throws Exception { ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert authorized user can edit entity tags, because of group membership. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Assert unauthorized user cannot edit entity tags. 
PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -509,17 +570,30 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersMatch() throws Except final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a user owner. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -542,7 +616,8 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setResourceOwnersTypes(new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); + actorFilter.setResourceOwnersTypes( + new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -552,18 +627,32 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeMatch() throws Ex final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspectWithTypeUrn(OWNERSHIP_TYPE_URN).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(createOwnershipAspectWithTypeUrn(OWNERSHIP_TYPE_URN).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), - Collections.emptySet()); - + buildEntityResolvers( + "dataset", + 
RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), + Collections.emptySet()); + PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec)); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertTrue(result1.isGranted()); // Verify no network calls @@ -585,7 +674,8 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setResourceOwnersTypes(new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); + actorFilter.setResourceOwnersTypes( + new UrnArray(ImmutableList.of(Urn.createFromString(OWNERSHIP_TYPE_URN)))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -595,17 +685,33 @@ public void testEvaluatePolicyActorFilterUserResourceOwnersTypeNoMatch() throws final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspectWithTypeUrn(OTHER_OWNERSHIP_TYPE_URN).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(createOwnershipAspectWithTypeUrn(OTHER_OWNERSHIP_TYPE_URN).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL), Collections.emptySet(), Collections.emptySet()); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL), + Collections.emptySet(), + Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", - Optional.of(resourceSpec)); + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", + Optional.of(resourceSpec)); assertFalse(result1.isGranted()); // Verify no network calls @@ -636,17 +742,30 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersMatch() throws Excep final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(false, true).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(false, true).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); ResolvedEntitySpec 
resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_GROUP), Collections.emptySet(), + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_GROUP), + Collections.emptySet(), Collections.emptySet()); // Assert authorized user can edit entity tags, because he is a group owner. PolicyEngine.PolicyEvaluationResult result1 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result1.isGranted()); @@ -678,7 +797,10 @@ public void testEvaluatePolicyActorFilterGroupResourceOwnersNoMatch() throws Exc ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); // Assert unauthorized user cannot edit entity tags. PolicyEngine.PolicyEvaluationResult result2 = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedUnauthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedUnauthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result2.isGranted()); @@ -708,9 +830,13 @@ public void testEvaluatePolicyResourceFilterAllResourcesMatch() throws Exception dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_. + buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A dataset Authorized principal _does not own_. PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -739,9 +865,13 @@ public void testEvaluatePolicyResourceFilterAllResourcesNoMatch() throws Excepti resourceFilter.setType("dataset"); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("chart", RESOURCE_URN); // Notice: Not a dataset. + ResolvedEntitySpec resourceSpec = + buildEntityResolvers("chart", RESOURCE_URN); // Notice: Not a dataset. 
PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -776,7 +906,10 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchLegacy() throws ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -801,14 +934,21 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatch() throws Excep dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -833,15 +973,23 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatch() throws Exc dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. + buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -866,15 +1014,27 @@ public void testEvaluatePolicyResourceFilterSpecificResourceMatchDomain() throws dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), Collections.singleton(DOMAIN_URN), Collections.emptySet()); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertTrue(result.isGranted()); @@ -899,15 +1059,27 @@ public void testEvaluatePolicyResourceFilterSpecificResourceNoMatchDomain() thro dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); - resourceFilter.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, + Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton("urn:li:domain:domain2"), Collections.emptySet()); // Domain doesn't match + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet()); // Domain doesn't match PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy(dataHubPolicyInfo, resolvedAuthorizedUserSpec, "EDIT_ENTITY_TAGS", + _policyEngine.evaluatePolicy( + dataHubPolicyInfo, + resolvedAuthorizedUserSpec, + "EDIT_ENTITY_TAGS", Optional.of(resourceSpec)); assertFalse(result.isGranted()); @@ -933,9 +1105,13 @@ public void testGetGrantedPrivileges() throws Exception { dataHubPolicyInfo1.setActors(actorFilter1); final DataHubResourceFilter resourceFilter1 = new DataHubResourceFilter(); - resourceFilter1.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), EntityFieldType.DOMAIN, - Collections.singletonList(DOMAIN_URN)))); + resourceFilter1.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.DOMAIN, 
+ Collections.singletonList(DOMAIN_URN)))); dataHubPolicyInfo1.setResources(resourceFilter1); // Policy 2, match dataset type and resource @@ -954,9 +1130,13 @@ public void testGetGrantedPrivileges() throws Exception { dataHubPolicyInfo2.setActors(actorFilter2); final DataHubResourceFilter resourceFilter2 = new DataHubResourceFilter(); - resourceFilter2.setFilter(FilterUtils.newFilter( - ImmutableMap.of(EntityFieldType.TYPE, Collections.singletonList("dataset"), - EntityFieldType.URN, Collections.singletonList(RESOURCE_URN)))); + resourceFilter2.setFilter( + FilterUtils.newFilter( + ImmutableMap.of( + EntityFieldType.TYPE, + Collections.singletonList("dataset"), + EntityFieldType.URN, + Collections.singletonList(RESOURCE_URN)))); dataHubPolicyInfo2.setResources(resourceFilter2); // Policy 3, match dataset type and owner (legacy resource filter) @@ -982,43 +1162,80 @@ public void testGetGrantedPrivileges() throws Exception { final List policies = ImmutableList.of(dataHubPolicyInfo1, dataHubPolicyInfo2, dataHubPolicyInfo3); - assertEquals(_policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.empty()), + assertEquals( + _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.empty()), Collections.emptyList()); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Everything matches + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Everything matches assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2")); - resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.emptySet(), - Collections.singleton("urn:li:domain:domain2"), Collections.emptySet()); // Domain doesn't match + resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.emptySet(), + Collections.singleton("urn:li:domain:domain2"), + Collections.emptySet()); // Domain doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_2_1", "PRIVILEGE_2_2")); - resourceSpec = buildEntityResolvers("dataset", "urn:li:dataset:random", Collections.emptySet(), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Resource doesn't match + resourceSpec = + buildEntityResolvers( + "dataset", + "urn:li:dataset:random", + Collections.emptySet(), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Resource doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1")); final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put(OWNERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); + aspectMap.put( + OWNERSHIP_ASPECT_NAME, 
+ new EnvelopedAspect().setValue(new Aspect(createOwnershipAspect(true, false).data()))); entityResponse.setAspects(aspectMap); - when(_entityClient.getV2(eq(resourceUrn.getEntityType()), eq(resourceUrn), eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), - any())).thenReturn(entityResponse); - resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Is owner + when(_entityClient.getV2( + eq(resourceUrn.getEntityType()), + eq(resourceUrn), + eq(Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME)), + any())) + .thenReturn(entityResponse); + resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + Collections.singleton(AUTHORIZED_PRINCIPAL), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Is owner assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), ImmutableList.of("PRIVILEGE_1", "PRIVILEGE_2_1", "PRIVILEGE_2_2", "PRIVILEGE_3")); - resourceSpec = buildEntityResolvers("chart", RESOURCE_URN, Collections.singleton(AUTHORIZED_PRINCIPAL), - Collections.singleton(DOMAIN_URN), Collections.emptySet()); // Resource type doesn't match + resourceSpec = + buildEntityResolvers( + "chart", + RESOURCE_URN, + Collections.singleton(AUTHORIZED_PRINCIPAL), + Collections.singleton(DOMAIN_URN), + Collections.emptySet()); // Resource type doesn't match assertEquals( - _policyEngine.getGrantedPrivileges(policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), + _policyEngine.getGrantedPrivileges( + policies, resolvedAuthorizedUserSpec, Optional.of(resourceSpec)), Collections.emptyList()); } @@ -1037,10 +1254,16 @@ public void testGetMatchingActorsResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(true); actorFilter.setAllGroups(true); - actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2")))); - actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setUsers( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")))); + actorFilter.setGroups( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")))); actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:role:Admin")))); dataHubPolicyInfo.setActors(actorFilter); @@ -1053,27 +1276,43 @@ public void testGetMatchingActorsResourceMatch() throws Exception { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP), - Collections.emptySet(), Collections.emptySet()); - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(AUTHORIZED_PRINCIPAL, AUTHORIZED_GROUP), + Collections.emptySet(), + Collections.emptySet()); + PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); 
assertTrue(actors.getAllUsers()); assertTrue(actors.getAllGroups()); - assertEquals(actors.getUsers(), - ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), Urn.createFromString("urn:li:corpuser:user2"), + assertEquals( + actors.getUsers(), + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2"), Urn.createFromString(AUTHORIZED_PRINCIPAL) // Resource Owner - )); + )); - assertEquals(actors.getGroups(), ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2"), Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner - )); + assertEquals( + actors.getGroups(), + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2"), + Urn.createFromString(AUTHORIZED_GROUP) // Resource Owner + )); assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:role:Admin"))); // Verify aspect client called, entity client not called. - verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(null), any()); + verify(_entityClient, times(0)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(null), + any()); } @Test @@ -1091,10 +1330,16 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(true); actorFilter.setAllGroups(true); - actorFilter.setUsers(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpuser:user1"), - Urn.createFromString("urn:li:corpuser:user2")))); - actorFilter.setGroups(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:corpGroup:group1"), - Urn.createFromString("urn:li:corpGroup:group2")))); + actorFilter.setUsers( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpuser:user1"), + Urn.createFromString("urn:li:corpuser:user2")))); + actorFilter.setGroups( + new UrnArray( + ImmutableList.of( + Urn.createFromString("urn:li:corpGroup:group1"), + Urn.createFromString("urn:li:corpGroup:group2")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1106,14 +1351,16 @@ public void testGetMatchingActorsNoResourceMatch() throws Exception { dataHubPolicyInfo.setResources(resourceFilter); ResolvedEntitySpec resourceSpec = - buildEntityResolvers("dataset", "urn:li:dataset:random"); // A resource not covered by the policy. - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + buildEntityResolvers( + "dataset", "urn:li:dataset:random"); // A resource not covered by the policy. 
+ PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertFalse(actors.getAllUsers()); assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), Collections.emptyList()); assertEquals(actors.getGroups(), Collections.emptyList()); - //assertEquals(actors.getRoles(), Collections.emptyList()); + // assertEquals(actors.getRoles(), Collections.emptyList()); // Verify no network calls verify(_entityClient, times(0)).batchGetV2(any(), any(), any(), any()); @@ -1133,7 +1380,8 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception { actorFilter.setResourceOwners(true); actorFilter.setAllUsers(false); actorFilter.setAllGroups(false); - actorFilter.setRoles(new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); + actorFilter.setRoles( + new UrnArray(ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor")))); dataHubPolicyInfo.setActors(actorFilter); final DataHubResourceFilter resourceFilter = new DataHubResourceFilter(); @@ -1144,24 +1392,36 @@ public void testGetMatchingActorsByRoleResourceMatch() throws Exception { resourceFilter.setResources(resourceUrns); dataHubPolicyInfo.setResources(resourceFilter); - ResolvedEntitySpec resourceSpec = buildEntityResolvers("dataset", RESOURCE_URN, ImmutableSet.of(), - Collections.emptySet(), Collections.emptySet()); + ResolvedEntitySpec resourceSpec = + buildEntityResolvers( + "dataset", + RESOURCE_URN, + ImmutableSet.of(), + Collections.emptySet(), + Collections.emptySet()); - PolicyEngine.PolicyActors actors = _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); + PolicyEngine.PolicyActors actors = + _policyEngine.getMatchingActors(dataHubPolicyInfo, Optional.of(resourceSpec)); assertFalse(actors.getAllUsers()); assertFalse(actors.getAllGroups()); assertEquals(actors.getUsers(), ImmutableList.of()); assertEquals(actors.getGroups(), ImmutableList.of()); - assertEquals(actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); + assertEquals( + actors.getRoles(), ImmutableList.of(Urn.createFromString("urn:li:dataHubRole:Editor"))); // Verify aspect client called, entity client not called. 
- verify(_entityClient, times(0)).batchGetV2(eq(CORP_USER_ENTITY_NAME), eq(Collections.singleton(authorizedUserUrn)), - eq(null), any()); + verify(_entityClient, times(0)) + .batchGetV2( + eq(CORP_USER_ENTITY_NAME), + eq(Collections.singleton(authorizedUserUrn)), + eq(null), + any()); } - private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) throws Exception { + private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolean addGroupOwner) + throws Exception { final Ownership ownershipAspect = new Ownership(); final OwnerArray owners = new OwnerArray(); @@ -1180,7 +1440,8 @@ private Ownership createOwnershipAspect(final Boolean addUserOwner, final Boolea } ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -1194,7 +1455,8 @@ private Ownership createOwnershipAspectWithTypeUrn(final String typeUrn) throws owners.add(userOwner); ownershipAspect.setOwners(owners); - ownershipAspect.setLastModified(new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); + ownershipAspect.setLastModified( + new AuditStamp().setTime(0).setActor(Urn.createFromString("urn:li:corpuser:foo"))); return ownershipAspect; } @@ -1206,7 +1468,9 @@ private EntityResponse createAuthorizedEntityResponse() throws URISyntaxExceptio final UrnArray roles = new UrnArray(); roles.add(Urn.createFromString("urn:li:dataHubRole:admin")); rolesAspect.setRoles(roles); - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); entityResponse.setAspects(aspectMap); return entityResponse; @@ -1220,24 +1484,41 @@ private EntityResponse createUnauthorizedEntityResponse() throws URISyntaxExcept final UrnArray roles = new UrnArray(); roles.add(Urn.createFromString("urn:li:dataHubRole:reader")); rolesAspect.setRoles(roles); - aspectMap.put(ROLE_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); + aspectMap.put( + ROLE_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(rolesAspect.data()))); entityResponse.setAspects(aspectMap); return entityResponse; } public static ResolvedEntitySpec buildEntityResolvers(String entityType, String entityUrn) { - return buildEntityResolvers(entityType, entityUrn, Collections.emptySet(), Collections.emptySet(), Collections.emptySet()); + return buildEntityResolvers( + entityType, + entityUrn, + Collections.emptySet(), + Collections.emptySet(), + Collections.emptySet()); } - public static ResolvedEntitySpec buildEntityResolvers(String entityType, String entityUrn, Set owners, - Set domains, Set groups) { - return new ResolvedEntitySpec(new EntitySpec(entityType, entityUrn), - ImmutableMap.of(EntityFieldType.TYPE, - FieldResolver.getResolverFromValues(Collections.singleton(entityType)), EntityFieldType.URN, - FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)), EntityFieldType.OWNER, - FieldResolver.getResolverFromValues(owners), EntityFieldType.DOMAIN, - FieldResolver.getResolverFromValues(domains), EntityFieldType.GROUP_MEMBERSHIP, + public static ResolvedEntitySpec buildEntityResolvers( + String entityType, + String entityUrn, + Set owners, + 
Set domains, + Set groups) { + return new ResolvedEntitySpec( + new EntitySpec(entityType, entityUrn), + ImmutableMap.of( + EntityFieldType.TYPE, + FieldResolver.getResolverFromValues(Collections.singleton(entityType)), + EntityFieldType.URN, + FieldResolver.getResolverFromValues(Collections.singleton(entityUrn)), + EntityFieldType.OWNER, + FieldResolver.getResolverFromValues(owners), + EntityFieldType.DOMAIN, + FieldResolver.getResolverFromValues(domains), + EntityFieldType.GROUP_MEMBERSHIP, FieldResolver.getResolverFromValues(groups))); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java index 52a8d2454ffba..ca9ee92b77a5a 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/RoleServiceTest.java @@ -1,5 +1,7 @@ package com.datahub.authorization; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -10,9 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; - - public class RoleServiceTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; @@ -35,21 +34,23 @@ public void setupTest() throws Exception { @Test public void testBatchAssignRoleNoActorExists() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - false); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(false); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING), - roleUrn, - SYSTEM_AUTHENTICATION); + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, never()).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); } @Test public void testBatchAssignRoleSomeActorExists() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); @@ -57,12 +58,15 @@ public void testBatchAssignRoleSomeActorExists() throws Exception { @Test public void testBatchAssignRoleAllActorsExist() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); - when(_entityClient.exists(eq(Urn.createFromString(SECOND_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); + when(_entityClient.exists( + eq(Urn.createFromString(SECOND_ACTOR_URN_STRING)), 
eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING, SECOND_ACTOR_URN_STRING), roleUrn, SYSTEM_AUTHENTICATION); verify(_entityClient, times(2)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); @@ -70,10 +74,12 @@ public void testBatchAssignRoleAllActorsExist() throws Exception { @Test public void testAssignNullRoleToActorAllActorsExist() throws Exception { - when(_entityClient.exists(eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))).thenReturn( - true); + when(_entityClient.exists( + eq(Urn.createFromString(FIRST_ACTOR_URN_STRING)), eq(SYSTEM_AUTHENTICATION))) + .thenReturn(true); - _roleService.batchAssignRoleToActors(ImmutableList.of(FIRST_ACTOR_URN_STRING), null, SYSTEM_AUTHENTICATION); + _roleService.batchAssignRoleToActors( + ImmutableList.of(FIRST_ACTOR_URN_STRING), null, SYSTEM_AUTHENTICATION); verify(_entityClient, times(1)).ingestProposal(any(), eq(SYSTEM_AUTHENTICATION), eq(false)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java index 5c7d87f1c05a9..d5d5b0c4e6c71 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/DataPlatformInstanceFieldResolverProviderTest.java @@ -40,10 +40,8 @@ public class DataPlatformInstanceFieldResolverProviderTest { "urn:li:dataset:(urn:li:dataPlatform:s3,test-platform-instance.testDataset,PROD)"; private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); - @Mock - private EntityClient entityClientMock; - @Mock - private Authentication systemAuthenticationMock; + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; private DataPlatformInstanceFieldResolverProvider dataPlatformInstanceFieldResolverProvider; @@ -56,37 +54,42 @@ public void setup() { @Test public void shouldReturnDataPlatformInstanceType() { - assertEquals(EntityFieldType.DATA_PLATFORM_INSTANCE, dataPlatformInstanceFieldResolverProvider.getFieldTypes().get(0)); + assertEquals( + EntityFieldType.DATA_PLATFORM_INSTANCE, + dataPlatformInstanceFieldResolverProvider.getFieldTypes().get(0)); } @Test public void shouldReturnFieldValueWithResourceSpecIfTypeIsDataPlatformInstance() { - var resourceSpec = new EntitySpec(DATA_PLATFORM_INSTANCE_ENTITY_NAME, DATA_PLATFORM_INSTANCE_URN); + var resourceSpec = + new EntitySpec(DATA_PLATFORM_INSTANCE_ENTITY_NAME, DATA_PLATFORM_INSTANCE_URN); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(resourceSpec); - assertEquals(Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); + assertEquals( + Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); verifyZeroInteractions(entityClientMock); } @Test - public void shouldReturnEmptyFieldValueWhenResponseIsNull() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + throws RemoteInvocationException, URISyntaxException 
{ when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(null); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test @@ -95,99 +98,104 @@ public void shouldReturnEmptyFieldValueWhenResourceHasNoDataPlatformInstance() var entityResponseMock = mock(EntityResponse.class); when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test - public void shouldReturnEmptyFieldValueWhenThereIsAnException() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenThrow(new RemoteInvocationException()); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnEmptyFieldValueWhenDataPlatformInstanceHasNoInstance() throws RemoteInvocationException, URISyntaxException { - var dataPlatform = new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); + var dataPlatform = + new 
DataPlatformInstance().setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, + envelopedAspectMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(dataPlatform.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithDataPlatformInstanceOfTheResource() throws RemoteInvocationException, URISyntaxException { - var dataPlatformInstance = new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")) - .setInstance(Urn.createFromString(DATA_PLATFORM_INSTANCE_URN)); + var dataPlatformInstance = + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")) + .setInstance(Urn.createFromString(DATA_PLATFORM_INSTANCE_URN)); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, + envelopedAspectMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(dataPlatformInstance.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = dataPlatformInstanceFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); - assertEquals(Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + assertEquals( + Set.of(DATA_PLATFORM_INSTANCE_URN), result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(Collections.singleton(DATA_PLATFORM_INSTANCE_ASPECT_NAME)), + eq(systemAuthenticationMock)); } } diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java 
b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java index af547f14cd3fc..542f6c9f8bc79 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/fieldresolverprovider/GroupMembershipFieldResolverProviderTest.java @@ -1,5 +1,12 @@ package com.datahub.authorization.fieldresolverprovider; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.datahub.authorization.EntityFieldType; import com.datahub.authorization.EntitySpec; @@ -15,32 +22,23 @@ import com.linkedin.identity.GroupMembership; import com.linkedin.identity.NativeGroupMembership; import com.linkedin.r2.RemoteInvocationException; +import java.net.URISyntaxException; +import java.util.Set; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import java.net.URISyntaxException; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertTrue; - public class GroupMembershipFieldResolverProviderTest { private static final String CORPGROUP_URN = "urn:li:corpGroup:groupname"; private static final String NATIVE_CORPGROUP_URN = "urn:li:corpGroup:nativegroupname"; - private static final String RESOURCE_URN = "urn:li:dataset:(urn:li:dataPlatform:testPlatform,testDataset,PROD)"; + private static final String RESOURCE_URN = + "urn:li:dataset:(urn:li:dataPlatform:testPlatform,testDataset,PROD)"; private static final EntitySpec RESOURCE_SPEC = new EntitySpec(DATASET_ENTITY_NAME, RESOURCE_URN); - @Mock - private EntityClient entityClientMock; - @Mock - private Authentication systemAuthenticationMock; + @Mock private EntityClient entityClientMock; + @Mock private Authentication systemAuthenticationMock; private GroupMembershipFieldResolverProvider groupMembershipFieldResolverProvider; @@ -53,27 +51,30 @@ public void setup() { @Test public void shouldReturnGroupsMembershipType() { - assertEquals(EntityFieldType.GROUP_MEMBERSHIP, groupMembershipFieldResolverProvider.getFieldTypes().get(0)); + assertEquals( + EntityFieldType.GROUP_MEMBERSHIP, + groupMembershipFieldResolverProvider.getFieldTypes().get(0)); } @Test - public void shouldReturnEmptyFieldValueWhenResponseIsNull() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenResponseIsNull() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(null); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(null); var result = 
groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test @@ -82,131 +83,144 @@ public void shouldReturnEmptyFieldValueWhenResourceDoesNotBelongToAnyGroup() var entityResponseMock = mock(EntityResponse.class); when(entityResponseMock.getAspects()).thenReturn(new EnvelopedAspectMap()); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test - public void shouldReturnEmptyFieldValueWhenThereIsAnException() throws RemoteInvocationException, URISyntaxException { + public void shouldReturnEmptyFieldValueWhenThereIsAnException() + throws RemoteInvocationException, URISyntaxException { when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenThrow(new RemoteInvocationException()); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenThrow(new RemoteInvocationException()); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertTrue(result.getFieldValuesFuture().join().getValues().isEmpty()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithOnlyGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var groupMembership = new GroupMembership().setGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); + var groupMembership = + new GroupMembership() + .setGroups(new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); var entityResponseMock = 
mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertEquals(Set.of(CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithOnlyNativeGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var nativeGroupMembership = new NativeGroupMembership().setNativeGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); + var nativeGroupMembership = + new NativeGroupMembership() + .setNativeGroups( + new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); assertEquals(Set.of(NATIVE_CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } @Test public void shouldReturnFieldValueWithGroupsAndNativeGroupsOfTheResource() throws RemoteInvocationException, URISyntaxException { - var groupMembership = new GroupMembership().setGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); - var nativeGroupMembership = new 
NativeGroupMembership().setNativeGroups( - new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); + var groupMembership = + new GroupMembership() + .setGroups(new UrnArray(ImmutableList.of(Urn.createFromString(CORPGROUP_URN)))); + var nativeGroupMembership = + new NativeGroupMembership() + .setNativeGroups( + new UrnArray(ImmutableList.of(Urn.createFromString(NATIVE_CORPGROUP_URN)))); var entityResponseMock = mock(EntityResponse.class); var envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(groupMembership.data()))); - envelopedAspectMap.put(NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, + envelopedAspectMap.put( + NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(nativeGroupMembership.data()))); when(entityResponseMock.getAspects()).thenReturn(envelopedAspectMap); when(entityClientMock.getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - )).thenReturn(entityResponseMock); + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock))) + .thenReturn(entityResponseMock); var result = groupMembershipFieldResolverProvider.getFieldResolver(RESOURCE_SPEC); - assertEquals(Set.of(CORPGROUP_URN, NATIVE_CORPGROUP_URN), result.getFieldValuesFuture().join().getValues()); - verify(entityClientMock, times(1)).getV2( - eq(DATASET_ENTITY_NAME), - any(Urn.class), - eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), - eq(systemAuthenticationMock) - ); + assertEquals( + Set.of(CORPGROUP_URN, NATIVE_CORPGROUP_URN), + result.getFieldValuesFuture().join().getValues()); + verify(entityClientMock, times(1)) + .getV2( + eq(DATASET_ENTITY_NAME), + any(Urn.class), + eq(ImmutableSet.of(GROUP_MEMBERSHIP_ASPECT_NAME, NATIVE_GROUP_MEMBERSHIP_ASPECT_NAME)), + eq(systemAuthenticationMock)); } -} \ No newline at end of file +} diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java index 2e974d309f127..a0bbe69691db4 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java @@ -1,5 +1,9 @@ package com.datahub.telemetry; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -19,16 +23,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class TrackingServiceTest { private static final String APP_VERSION_FIELD = "appVersion"; private static final String APP_VERSION = "1.0.0"; private static final String CLIENT_ID = "testClientId"; - private static final TelemetryClientId TELEMETRY_CLIENT_ID = new TelemetryClientId().setClientId(CLIENT_ID); + private static final TelemetryClientId TELEMETRY_CLIENT_ID = + new 
TelemetryClientId().setClientId(CLIENT_ID); private static final String NOT_ALLOWED_FIELD = "browserId"; private static final String NOT_ALLOWED_FIELD_VALUE = "testBrowserId"; private static final String EVENT_TYPE_FIELD = "type"; @@ -38,10 +38,17 @@ public class TrackingServiceTest { private static final String ACTOR_URN_STRING = "urn:li:corpuser:user"; private static final String HASHED_ACTOR_URN_STRING = "hashedActorUrn"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + OBJECT_MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); } + private Urn _clientIdUrn; private JSONObject _mixpanelMessage; private MixpanelAPI _mixpanelAPI; @@ -62,19 +69,28 @@ public void setupTest() { GitVersion gitVersion = new GitVersion(APP_VERSION, "", Optional.empty()); _trackingService = - new TrackingService(_mixpanelAPI, _mixpanelMessageBuilder, _secretService, _entityService, gitVersion); + new TrackingService( + _mixpanelAPI, _mixpanelMessageBuilder, _secretService, _entityService, gitVersion); } @Test public void testEmitAnalyticsEvent() throws IOException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); when(_entityService.exists(_clientIdUrn)).thenReturn(true); - when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))).thenReturn(TELEMETRY_CLIENT_ID); - when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())).thenReturn(_mixpanelMessage); + when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) + .thenReturn(TELEMETRY_CLIENT_ID); + when(_mixpanelMessageBuilder.event(eq(CLIENT_ID), eq(EVENT_TYPE), any())) + .thenReturn(_mixpanelMessage); final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, ACTOR_URN_FIELD, - ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, + EVENT_TYPE, + ACTOR_URN_FIELD, + ACTOR_URN_STRING, + NOT_ALLOWED_FIELD, + NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); _trackingService.emitAnalyticsEvent(event); @@ -84,7 +100,8 @@ public void testEmitAnalyticsEvent() throws IOException { @Test public void testGetClientIdAlreadyExists() { when(_entityService.exists(_clientIdUrn)).thenReturn(true); - when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))).thenReturn(TELEMETRY_CLIENT_ID); + when(_entityService.getLatestAspect(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT))) + .thenReturn(TELEMETRY_CLIENT_ID); assertEquals(CLIENT_ID, _trackingService.getClientId()); } @@ -94,15 +111,17 @@ public void testGetClientIdDoesNotExist() { when(_entityService.exists(_clientIdUrn)).thenReturn(false); assertNotNull(_trackingService.getClientId()); - verify(_entityService, times(1)).ingestAspectIfNotPresent(eq(_clientIdUrn), eq(CLIENT_ID_ASPECT), - any(TelemetryClientId.class), any(), eq(null)); + verify(_entityService, times(1)) + .ingestAspectIfNotPresent( + eq(_clientIdUrn), eq(CLIENT_ID_ASPECT), 
any(TelemetryClientId.class), any(), eq(null)); } @Test public void testSanitizeEventNoEventType() throws JsonProcessingException, JSONException { final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\"}", ACTOR_URN_FIELD, ACTOR_URN_STRING, NOT_ALLOWED_FIELD, - NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\"}", + ACTOR_URN_FIELD, ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); @@ -116,8 +135,9 @@ public void testSanitizeEventNoEventType() throws JsonProcessingException, JSONE @Test public void testSanitizeEventNoActorUrn() throws JsonProcessingException, JSONException { final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, NOT_ALLOWED_FIELD, - NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, EVENT_TYPE, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); @@ -133,8 +153,14 @@ public void testSanitizeEvent() throws JsonProcessingException, JSONException { when(_secretService.hashString(eq(ACTOR_URN_STRING))).thenReturn(HASHED_ACTOR_URN_STRING); final String eventString = - String.format("{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", EVENT_TYPE_FIELD, EVENT_TYPE, ACTOR_URN_FIELD, - ACTOR_URN_STRING, NOT_ALLOWED_FIELD, NOT_ALLOWED_FIELD_VALUE); + String.format( + "{\"%s\": \"%s\", \"%s\": \"%s\", \"%s\": \"%s\"}", + EVENT_TYPE_FIELD, + EVENT_TYPE, + ACTOR_URN_FIELD, + ACTOR_URN_STRING, + NOT_ALLOWED_FIELD, + NOT_ALLOWED_FIELD_VALUE); final JsonNode event = OBJECT_MAPPER.readTree(eventString); final JSONObject sanitizedEvent = _trackingService.sanitizeEvent(event); diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index 34354a47b7f04..4e9fe3e335dc3 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -1,13 +1,15 @@ package com.datahub.auth.authentication; -import com.datahub.authentication.invite.InviteTokenService; -import com.datahub.authentication.token.StatelessTokenService; -import com.datahub.authentication.token.TokenType; -import com.datahub.authentication.user.NativeUserService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.token.StatelessTokenService; +import com.datahub.authentication.token.TokenType; +import com.datahub.authentication.user.NativeUserService; import com.datahub.telemetry.TrackingService; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; @@ -29,9 +31,6 @@ import org.springframework.web.bind.annotation.RestController; import org.springframework.web.client.HttpClientErrorException; -import static com.linkedin.metadata.Constants.*; - - 
@Slf4j @RestController public class AuthServiceController { @@ -46,55 +45,49 @@ public class AuthServiceController { private static final String INVITE_TOKEN_FIELD_NAME = "inviteToken"; private static final String RESET_TOKEN_FIELD_NAME = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD_NAME = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD_NAME = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD_NAME = "doesPasswordMatch"; - @Inject - StatelessTokenService _statelessTokenService; + @Inject StatelessTokenService _statelessTokenService; - @Inject - Authentication _systemAuthentication; + @Inject Authentication _systemAuthentication; @Inject @Qualifier("configurationProvider") ConfigurationProvider _configProvider; - @Inject - NativeUserService _nativeUserService; + @Inject NativeUserService _nativeUserService; - @Inject - InviteTokenService _inviteTokenService; + @Inject InviteTokenService _inviteTokenService; - @Inject - @Nullable - TrackingService _trackingService; + @Inject @Nullable TrackingService _trackingService; /** - * Generates a JWT access token for as user UI session, provided a unique "user id" to generate the token for inside a JSON - * POST body. + * Generates a JWT access token for as user UI session, provided a unique "user id" to generate + * the token for inside a JSON POST body. * - * Example Request: + *
Example Request: * - * POST /generateSessionTokenForUser -H "Authorization: Basic <system client id>:<system client secret>" - * { - * "userId": "datahub" - * } + *
POST /generateSessionTokenForUser -H "Authorization: Basic + * <system client id>:<system client secret>" { "userId": "datahub" } * - * Example Response: + *
Example Response: * - * { - * "accessToken": "" - * } + *
{ "accessToken": "" } */ @PostMapping(value = "/generateSessionTokenForUser", produces = "application/json;charset=utf-8") - CompletableFuture> generateSessionTokenForUser(final HttpEntity httpEntity) { + CompletableFuture> generateSessionTokenForUser( + final HttpEntity httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson = null; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to generate session token %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to generate session token %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -110,46 +103,45 @@ CompletableFuture> generateSessionTokenForUser(final Http log.debug(String.format("Attempting to generate session token for user %s", userId.asText())); final String actorId = AuthenticationContext.getAuthentication().getActor().getId(); - return CompletableFuture.supplyAsync(() -> { - // 1. Verify that only those authorized to generate a token (datahub system) are able to. - if (isAuthorizedToGenerateSessionToken(actorId)) { - try { - // 2. Generate a new DataHub JWT - final String token = _statelessTokenService.generateAccessToken( - TokenType.SESSION, - new Actor(ActorType.USER, userId.asText()), - _configProvider.getAuthentication().getSessionTokenDurationMs()); - return new ResponseEntity<>(buildTokenResponse(token), HttpStatus.OK); - } catch (Exception e) { - log.error("Failed to generate session token for user", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - throw HttpClientErrorException.create(HttpStatus.UNAUTHORIZED, "Unauthorized to perform this action.", new HttpHeaders(), null, null); - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. Verify that only those authorized to generate a token (datahub system) are able to. + if (isAuthorizedToGenerateSessionToken(actorId)) { + try { + // 2. Generate a new DataHub JWT + final String token = + _statelessTokenService.generateAccessToken( + TokenType.SESSION, + new Actor(ActorType.USER, userId.asText()), + _configProvider.getAuthentication().getSessionTokenDurationMs()); + return new ResponseEntity<>(buildTokenResponse(token), HttpStatus.OK); + } catch (Exception e) { + log.error("Failed to generate session token for user", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + } + throw HttpClientErrorException.create( + HttpStatus.UNAUTHORIZED, + "Unauthorized to perform this action.", + new HttpHeaders(), + null, + null); + }); } /** - * Creates a native DataHub user using the provided full name, email and password. The provided invite token must - * be current otherwise a new user will not be created. + * Creates a native DataHub user using the provided full name, email and password. The provided + * invite token must be current otherwise a new user will not be created. * - * Example Request: + *
Example Request: * - * POST /signUp -H "Authorization: Basic <system client id>:<system client secret>" - * { - * "fullName": "Full Name" - * "userUrn": "urn:li:corpuser:test" - * "email": "email@test.com" - * "title": "Data Scientist" - * "password": "password123" - * "inviteToken": "abcd" - * } + *
POST /signUp -H "Authorization: Basic <system client id>:<system client secret>" { + * "fullName": "Full Name" "userUrn": "urn:li:corpuser:test" "email": "email@test.com" "title": + * "Data Scientist" "password": "password123" "inviteToken": "abcd" } * - * Example Response: + *
Example Response: * - * { - * "isNativeUserCreated": true - * } + *
{ "isNativeUserCreated": true } */ @PostMapping(value = "/signUp", produces = "application/json;charset=utf-8") CompletableFuture> signUp(final HttpEntity httpEntity) { @@ -159,7 +151,8 @@ CompletableFuture> signUp(final HttpEntity httpEn try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + log.error( + String.format("Failed to parse json while attempting to create native user %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -174,15 +167,22 @@ CompletableFuture> signUp(final HttpEntity httpEn JsonNode title = bodyJson.get(TITLE_FIELD_NAME); JsonNode password = bodyJson.get(PASSWORD_FIELD_NAME); JsonNode inviteToken = bodyJson.get(INVITE_TOKEN_FIELD_NAME); - if (fullName == null || userUrn == null || email == null || title == null || password == null + if (fullName == null + || userUrn == null + || email == null + || title == null + || password == null || inviteToken == null) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } String userUrnString = userUrn.asText(); - String systemClientUser = new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); + String systemClientUser = + new CorpuserUrn(_configProvider.getAuthentication().getSystemClientId()).toString(); - if (userUrnString.equals(systemClientUser) || userUrnString.equals(DATAHUB_ACTOR) || userUrnString.equals(UNKNOWN_ACTOR)) { + if (userUrnString.equals(systemClientUser) + || userUrnString.equals(DATAHUB_ACTOR) + || userUrnString.equals(UNKNOWN_ACTOR)) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } String fullNameString = fullName.asText(); @@ -192,53 +192,52 @@ CompletableFuture> signUp(final HttpEntity httpEn String inviteTokenString = inviteToken.asText(); Authentication auth = AuthenticationContext.getAuthentication(); log.debug(String.format("Attempting to create native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, auth)) { - log.error(String.format("Invalid invite token %s", inviteTokenString)); - return new ResponseEntity<>(HttpStatus.BAD_REQUEST); - } + return CompletableFuture.supplyAsync( + () -> { + try { + Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenString); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, auth)) { + log.error(String.format("Invalid invite token %s", inviteTokenString)); + return new ResponseEntity<>(HttpStatus.BAD_REQUEST); + } - _nativeUserService.createNativeUser(userUrnString, fullNameString, emailString, titleString, passwordString, - auth); - String response = buildSignUpResponse(); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to create credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + _nativeUserService.createNativeUser( + userUrnString, fullNameString, emailString, titleString, passwordString, auth); + String response = buildSignUpResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to create credentials for native user %s", 
userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } /** - * Resets the credentials for a native DataHub user using the provided email and new password. The provided reset - * token must be current otherwise the credentials will not be updated + * Resets the credentials for a native DataHub user using the provided email and new password. The + * provided reset token must be current otherwise the credentials will not be updated * - * Example Request: + *
Example Request: * - * POST /resetNativeUserCredentials -H "Authorization: Basic <system client id>:<system client secret>" - * { - * "userUrn": "urn:li:corpuser:test" - * "password": "password123" - * "resetToken": "abcd" - * } + *
POST /resetNativeUserCredentials -H "Authorization: Basic + * <system client id>:<system client secret>" { "userUrn": "urn:li:corpuser:test" "password": + * "password123" "resetToken": "abcd" } * - * Example Response: + *
Example Response: * - * { - * "areNativeUserCredentialsReset": true - * } + *
{ "areNativeUserCredentialsReset": true } */ @PostMapping(value = "/resetNativeUserCredentials", produces = "application/json;charset=utf-8") - CompletableFuture> resetNativeUserCredentials(final HttpEntity httpEntity) { + CompletableFuture> resetNativeUserCredentials( + final HttpEntity httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to create native user %s", jsonStr)); + log.error( + String.format("Failed to parse json while attempting to create native user %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -259,45 +258,46 @@ CompletableFuture> resetNativeUserCredentials(final HttpE String resetTokenString = resetToken.asText(); Authentication auth = AuthenticationContext.getAuthentication(); log.debug(String.format("Attempting to reset credentials for native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - _nativeUserService.resetCorpUserCredentials(userUrnString, passwordString, resetTokenString, - auth); - String response = buildResetNativeUserCredentialsResponse(); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to reset credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _nativeUserService.resetCorpUserCredentials( + userUrnString, passwordString, resetTokenString, auth); + String response = buildResetNativeUserCredentialsResponse(); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to reset credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } /** * Verifies the credentials for a native DataHub user. * - * Example Request: + *
Example Request: * - * POST /verifyNativeUserCredentials -H "Authorization: Basic <system client id>:<system client secret>" - * { - * "userUrn": "urn:li:corpuser:test" - * "password": "password123" - * } + *
POST /verifyNativeUserCredentials -H "Authorization: Basic + * <system client id>:<system client secret>" { "userUrn": "urn:li:corpuser:test" "password": + * "password123" } * - * Example Response: + *
Example Response: * - * { - * "passwordMatches": true - * } + *
{ "passwordMatches": true } */ @PostMapping(value = "/verifyNativeUserCredentials", produces = "application/json;charset=utf-8") - CompletableFuture> verifyNativeUserCredentials(final HttpEntity httpEntity) { + CompletableFuture> verifyNativeUserCredentials( + final HttpEntity httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); JsonNode bodyJson; try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to verify native user password %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to verify native user password %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { @@ -315,21 +315,22 @@ CompletableFuture> verifyNativeUserCredentials(final Http String userUrnString = userUrn.asText(); String passwordString = password.asText(); log.debug(String.format("Attempting to verify credentials for native user %s", userUrnString)); - return CompletableFuture.supplyAsync(() -> { - try { - boolean doesPasswordMatch = _nativeUserService.doesPasswordMatch(userUrnString, passwordString); - String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); - return new ResponseEntity<>(response, HttpStatus.OK); - } catch (Exception e) { - log.error(String.format("Failed to verify credentials for native user %s", userUrnString), e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + boolean doesPasswordMatch = + _nativeUserService.doesPasswordMatch(userUrnString, passwordString); + String response = buildVerifyNativeUserPasswordResponse(doesPasswordMatch); + return new ResponseEntity<>(response, HttpStatus.OK); + } catch (Exception e) { + log.error( + String.format("Failed to verify credentials for native user %s", userUrnString), e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } - /** - * Tracking endpoint - */ + /** Tracking endpoint */ @PostMapping(value = "/track", produces = "application/json;charset=utf-8") CompletableFuture> track(final HttpEntity httpEntity) { String jsonStr = httpEntity.getBody(); @@ -338,23 +339,26 @@ CompletableFuture> track(final HttpEntity httpEnt try { bodyJson = mapper.readTree(jsonStr); } catch (JsonProcessingException e) { - log.error(String.format("Failed to parse json while attempting to track analytics event %s", jsonStr)); + log.error( + String.format( + "Failed to parse json while attempting to track analytics event %s", jsonStr)); return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } if (bodyJson == null) { return CompletableFuture.completedFuture(new ResponseEntity<>(HttpStatus.BAD_REQUEST)); } - return CompletableFuture.supplyAsync(() -> { - try { - if (_trackingService != null) { - _trackingService.emitAnalyticsEvent(bodyJson); - } - return new ResponseEntity<>(HttpStatus.OK); - } catch (Exception e) { - log.error("Failed to track event", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (_trackingService != null) { + _trackingService.emitAnalyticsEvent(bodyJson); + } + return new ResponseEntity<>(HttpStatus.OK); + } catch (Exception e) { + log.error("Failed to track event", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); + } + }); } // Currently, only 
internal system is authorized to generate a token on behalf of a user! diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java index cf882f6ce6813..9f1aefd4cf17a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AssetsConfiguration.java @@ -2,12 +2,10 @@ import lombok.Data; - @Data public class AssetsConfiguration { - /** - * The url of the logo to render in the DataHub Application. - */ + /** The url of the logo to render in the DataHub Application. */ public String logoUrl; + public String faviconUrl; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java index 6a5c13970517a..eacbe7816f75c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/AuthPluginConfiguration.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data public class AuthPluginConfiguration { - /** - * Plugin base directory path, default to /etc/datahub/plugins/auth - */ + /** Plugin base directory path, default to /etc/datahub/plugins/auth */ String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java index 2374686b76d01..72dfbf84a4a00 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/DataHubConfiguration.java @@ -1,15 +1,15 @@ package com.linkedin.metadata.config; import lombok.Data; -/** - * POJO representing the "datahub" configuration block in application.yml. - */ + +/** POJO representing the "datahub" configuration block in application.yml. */ @Data public class DataHubConfiguration { /** - * Indicates the type of server that has been deployed: quickstart, prod, or a custom configuration + * Indicates the type of server that has been deployed: quickstart, prod, or a custom + * configuration */ public String serverType; private PluginConfiguration plugin; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java index 7c4394d07bf9c..8ebea29a32659 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityProfileConfig.java @@ -2,11 +2,11 @@ import lombok.Data; - @Data public class EntityProfileConfig { /** - * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not present. + * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not + * present. 
*/ public String domainDefaultTab; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java index 4b00346a469c3..4fcbca3527d2a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/EntityRegistryPluginConfiguration.java @@ -2,8 +2,7 @@ import lombok.Data; - @Data public class EntityRegistryPluginConfiguration { String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java index 5b10b59ff0c20..2f3c3dc3bd546 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/IngestionConfiguration.java @@ -2,18 +2,12 @@ import lombok.Data; -/** - * POJO representing the "ingestion" configuration block in application.yml. - */ +/** POJO representing the "ingestion" configuration block in application.yml. */ @Data public class IngestionConfiguration { - /** - * Whether managed ingestion is enabled - */ + /** Whether managed ingestion is enabled */ public boolean enabled; - /** - * The default CLI version to use in managed ingestion - */ + /** The default CLI version to use in managed ingestion */ public String defaultCliVersion; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java index 0645c1d7ea96c..5f34a6a5d4f05 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PluginConfiguration.java @@ -2,28 +2,24 @@ import lombok.Data; - @Data public class PluginConfiguration { /** * Plugin security mode, either RESTRICTED or LENIENT * - * Note: Ideally the pluginSecurityMode should be of type com.datahub.plugin.common.SecurityMode from metadata-service/plugin, - * However avoiding to include metadata-service/plugin as dependency in this module (i.e. metadata-io) as some modules - * from metadata-service/ are dependent on metadata-io, so it might create a circular dependency + *
Note: Ideally the pluginSecurityMode should be of type + * com.datahub.plugin.common.SecurityMode from metadata-service/plugin, However avoiding to + * include metadata-service/plugin as dependency in this module (i.e. metadata-io) as some modules + * from metadata-service/ are dependent on metadata-io, so it might create a circular dependency */ private String pluginSecurityMode; - /** - * Directory path of entity registry, default to /etc/datahub/plugins/models - */ + /** Directory path of entity registry, default to /etc/datahub/plugins/models */ private EntityRegistryPluginConfiguration entityRegistry; - /** - * The location where the Retention config files live - */ + + /** The location where the Retention config files live */ private RetentionPluginConfiguration retention; - /** - * Plugin framework's plugin base directory path, default to /etc/datahub/plugins/auth - */ + + /** Plugin framework's plugin base directory path, default to /etc/datahub/plugins/auth */ private AuthPluginConfiguration auth; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java index 912abf525631b..987df7f307d39 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/PreProcessHooks.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PreProcessHooks { private boolean uiEnabled; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java index ef03206996823..cc80954afd27e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/QueriesTabConfig.java @@ -2,11 +2,8 @@ import lombok.Data; - @Data public class QueriesTabConfig { - /** - * The number of queries to show on the queries tab. - */ + /** The number of queries to show on the queries tab. */ public Integer queriesTabResultSize; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java index dde7ede34c659..d553f2e268509 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/RetentionPluginConfiguration.java @@ -2,8 +2,7 @@ import lombok.Data; - @Data public class RetentionPluginConfiguration { String path; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java index 7094bbd710f75..1901c433e82c8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/SearchResultVisualConfig.java @@ -4,8 +4,9 @@ @Data public class SearchResultVisualConfig { - /** - * The default tab to show first on a Domain entity profile. 
Defaults to React code sorting if not present. - */ - public Boolean enableNameHighlight; + /** + * The default tab to show first on a Domain entity profile. Defaults to React code sorting if not + * present. + */ + public Boolean enableNameHighlight; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java index 96e4a1716974e..738a9684cc764 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/TestsConfiguration.java @@ -2,13 +2,9 @@ import lombok.Data; -/** - * POJO representing the "tests" configuration block in application.yml.on.yml - */ +/** POJO representing the "tests" configuration block in application.yml.on.yml */ @Data public class TestsConfiguration { - /** - * Whether tests are enabled - */ + /** Whether tests are enabled */ public boolean enabled; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java index 89c7376dfd110..670a412ec285e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/ViewsConfiguration.java @@ -2,13 +2,9 @@ import lombok.Data; -/** - * POJO representing the "views" configuration block in application.yml.on.yml - */ +/** POJO representing the "views" configuration block in application.yml.on.yml */ @Data public class ViewsConfiguration { - /** - * Whether Views are enabled - */ + /** Whether Views are enabled */ public boolean enabled; -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java index 14ac2406c2256..bc749a373c5b0 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/VisualConfiguration.java @@ -2,29 +2,18 @@ import lombok.Data; - -/** - * POJO representing visualConfig block in the application.yml. - */ +/** POJO representing visualConfig block in the application.yml. 
*/ @Data public class VisualConfiguration { - /** - * Asset related configurations - */ + /** Asset related configurations */ public AssetsConfiguration assets; - /** - * Queries tab related configurations - */ + /** Queries tab related configurations */ public QueriesTabConfig queriesTab; - /** - * Queries tab related configurations - */ + /** Queries tab related configurations */ public EntityProfileConfig entityProfile; - /** - * Search result related configurations - */ + /** Search result related configurations */ public SearchResultVisualConfig searchResult; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java index aff0e23e3b337..70601b8a69fe6 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/CacheConfiguration.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.cache.client.ClientCacheConfiguration; import lombok.Data; - @Data public class CacheConfiguration { PrimaryCacheConfiguration primary; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java index 3bd7ea1758b2b..9a684ee92f3f8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/EntityDocCountCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class EntityDocCountCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java index 3e60c4bf587e1..be39e71bb1b52 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/HomepageCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class HomepageCacheConfiguration { EntityDocCountCacheConfiguration entityCounts; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java index a1b08695e4089..001eb41f05cb7 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/PrimaryCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PrimaryCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java index 290b566caf962..ab686cc266b9f 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java 
+++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchCacheConfiguration { SearchLineageCacheConfiguration lineage; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java index a121900435b1f..b8fb371dfa13c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/SearchLineageCacheConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchLineageCacheConfiguration { long ttlSeconds; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java index 3cf7ef20797bb..32136929d4875 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfig.java @@ -1,10 +1,13 @@ package com.linkedin.metadata.config.cache.client; - public interface ClientCacheConfig { - boolean isEnabled(); - boolean isStatsEnabled(); - int getStatsIntervalSeconds(); - int getDefaultTTLSeconds(); - int getMaxBytes(); + boolean isEnabled(); + + boolean isStatsEnabled(); + + int getStatsIntervalSeconds(); + + int getDefaultTTLSeconds(); + + int getMaxBytes(); } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java index d940bbe135e55..7564ee978e2bd 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/ClientCacheConfiguration.java @@ -4,6 +4,6 @@ @Data public class ClientCacheConfiguration { - EntityClientCacheConfig entityClient; - UsageClientCacheConfig usageClient; + EntityClientCacheConfig entityClient; + UsageClientCacheConfig usageClient; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java index 595b614f2f599..3bb37373db0eb 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/EntityClientCacheConfig.java @@ -1,17 +1,16 @@ package com.linkedin.metadata.config.cache.client; -import lombok.Data; - import java.util.Map; +import lombok.Data; @Data public class EntityClientCacheConfig implements ClientCacheConfig { - private boolean enabled; - private boolean statsEnabled; - private int statsIntervalSeconds; - private int defaultTTLSeconds; - private int maxBytes; + private boolean enabled; + private boolean statsEnabled; + private int statsIntervalSeconds; + private int 
defaultTTLSeconds; + private int maxBytes; - // entityName -> aspectName -> cache ttl override - private Map> entityAspectTTLSeconds; + // entityName -> aspectName -> cache ttl override + private Map> entityAspectTTLSeconds; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java index 3aebec9422ed8..f5a9c24c4b188 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/cache/client/UsageClientCacheConfig.java @@ -4,9 +4,9 @@ @Data public class UsageClientCacheConfig implements ClientCacheConfig { - private boolean enabled; - private boolean statsEnabled; - private int statsIntervalSeconds; - private int defaultTTLSeconds; - private int maxBytes; + private boolean enabled; + private boolean statsEnabled; + private int statsIntervalSeconds; + private int defaultTTLSeconds; + private int maxBytes; } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java index 7a93119226a2d..b505674f2ed9c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ConsumerConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ConsumerConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java index 26a8c6b649133..960baa9cd9172 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/ProducerConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ProducerConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java index 7a8594853e0d0..ac0c248f5e559 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/kafka/SchemaRegistryConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SchemaRegistryConfiguration { private String type; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java index 74db6fb9719d4..70ffa59ea40e2 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/BuildIndicesConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class BuildIndicesConfiguration { diff --git 
a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java index 7a0292c2adec1..82e3868fa3974 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/CustomConfiguration.java @@ -1,17 +1,15 @@ package com.linkedin.metadata.config.search; -import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; import lombok.Data; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.FileSystemResource; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; - - @Data @Slf4j public class CustomConfiguration { @@ -20,6 +18,7 @@ public class CustomConfiguration { /** * Materialize the search configuration from a location external to main application.yml + * * @param mapper yaml enabled jackson mapper * @return search configuration class * @throws IOException diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java index 30679bbaab9ce..130620a9ab918 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ElasticSearchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ElasticSearchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java index 89636ee3c47c5..b471116ebe349 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/ExactMatchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class ExactMatchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java index 816a7e41470f5..6f3e1cb278f5f 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/GraphQueryConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class GraphQueryConfiguration { @@ -11,6 +10,7 @@ public class GraphQueryConfiguration { private int maxResult; public static GraphQueryConfiguration testDefaults; + static { testDefaults = new GraphQueryConfiguration(); testDefaults.setBatchSize(1000); diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java 
b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java index 5d24248be3650..5dbdcfb269a77 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/PartialConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class PartialConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java index b2b5260dc5e70..befce024fbc1a 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/SearchConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class SearchConfiguration { diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java index 624d2a4c63c4c..fcce110a56d9c 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/WordGramConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class WordGramConfiguration { private float twoGramFactor; diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java index 460501cc91075..652aa0cc8842e 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/BoolQueryConfiguration.java @@ -7,21 +7,19 @@ import lombok.Getter; import lombok.ToString; - @Builder(toBuilder = true) @Getter @ToString @EqualsAndHashCode @JsonDeserialize(builder = BoolQueryConfiguration.BoolQueryConfigurationBuilder.class) public class BoolQueryConfiguration { - private Object must; - private Object should; - //CHECKSTYLE:OFF - private Object must_not; - //CHECKSTYLE:ON - private Object filter; + private Object must; + private Object should; + // CHECKSTYLE:OFF + private Object must_not; + // CHECKSTYLE:ON + private Object filter; - @JsonPOJOBuilder(withPrefix = "") - public static class BoolQueryConfigurationBuilder { - } + @JsonPOJOBuilder(withPrefix = "") + public static class BoolQueryConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java index 15deea7620e3d..e6756ca8f0da8 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/CustomSearchConfiguration.java @@ -2,13 +2,11 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import 
com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import java.util.List; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; -import java.util.List; - - @Builder(toBuilder = true) @Getter @EqualsAndHashCode @@ -18,6 +16,5 @@ public class CustomSearchConfiguration { private List queryConfigurations; @JsonPOJOBuilder(withPrefix = "") - public static class CustomSearchConfigurationBuilder { - } + public static class CustomSearchConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java index cd4364a64a0c5..901bf803d2bca 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/search/custom/QueryConfiguration.java @@ -2,15 +2,13 @@ import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; +import java.util.Map; import lombok.Builder; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.ToString; import lombok.extern.slf4j.Slf4j; -import java.util.Map; - - @Slf4j @Builder(toBuilder = true) @Getter @@ -19,17 +17,13 @@ @JsonDeserialize(builder = QueryConfiguration.QueryConfigurationBuilder.class) public class QueryConfiguration { - private String queryRegex; - @Builder.Default - private boolean simpleQuery = true; - @Builder.Default - private boolean exactMatchQuery = true; - @Builder.Default - private boolean prefixMatchQuery = true; - private BoolQueryConfiguration boolQuery; - private Map functionScore; + private String queryRegex; + @Builder.Default private boolean simpleQuery = true; + @Builder.Default private boolean exactMatchQuery = true; + @Builder.Default private boolean prefixMatchQuery = true; + private BoolQueryConfiguration boolQuery; + private Map functionScore; - @JsonPOJOBuilder(withPrefix = "") - public static class QueryConfigurationBuilder { - } + @JsonPOJOBuilder(withPrefix = "") + public static class QueryConfigurationBuilder {} } diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java index 3821cbbed83e8..aa6825360a2df 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/config/telemetry/TelemetryConfiguration.java @@ -1,26 +1,19 @@ package com.linkedin.metadata.config.telemetry; import lombok.Data; -/** - * POJO representing the "telemetry" configuration block in application.yml. - */ + +/** POJO representing the "telemetry" configuration block in application.yml. 
*/ @Data public class TelemetryConfiguration { - /** - * Whether cli telemetry is enabled - */ - public boolean enabledCli; - /** - * Whether reporting telemetry is enabled - */ - public boolean enabledIngestion; - /** - * Whether or not third party logging should be enabled for this instance - */ - public boolean enableThirdPartyLogging; + /** Whether cli telemetry is enabled */ + public boolean enabledCli; + + /** Whether reporting telemetry is enabled */ + public boolean enabledIngestion; + + /** Whether or not third party logging should be enabled for this instance */ + public boolean enableThirdPartyLogging; - /** - * Whether or not server telemetry should be enabled - */ - public boolean enabledServer; -} \ No newline at end of file + /** Whether or not server telemetry should be enabled */ + public boolean enabledServer; +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java index c10399c4f3e70..f84ac9ec8bfe3 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/spring/YamlPropertySourceFactory.java @@ -1,18 +1,14 @@ package com.linkedin.metadata.spring; +import java.io.IOException; +import java.util.Properties; import org.springframework.beans.factory.config.YamlPropertiesFactoryBean; import org.springframework.core.env.PropertiesPropertySource; import org.springframework.core.env.PropertySource; import org.springframework.core.io.support.EncodedResource; import org.springframework.core.io.support.PropertySourceFactory; -import java.io.IOException; -import java.util.Properties; - - -/** - * Required for Spring to parse the application.yml provided by this module - */ +/** Required for Spring to parse the application.yml provided by this module */ public class YamlPropertySourceFactory implements PropertySourceFactory { @Override @@ -25,4 +21,4 @@ public PropertySource createPropertySource(String name, EncodedResource encod return new PropertiesPropertySource(encodedResource.getResource().getFilename(), properties); } -} \ No newline at end of file +} diff --git a/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java b/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java index d506cf972c255..576969a1032dd 100644 --- a/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java +++ b/metadata-service/configuration/src/main/java/com/linkedin/metadata/timeseries/BatchWriteOperationsOptions.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class BatchWriteOperationsOptions { private int batchSize; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java index b90257870a8b2..ab6990dcf0603 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AuthorizerChainFactory.java @@ -1,13 +1,13 @@ package com.linkedin.gms.factory.auth; +import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizerChain; 
+import com.datahub.authorization.AuthorizerContext; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.DefaultEntitySpecResolver; +import com.datahub.authorization.EntitySpecResolver; import com.datahub.plugins.PluginConstant; -import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; -import com.datahub.authorization.AuthorizerContext; -import com.datahub.authorization.EntitySpecResolver; import com.datahub.plugins.common.PluginConfig; import com.datahub.plugins.common.PluginPermissionManager; import com.datahub.plugins.common.PluginType; @@ -19,8 +19,8 @@ import com.datahub.plugins.loader.PluginPermissionManagerImpl; import com.google.common.collect.ImmutableMap; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; @@ -38,7 +38,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -86,61 +85,75 @@ private EntitySpecResolver initResolver() { private List initCustomAuthorizers(EntitySpecResolver resolver) { final List customAuthorizers = new ArrayList<>(); - Path pluginBaseDirectory = Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); - ConfigProvider configProvider = - new ConfigProvider(pluginBaseDirectory); + Path pluginBaseDirectory = + Paths.get(configurationProvider.getDatahub().getPlugin().getAuth().getPath()); + ConfigProvider configProvider = new ConfigProvider(pluginBaseDirectory); Optional optionalConfig = configProvider.load(); // Register authorizer plugins if present - optionalConfig.ifPresent((config) -> { - registerAuthorizer(customAuthorizers, resolver, config); - }); + optionalConfig.ifPresent( + (config) -> { + registerAuthorizer(customAuthorizers, resolver, config); + }); return customAuthorizers; } - private void registerAuthorizer(List customAuthorizers, EntitySpecResolver resolver, Config config) { + private void registerAuthorizer( + List customAuthorizers, EntitySpecResolver resolver, Config config) { PluginConfigFactory authorizerPluginPluginConfigFactory = new PluginConfigFactory(config); // Load only Authorizer configuration from plugin config factory List authorizers = authorizerPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); // Select only enabled authorizer for instantiation - List enabledAuthorizers = authorizers.stream().filter(pluginConfig -> { - if (!pluginConfig.getEnabled()) { - log.info(String.format("Authorizer %s is not enabled", pluginConfig.getName())); - } - return pluginConfig.getEnabled(); - }).collect(Collectors.toList()); + List enabledAuthorizers = + authorizers.stream() + .filter( + pluginConfig -> { + if (!pluginConfig.getEnabled()) { + log.info(String.format("Authorizer %s is not enabled", pluginConfig.getName())); + } + return pluginConfig.getEnabled(); + }) + .collect(Collectors.toList()); // Get security mode set by user SecurityMode securityMode = - SecurityMode.valueOf(this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); + SecurityMode.valueOf( + this.configurationProvider.getDatahub().getPlugin().getPluginSecurityMode()); 
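// Sketch of the swap-and-restore pattern the plugin loop below relies on: each
// plugin's classes resolve through its own IsolatedClassLoader, and the original
// context class loader is always restored, even when init fails.
// (pluginClassLoader is a stand-in for the per-plugin IsolatedClassLoader; this
// sketch is illustrative and not part of the patch.)
ClassLoader original = Thread.currentThread().getContextClassLoader();
try {
  Thread.currentThread().setContextClassLoader(pluginClassLoader);
  // instantiate the Authorizer via the isolated loader and call init(configs, context)
} finally {
  Thread.currentThread().setContextClassLoader(original);
}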
// Create permission manager with security mode PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(securityMode); - // Save ContextClassLoader. As some plugins are directly using context classloader from current thread to load libraries + // Save ContextClassLoader. As some plugins are directly using context classloader from current + // thread to load libraries // This will break plugin as their dependencies are inside plugin directory only ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader(); // Instantiate Authorizer plugins - enabledAuthorizers.forEach((pluginConfig) -> { - // Create context - AuthorizerContext context = new AuthorizerContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString()), resolver); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, pluginConfig); - try { - Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); - Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); - log.info("Initializing plugin {}", pluginConfig.getName()); - authorizer.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); - customAuthorizers.add(authorizer); - log.info("Plugin {} is initialized", pluginConfig.getName()); - } catch (ClassNotFoundException e) { - log.debug(String.format("Failed to init the plugin", pluginConfig.getName())); - throw new RuntimeException(e); - } finally { - Thread.currentThread().setContextClassLoader(contextClassLoader); - } - }); + enabledAuthorizers.forEach( + (pluginConfig) -> { + // Create context + AuthorizerContext context = + new AuthorizerContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, pluginConfig.getPluginHomeDirectory().toString()), + resolver); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, pluginConfig); + try { + Thread.currentThread().setContextClassLoader((ClassLoader) isolatedClassLoader); + Authorizer authorizer = + (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); + log.info("Initializing plugin {}", pluginConfig.getName()); + authorizer.init(pluginConfig.getConfigs().orElse(Collections.emptyMap()), context); + customAuthorizers.add(authorizer); + log.info("Plugin {} is initialized", pluginConfig.getName()); + } catch (ClassNotFoundException e) { + log.debug(String.format("Failed to init the plugin", pluginConfig.getName())); + throw new RuntimeException(e); + } finally { + Thread.currentThread().setContextClassLoader(contextClassLoader); + } + }); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java index 363c657453b49..296aab95ae427 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/AwsRequestSigningApacheInterceptor.java @@ -34,38 +34,30 @@ import software.amazon.awssdk.regions.Region; /** - * An {@link HttpRequestInterceptor} that signs requests using any AWS {@link Signer} - * and {@link AwsCredentialsProvider}. + * An {@link HttpRequestInterceptor} that signs requests using any AWS {@link Signer} and {@link + * AwsCredentialsProvider}. 
*/ public class AwsRequestSigningApacheInterceptor implements HttpRequestInterceptor { - /** - * The service that we're connecting to. - */ + /** The service that we're connecting to. */ private final String service; - /** - * The particular signer implementation. - */ + /** The particular signer implementation. */ private final Signer signer; - /** - * The source of AWS credentials for signing. - */ + /** The source of AWS credentials for signing. */ private final AwsCredentialsProvider awsCredentialsProvider; - /** - * The region signing region. - */ + /** The region signing region. */ private final Region region; /** - * * @param service service that we're connecting to * @param signer particular signer implementation * @param awsCredentialsProvider source of AWS credentials for signing * @param region signing region */ - public AwsRequestSigningApacheInterceptor(final String service, + public AwsRequestSigningApacheInterceptor( + final String service, final Signer signer, final AwsCredentialsProvider awsCredentialsProvider, final Region region) { @@ -76,22 +68,20 @@ public AwsRequestSigningApacheInterceptor(final String service, } /** - * * @param service service that we're connecting to * @param signer particular signer implementation * @param awsCredentialsProvider source of AWS credentials for signing * @param region signing region */ - public AwsRequestSigningApacheInterceptor(final String service, + public AwsRequestSigningApacheInterceptor( + final String service, final Signer signer, final AwsCredentialsProvider awsCredentialsProvider, final String region) { this(service, signer, awsCredentialsProvider, Region.of(region)); } - /** - * {@inheritDoc} - */ + /** {@inheritDoc} */ @Override public void process(final HttpRequest request, final HttpContext context) throws HttpException, IOException { @@ -103,13 +93,13 @@ public void process(final HttpRequest request, final HttpContext context) } // Copy Apache HttpRequest to AWS Request - SdkHttpFullRequest.Builder requestBuilder = SdkHttpFullRequest.builder() - .method(SdkHttpMethod.fromValue(request.getRequestLine().getMethod())) - .uri(buildUri(context, uriBuilder)); + SdkHttpFullRequest.Builder requestBuilder = + SdkHttpFullRequest.builder() + .method(SdkHttpMethod.fromValue(request.getRequestLine().getMethod())) + .uri(buildUri(context, uriBuilder)); if (request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest httpEntityEnclosingRequest = - (HttpEntityEnclosingRequest) request; + HttpEntityEnclosingRequest httpEntityEnclosingRequest = (HttpEntityEnclosingRequest) request; if (httpEntityEnclosingRequest.getEntity() != null) { InputStream content = httpEntityEnclosingRequest.getEntity().getContent(); requestBuilder.contentStreamProvider(() -> content); @@ -119,7 +109,8 @@ public void process(final HttpRequest request, final HttpContext context) requestBuilder.headers(headerArrayToMap(request.getAllHeaders())); ExecutionAttributes attributes = new ExecutionAttributes(); - attributes.putAttribute(AwsSignerExecutionAttribute.AWS_CREDENTIALS, awsCredentialsProvider.resolveCredentials()); + attributes.putAttribute( + AwsSignerExecutionAttribute.AWS_CREDENTIALS, awsCredentialsProvider.resolveCredentials()); attributes.putAttribute(AwsSignerExecutionAttribute.SERVICE_SIGNING_NAME, service); attributes.putAttribute(AwsSignerExecutionAttribute.SIGNING_REGION, region); @@ -129,13 +120,14 @@ public void process(final HttpRequest request, final HttpContext context) // Now copy everything back 
request.setHeaders(mapToHeaderArray(signedRequest.headers())); if (request instanceof HttpEntityEnclosingRequest) { - HttpEntityEnclosingRequest httpEntityEnclosingRequest = - (HttpEntityEnclosingRequest) request; + HttpEntityEnclosingRequest httpEntityEnclosingRequest = (HttpEntityEnclosingRequest) request; if (httpEntityEnclosingRequest.getEntity() != null) { BasicHttpEntity basicHttpEntity = new BasicHttpEntity(); - basicHttpEntity.setContent(signedRequest.contentStreamProvider() - .orElseThrow(() -> new IllegalStateException("There must be content")) - .newStream()); + basicHttpEntity.setContent( + signedRequest + .contentStreamProvider() + .orElseThrow(() -> new IllegalStateException("There must be content")) + .newStream()); // wrap into repeatable entity to support retries httpEntityEnclosingRequest.setEntity(new BufferedHttpEntity(basicHttpEntity)); } @@ -159,15 +151,13 @@ private URI buildUri(final HttpContext context, URIBuilder uriBuilder) throws IO } /** - * * @param params list of HTTP query params as NameValuePairs * @return a multimap of HTTP query params */ private static Map<String, List<String>> nvpToMapParams(final List<NameValuePair> params) { Map<String, List<String>> parameterMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (NameValuePair nvp : params) { - List<String> argsList = - parameterMap.computeIfAbsent(nvp.getName(), k -> new ArrayList<>()); + List<String> argsList = parameterMap.computeIfAbsent(nvp.getName(), k -> new ArrayList<>()); argsList.add(nvp.getValue()); } return parameterMap; } @@ -181,9 +171,10 @@ private static Map<String, List<String>> headerArrayToMap(final Header[] headers Map<String, List<String>> headersMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); for (Header header : headers) { if (!skipHeader(header)) { - headersMap.put(header.getName(), headersMap - .getOrDefault(header.getName(), - new LinkedList<>(Collections.singletonList(header.getValue())))); + headersMap.put( + header.getName(), + headersMap.getOrDefault( + header.getName(), new LinkedList<>(Collections.singletonList(header.getValue())))); } } return headersMap; } @@ -195,7 +186,7 @@ private static Map<String, List<String>> headerArrayToMap(final Header[] headers */ private static boolean skipHeader(final Header header) { return ("content-length".equalsIgnoreCase(header.getName()) - && "0".equals(header.getValue())) // Strip Content-Length: 0 + && "0".equals(header.getValue())) // Strip Content-Length: 0 || "host".equalsIgnoreCase(header.getName()); // Host comes from endpoint } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java index 663234e2519fa..3b23243f76742 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubAuthorizerFactory.java @@ -2,8 +2,8 @@ import com.datahub.authentication.Authentication; import com.datahub.authorization.DataHubAuthorizer; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory =
YamlPropertySourceFactory.class) @Import({RestliEntityClientFactory.class}) @@ -43,10 +42,17 @@ public class DataHubAuthorizerFactory { @Nonnull protected DataHubAuthorizer getInstance() { - final DataHubAuthorizer.AuthorizationMode mode = policiesEnabled ? DataHubAuthorizer.AuthorizationMode.DEFAULT - : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; - - return new DataHubAuthorizer(systemAuthentication, entityClient, 10, - policyCacheRefreshIntervalSeconds, mode, policyCacheFetchSize); + final DataHubAuthorizer.AuthorizationMode mode = + policiesEnabled + ? DataHubAuthorizer.AuthorizationMode.DEFAULT + : DataHubAuthorizer.AuthorizationMode.ALLOW_ALL; + + return new DataHubAuthorizer( + systemAuthentication, + entityClient, + 10, + policyCacheRefreshIntervalSeconds, + mode, + policyCacheFetchSize); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java index d47e1a0a73401..83544e4165ae3 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/DataHubTokenServiceFactory.java @@ -2,9 +2,8 @@ import com.datahub.authentication.token.StatefulTokenService; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -29,12 +28,7 @@ public class DataHubTokenServiceFactory { @Value("${authentication.tokenService.issuer:datahub-metadata-service}") private String issuer; - /** - * + @Inject - * + @Named("entityService") - * + private EntityService _entityService; - * + - */ + /** + @Inject + @Named("entityService") + private EntityService _entityService; + */ @Autowired @Qualifier("entityService") private EntityService _entityService; @@ -44,11 +38,6 @@ public class DataHubTokenServiceFactory { @Nonnull protected StatefulTokenService getInstance() { return new StatefulTokenService( - this.signingKey, - this.signingAlgorithm, - this.issuer, - this._entityService, - this.saltingKey - ); + this.signingKey, this.signingAlgorithm, this.issuer, this._entityService, this.saltingKey); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java index 57598abf8095d..7c6c4384d7343 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/GroupServiceFactory.java @@ -1,12 +1,10 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authentication.group.GroupService; import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +13,6 @@ import 
org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class GroupServiceFactory { @@ -37,4 +34,4 @@ public class GroupServiceFactory { protected GroupService getInstance() throws Exception { return new GroupService(this._javaEntityClient, this._entityService, this._graphClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java index 105f4c677a9e4..c44eada46794d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.invite.InviteTokenService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class InviteTokenServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index a0df661852935..844f3a094b6b7 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -1,13 +1,11 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authentication.user.NativeUserService; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.client.JavaEntityClient; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -16,7 +14,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class NativeUserServiceFactory { @@ -32,14 +29,16 @@ public class NativeUserServiceFactory { @Qualifier("dataHubSecretService") private SecretService _secretService; - @Autowired - private ConfigurationProvider _configurationProvider; + @Autowired private ConfigurationProvider _configurationProvider; @Bean(name = "nativeUserService") @Scope("singleton") @Nonnull protected NativeUserService getInstance() throws Exception { - return new 
NativeUserService(_entityService, _javaEntityClient, _secretService, + return new NativeUserService( + _entityService, + _javaEntityClient, + _secretService, _configurationProvider.getAuthentication()); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java index cc6f5c8272f9d..a6ae703576a3e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/PostServiceFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.auth; import com.datahub.authentication.post.PostService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -11,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PostServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java index 8a85f63cdd66d..7696d5201493a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RoleServiceFactory.java @@ -1,10 +1,8 @@ - - package com.linkedin.gms.factory.auth; import com.datahub.authorization.role.RoleService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -13,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RoleServiceFactory { @@ -28,4 +25,4 @@ public class RoleServiceFactory { protected RoleService getInstance() throws Exception { return new RoleService(this._javaEntityClient); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java index 5bdd8cbf83c65..52d13b05a654d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/SystemAuthenticationFactory.java @@ -13,10 +13,9 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - /** - * Factory responsible for instantiating an instance of {@link Authentication} used to authenticate requests - * made by the internal
system. + * Factory responsible for instantiating an instance of {@link Authentication} used to authenticate + * requests made by the internal system. */ @Configuration @ConfigurationProperties @@ -37,7 +36,6 @@ protected Authentication getInstance() { // TODO: Change to service final Actor systemActor = new Actor(ActorType.USER, this.systemClientId); return new Authentication( - systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret) - ); + systemActor, String.format("Basic %s:%s", this.systemClientId, this.systemSecret)); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java index 820b272bedb67..44f1669546e33 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/CacheConfig.java @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class CacheConfig { @@ -50,25 +49,30 @@ private Caffeine<Object, Object> caffeineCacheBuilder() { @ConditionalOnProperty(name = "searchService.cacheImplementation", havingValue = "hazelcast") public CacheManager hazelcastCacheManager() { Config config = new Config(); - // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, containsKey etc. + // TODO: This setting is equivalent to expireAfterAccess, refreshes timer after a get, put, + // containsKey etc. // is this behavior what we actually desire? Should we change it now? MapConfig mapConfig = new MapConfig().setMaxIdleSeconds(cacheTtlSeconds); - EvictionConfig evictionConfig = new EvictionConfig() - .setMaxSizePolicy(MaxSizePolicy.PER_NODE) - .setSize(cacheMaxSize) - .setEvictionPolicy(EvictionPolicy.LFU); + EvictionConfig evictionConfig = + new EvictionConfig() + .setMaxSizePolicy(MaxSizePolicy.PER_NODE) + .setSize(cacheMaxSize) + .setEvictionPolicy(EvictionPolicy.LFU); mapConfig.setEvictionConfig(evictionConfig); mapConfig.setName("default"); config.addMapConfig(mapConfig); config.getNetworkConfig().getJoin().getMulticastConfig().setEnabled(false); - config.getNetworkConfig().getJoin().getKubernetesConfig().setEnabled(true) + config + .getNetworkConfig() + .getJoin() + .getKubernetesConfig() + .setEnabled(true) .setProperty("service-dns", hazelcastServiceName); - HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(config); return new HazelcastCacheManager(hazelcastInstance); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java index d80d57799ee4d..ddfce627b56cd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/DatasetUrnNameCasingFactory.java @@ -4,7 +4,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class DatasetUrnNameCasingFactory { @Nonnull @@ -13,4 +12,4 @@ protected Boolean getInstance() { String datasetUrnNameCasingEnv = System.getenv("DATAHUB_DATASET_URN_TO_LOWER"); return
Boolean.parseBoolean(datasetUrnNameCasingEnv); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java index 51c7db5e37366..aa47a35f3d38a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchGraphServiceFactory.java @@ -3,12 +3,12 @@ import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; -import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.graph.elastic.ESGraphQueryDAO; import com.linkedin.metadata.graph.elastic.ESGraphWriteDAO; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.models.registry.LineageRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -17,7 +17,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({BaseElasticSearchComponentsFactory.class, EntityRegistryFactory.class}) @@ -30,17 +29,25 @@ public class ElasticSearchGraphServiceFactory { @Qualifier("entityRegistry") private EntityRegistry entityRegistry; - @Autowired - private ConfigurationProvider configurationProvider; + @Autowired private ConfigurationProvider configurationProvider; @Bean(name = "elasticSearchGraphService") @Nonnull protected ElasticSearchGraphService getInstance() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - return new ElasticSearchGraphService(lineageRegistry, components.getBulkProcessor(), components.getIndexConvention(), - new ESGraphWriteDAO(components.getIndexConvention(), components.getBulkProcessor(), components.getNumRetries()), - new ESGraphQueryDAO(components.getSearchClient(), lineageRegistry, components.getIndexConvention(), - configurationProvider.getElasticSearch().getSearch().getGraph()), + return new ElasticSearchGraphService( + lineageRegistry, + components.getBulkProcessor(), + components.getIndexConvention(), + new ESGraphWriteDAO( + components.getIndexConvention(), + components.getBulkProcessor(), + components.getNumRetries()), + new ESGraphQueryDAO( + components.getSearchClient(), + lineageRegistry, + components.getIndexConvention(), + configurationProvider.getElasticSearch().getSearch().getGraph()), components.getIndexBuilder()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java index 504618ba9cc6a..20c3e92767ce4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java +++ 
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticSearchSystemMetadataServiceFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({BaseElasticSearchComponentsFactory.class}) @@ -24,8 +23,14 @@ public class ElasticSearchSystemMetadataServiceFactory { @Bean(name = "elasticSearchSystemMetadataService") @Nonnull protected ElasticSearchSystemMetadataService getInstance() { - return new ElasticSearchSystemMetadataService(components.getBulkProcessor(), components.getIndexConvention(), - new ESSystemMetadataDAO(components.getSearchClient(), components.getIndexConvention(), - components.getBulkProcessor(), components.getNumRetries()), components.getIndexBuilder()); + return new ElasticSearchSystemMetadataService( + components.getBulkProcessor(), + components.getIndexConvention(), + new ESSystemMetadataDAO( + components.getSearchClient(), + components.getIndexConvention(), + components.getBulkProcessor(), + components.getNumRetries()), + components.getIndexBuilder()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java index 0dce80b98964b..483251644b6c0 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/ElasticsearchSSLContextFactory.java @@ -1,103 +1,117 @@ package com.linkedin.gms.factory.common; import com.linkedin.metadata.spring.YamlPropertySourceFactory; -import org.apache.http.ssl.SSLContextBuilder; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -import java.security.NoSuchAlgorithmException; -import java.security.SecureRandom; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.SecureRandom; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; +import org.apache.http.ssl.SSLContextBuilder; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ElasticsearchSSLContextFactory { - @Value("${elasticsearch.sslContext.protocol}") - private String sslProtocol; - - @Value("${elasticsearch.sslContext.secureRandomImplementation}") - private String sslSecureRandomImplementation; + @Value("${elasticsearch.sslContext.protocol}") + private String sslProtocol; - @Value("${elasticsearch.sslContext.trustStoreFile}") - private String sslTrustStoreFile; + 
@Value("${elasticsearch.sslContext.secureRandomImplementation}") + private String sslSecureRandomImplementation; - @Value("${elasticsearch.sslContext.trustStoreType}") - private String sslTrustStoreType; + @Value("${elasticsearch.sslContext.trustStoreFile}") + private String sslTrustStoreFile; - @Value("${elasticsearch.sslContext.trustStorePassword}") - private String sslTrustStorePassword; + @Value("${elasticsearch.sslContext.trustStoreType}") + private String sslTrustStoreType; - @Value("${elasticsearch.sslContext.keyStoreFile}") - private String sslKeyStoreFile; + @Value("${elasticsearch.sslContext.trustStorePassword}") + private String sslTrustStorePassword; - @Value("${elasticsearch.sslContext.keyStoreType}") - private String sslKeyStoreType; + @Value("${elasticsearch.sslContext.keyStoreFile}") + private String sslKeyStoreFile; - @Value("${elasticsearch.sslContext.keyStorePassword}") - private String sslKeyStorePassword; + @Value("${elasticsearch.sslContext.keyStoreType}") + private String sslKeyStoreType; - @Value("${elasticsearch.sslContext.keyPassword}") - private String sslKeyPassword; + @Value("${elasticsearch.sslContext.keyStorePassword}") + private String sslKeyStorePassword; - @Bean(name = "elasticSearchSSLContext") - public SSLContext createInstance() { - final SSLContextBuilder sslContextBuilder = new SSLContextBuilder(); - if (sslProtocol != null) { - sslContextBuilder.useProtocol(sslProtocol); - } + @Value("${elasticsearch.sslContext.keyPassword}") + private String sslKeyPassword; - if (sslTrustStoreFile != null && sslTrustStoreType != null && sslTrustStorePassword != null) { - loadTrustStore(sslContextBuilder, sslTrustStoreFile, sslTrustStoreType, sslTrustStorePassword); - } - - if (sslKeyStoreFile != null && sslKeyStoreType != null && sslKeyStorePassword != null && sslKeyPassword != null) { - loadKeyStore(sslContextBuilder, sslKeyStoreFile, sslKeyStoreType, sslKeyStorePassword, sslKeyPassword); - } - - final SSLContext sslContext; - try { - if (sslSecureRandomImplementation != null) { - sslContextBuilder.setSecureRandom(SecureRandom.getInstance(sslSecureRandomImplementation)); - } - sslContext = sslContextBuilder.build(); - } catch (NoSuchAlgorithmException | KeyManagementException e) { - throw new RuntimeException("Failed to build SSL Context", e); - } - return sslContext; + @Bean(name = "elasticSearchSSLContext") + public SSLContext createInstance() { + final SSLContextBuilder sslContextBuilder = new SSLContextBuilder(); + if (sslProtocol != null) { + sslContextBuilder.useProtocol(sslProtocol); } - private void loadKeyStore(@Nonnull SSLContextBuilder sslContextBuilder, @Nonnull String path, - @Nonnull String type, @Nonnull String password, @Nonnull String keyPassword) { - try (InputStream identityFile = new FileInputStream(path)) { - final KeyStore keystore = KeyStore.getInstance(type); - keystore.load(identityFile, password.toCharArray()); - sslContextBuilder.loadKeyMaterial(keystore, keyPassword.toCharArray()); - } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException | UnrecoverableKeyException e) { - throw new RuntimeException("Failed to load key store: " + path, e); - } + if (sslTrustStoreFile != null && sslTrustStoreType != null && sslTrustStorePassword != null) { + loadTrustStore( + sslContextBuilder, sslTrustStoreFile, sslTrustStoreType, sslTrustStorePassword); } - private void loadTrustStore(@Nonnull SSLContextBuilder sslContextBuilder, @Nonnull String path, - @Nonnull String type, @Nonnull String password) { - try 
(InputStream identityFile = new FileInputStream(path)) { - final KeyStore keystore = KeyStore.getInstance(type); - keystore.load(identityFile, password.toCharArray()); - sslContextBuilder.loadTrustMaterial(keystore, null); - } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) { - throw new RuntimeException("Failed to load key store: " + path, e); - } + if (sslKeyStoreFile != null + && sslKeyStoreType != null + && sslKeyStorePassword != null + && sslKeyPassword != null) { + loadKeyStore( + sslContextBuilder, sslKeyStoreFile, sslKeyStoreType, sslKeyStorePassword, sslKeyPassword); } + final SSLContext sslContext; + try { + if (sslSecureRandomImplementation != null) { + sslContextBuilder.setSecureRandom(SecureRandom.getInstance(sslSecureRandomImplementation)); + } + sslContext = sslContextBuilder.build(); + } catch (NoSuchAlgorithmException | KeyManagementException e) { + throw new RuntimeException("Failed to build SSL Context", e); + } + return sslContext; + } + + private void loadKeyStore( + @Nonnull SSLContextBuilder sslContextBuilder, + @Nonnull String path, + @Nonnull String type, + @Nonnull String password, + @Nonnull String keyPassword) { + try (InputStream identityFile = new FileInputStream(path)) { + final KeyStore keystore = KeyStore.getInstance(type); + keystore.load(identityFile, password.toCharArray()); + sslContextBuilder.loadKeyMaterial(keystore, keyPassword.toCharArray()); + } catch (IOException + | CertificateException + | NoSuchAlgorithmException + | KeyStoreException + | UnrecoverableKeyException e) { + throw new RuntimeException("Failed to load key store: " + path, e); + } + } + + private void loadTrustStore( + @Nonnull SSLContextBuilder sslContextBuilder, + @Nonnull String path, + @Nonnull String type, + @Nonnull String password) { + try (InputStream identityFile = new FileInputStream(path)) { + final KeyStore keystore = KeyStore.getInstance(type); + keystore.load(identityFile, password.toCharArray()); + sslContextBuilder.loadTrustMaterial(keystore, null); + } catch (IOException | CertificateException | NoSuchAlgorithmException | KeyStoreException e) { + throw new RuntimeException("Failed to load key store: " + path, e); + } + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java index ba66b678d82b9..63a2e42de1d1a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GitVersionFactory.java @@ -7,7 +7,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource("classpath:git.properties") public class GitVersionFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java index 1e37c735b5bd4..db4928cfe3764 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphClientFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({GraphServiceFactory.class}) 
public class GraphClientFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java index 94593eb1fb84c..d98dfcb617f84 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/GraphServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.common; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; +import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({Neo4jGraphServiceFactory.class, ElasticSearchGraphServiceFactory.class}) @@ -42,7 +41,8 @@ protected GraphService createInstance() { return _elasticSearchGraphService; } else { throw new RuntimeException( - "Error: Failed to initialize graph service. Graph Service provided: " + graphServiceImpl + "Error: Failed to initialize graph service. Graph Service provided: " + + graphServiceImpl + ". Valid options: [neo4j, elasticsearch]."); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java index ada8466d302e6..b268bb0937035 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/IndexConventionFactory.java @@ -8,7 +8,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - /** * Creates a {@link IndexConvention} to generate search index names. 
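 * <p>For example, with an index prefix of {@code datahub} configured, the dataset entity's search
 * index would resolve to a name like {@code datahub_datasetindex_v2}, and to
 * {@code datasetindex_v2} with no prefix (the names shown are illustrative of the convention).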
* diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java index 9805d554d5941..62d4beddd1ab1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalCassandraSessionConfigFactory.java @@ -1,10 +1,8 @@ package com.linkedin.gms.factory.common; -import lombok.extern.slf4j.Slf4j; - import java.util.HashMap; import java.util.Map; - +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -36,14 +34,16 @@ public class LocalCassandraSessionConfigFactory { @Bean(name = "gmsCassandraServiceConfig") protected Map<String, String> createInstance() { - return new HashMap<String, String>() {{ - put("username", datasourceUsername); - put("password", datasourcePassword); - put("hosts", hosts); - put("port", port); - put("datacenter", datacenter); - put("keyspace", keyspace); - put("useSsl", useSsl); - }}; + return new HashMap<String, String>() { + { + put("username", datasourceUsername); + put("password", datasourcePassword); + put("hosts", hosts); + put("port", port); + put("datacenter", datacenter); + put("keyspace", keyspace); + put("useSsl", useSsl); + } + }; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java index 6bf8ff123b221..08787cdb89aba 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/LocalEbeanServerConfigFactory.java @@ -15,7 +15,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -93,7 +92,8 @@ public DataSourceConfig buildDataSourceConfig(@Value("${ebean.url}") String data } @Bean(name = "gmsEbeanServiceConfig") - protected ServerConfig createInstance(@Qualifier("ebeanDataSourceConfig") DataSourceConfig config) { + protected ServerConfig createInstance( + @Qualifier("ebeanDataSourceConfig") DataSourceConfig config) { ServerConfig serverConfig = new ServerConfig(); serverConfig.setName("gmsEbeanServiceConfig"); serverConfig.setDataSourceConfig(config); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java index 65b6115d6638e..04ed29407518d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jDriverFactory.java @@ -2,7 +2,6 @@ import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.util.concurrent.TimeUnit; - import org.neo4j.driver.AuthTokens; import org.neo4j.driver.Config; import org.neo4j.driver.Driver; @@ -12,7 +11,6 @@ import org.springframework.context.annotation.Configuration; import
org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class Neo4jDriverFactory { @@ -49,10 +47,12 @@ protected Driver createInstance() { Config.ConfigBuilder builder = Config.builder(); builder.withMaxConnectionPoolSize(neo4jMaxConnectionPoolSize); - builder.withConnectionAcquisitionTimeout(neo4jMaxConnectionAcquisitionTimeout, TimeUnit.SECONDS); + builder.withConnectionAcquisitionTimeout( + neo4jMaxConnectionAcquisitionTimeout, TimeUnit.SECONDS); builder.withMaxConnectionLifetime(neo4jMaxConnectionLifetime(), TimeUnit.SECONDS); builder.withMaxTransactionRetryTime(neo4jMaxTransactionRetryTime, TimeUnit.SECONDS); - builder.withConnectionLivenessCheckTimeout(neo4jConnectionLivenessCheckTimeout, TimeUnit.SECONDS); + builder.withConnectionLivenessCheckTimeout( + neo4jConnectionLivenessCheckTimeout, TimeUnit.SECONDS); return GraphDatabase.driver(uri, AuthTokens.basic(username, password), builder.build()); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java index 87670ce10f481..d3b0cd8aa6d92 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/Neo4jGraphServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.common; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; -import com.linkedin.metadata.models.registry.LineageRegistry; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.models.registry.LineageRegistry; import javax.annotation.Nonnull; import org.neo4j.driver.Driver; import org.neo4j.driver.SessionConfig; @@ -14,7 +14,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({Neo4jDriverFactory.class, EntityRegistryFactory.class}) public class Neo4jGraphServiceFactory { @@ -33,6 +32,7 @@ public class Neo4jGraphServiceFactory { @Nonnull protected Neo4jGraphService getInstance() { LineageRegistry lineageRegistry = new LineageRegistry(entityRegistry); - return new Neo4jGraphService(lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase)); + return new Neo4jGraphService( + lineageRegistry, neo4jDriver, SessionConfig.forDatabase(neo4jDatabase)); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java index 3c40b30bfc7d1..ddd31f2692934 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/RestHighLevelClientFactory.java @@ -10,10 +10,15 @@ import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpHost; import org.apache.http.HttpRequestInterceptor; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; import org.apache.http.config.RegistryBuilder; import org.apache.http.conn.ssl.DefaultHostnameVerifier; import 
org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.conn.util.PublicSuffixMatcherLoader; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.impl.nio.reactor.IOReactorConfig; @@ -33,11 +38,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.auth.AuthScope; import org.springframework.context.annotation.PropertySource; import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; import software.amazon.awssdk.auth.signer.Aws4Signer; @@ -45,7 +45,7 @@ @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) -@Import({ ElasticsearchSSLContextFactory.class }) +@Import({ElasticsearchSSLContextFactory.class}) public class RestHighLevelClientFactory { @Value("${elasticsearch.host}") @@ -93,21 +93,26 @@ public RestHighLevelClient createInstance(RestClientBuilder restClientBuilder) { public RestClientBuilder loadRestClient() { final RestClientBuilder builder = createBuilder(useSSL ? "https" : "http"); - builder.setHttpClientConfigCallback(httpAsyncClientBuilder -> { - if (useSSL) { - httpAsyncClientBuilder.setSSLContext(sslContext).setSSLHostnameVerifier(new NoopHostnameVerifier()); - } - try { - httpAsyncClientBuilder.setConnectionManager(createConnectionManager()); - } catch (IOReactorException e) { - throw new IllegalStateException("Unable to start ElasticSearch client. Please verify connection configuration."); - } - httpAsyncClientBuilder.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(threadCount).build()); - - setCredentials(httpAsyncClientBuilder); - - return httpAsyncClientBuilder; - }); + builder.setHttpClientConfigCallback( + httpAsyncClientBuilder -> { + if (useSSL) { + httpAsyncClientBuilder + .setSSLContext(sslContext) + .setSSLHostnameVerifier(new NoopHostnameVerifier()); + } + try { + httpAsyncClientBuilder.setConnectionManager(createConnectionManager()); + } catch (IOReactorException e) { + throw new IllegalStateException( + "Unable to start ElasticSearch client. Please verify connection configuration."); + } + httpAsyncClientBuilder.setDefaultIOReactorConfig( + IOReactorConfig.custom().setIoThreadCount(threadCount).build()); + + setCredentials(httpAsyncClientBuilder); + + return httpAsyncClientBuilder; + }); return builder; } @@ -121,41 +126,47 @@ private RestClientBuilder createBuilder(String scheme) { } builder.setRequestConfigCallback( - requestConfigBuilder -> requestConfigBuilder.setConnectionRequestTimeout(connectionRequestTimeout)); + requestConfigBuilder -> + requestConfigBuilder.setConnectionRequestTimeout(connectionRequestTimeout)); return builder; } /** - * Needed to override ExceptionHandler behavior for cases where IO error would have put client in unrecoverable state - * We don't utilize system properties in the client builder, so setting defaults pulled from - * {@link HttpAsyncClientBuilder#build()}. 
+ * Needed to override ExceptionHandler behavior for cases where IO error would have put client in + * unrecoverable state. We don't utilize system properties in the client builder, so setting + * defaults pulled from {@link HttpAsyncClientBuilder#build()}. + * + * @return */ private NHttpClientConnectionManager createConnectionManager() throws IOReactorException { SSLContext sslContext = SSLContexts.createDefault(); - HostnameVerifier hostnameVerifier = new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()); + HostnameVerifier hostnameVerifier = + new DefaultHostnameVerifier(PublicSuffixMatcherLoader.getDefault()); SchemeIOSessionStrategy sslStrategy = new SSLIOSessionStrategy(sslContext, null, null, hostnameVerifier); - IOReactorConfig ioReactorConfig = IOReactorConfig.custom().setIoThreadCount(threadCount).build(); + IOReactorConfig ioReactorConfig = + IOReactorConfig.custom().setIoThreadCount(threadCount).build(); DefaultConnectingIOReactor ioReactor = new DefaultConnectingIOReactor(ioReactorConfig); - IOReactorExceptionHandler ioReactorExceptionHandler = new IOReactorExceptionHandler() { - @Override - public boolean handle(IOException ex) { - log.error("IO Exception caught during ElasticSearch connection.", ex); - return true; - } - - @Override - public boolean handle(RuntimeException ex) { - log.error("Runtime Exception caught during ElasticSearch connection.", ex); - return true; - } - }; + IOReactorExceptionHandler ioReactorExceptionHandler = + new IOReactorExceptionHandler() { + @Override + public boolean handle(IOException ex) { + log.error("IO Exception caught during ElasticSearch connection.", ex); + return true; + } + + @Override + public boolean handle(RuntimeException ex) { + log.error("Runtime Exception caught during ElasticSearch connection.", ex); + return true; + } + }; ioReactor.setExceptionHandler(ioReactorExceptionHandler); - return new PoolingNHttpClientConnectionManager(ioReactor, + return new PoolingNHttpClientConnectionManager( + ioReactor, RegistryBuilder.<SchemeIOSessionStrategy>create() .register("http", NoopIOSessionStrategy.INSTANCE) .register("https", sslStrategy) @@ -165,7 +176,8 @@ public boolean handle(RuntimeException ex) { private void setCredentials(HttpAsyncClientBuilder httpAsyncClientBuilder) { if (username != null && password != null) { final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(username, password)); + credentialsProvider.setCredentials( + AuthScope.ANY, new UsernamePasswordCredentials(username, password)); httpAsyncClientBuilder.setDefaultCredentialsProvider(credentialsProvider); } if (opensearchUseAwsIamAuth) { @@ -177,11 +189,12 @@ private void setCredentials(HttpAsyncClientBuilder httpAsyncClientBuilder) { private HttpRequestInterceptor getAwsRequestSigningInterceptor(String region) { if (region == null) { - throw new IllegalArgumentException("Region must not be null when opensearchUseAwsIamAuth is enabled"); + throw new IllegalArgumentException( + "Region must not be null when opensearchUseAwsIamAuth is enabled"); } Aws4Signer signer = Aws4Signer.create(); // Uses default AWS credentials - return new AwsRequestSigningApacheInterceptor("es", signer, - DefaultCredentialsProvider.create(), region); + return new AwsRequestSigningApacheInterceptor( + "es", signer, DefaultCredentialsProvider.create(), region); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java index 3ba6965577204..5663162186b83 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SiblingGraphServiceFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({GraphServiceFactory.class, EntityServiceFactory.class}) public class SiblingGraphServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java index 241c93f438bf1..1c17e433d5507 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/SystemMetadataServiceFactory.java @@ -11,7 +11,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({ElasticSearchSystemMetadataServiceFactory.class}) public class SystemMetadataServiceFactory { @@ -24,6 +23,6 @@ public class SystemMetadataServiceFactory { @Bean(name = "systemMetadataService") @Primary protected SystemMetadataService createInstance() { - return _elasticSearchSystemMetadataService; + return _elasticSearchSystemMetadataService; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java index c7df8b1cde6ec..fac0bf0c46685 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/common/TopicConventionFactory.java @@ -7,7 +7,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - /** * Creates a {@link TopicConvention} to generate kafka metadata event topic names. 
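 * <p>Each topic name read below is overridable via an environment variable with a default taken
 * from {@code Topics}; e.g. the versioned MCL topic resolves roughly like
 * {@code System.getenv().getOrDefault("METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME",
 * Topics.METADATA_CHANGE_LOG_VERSIONED)}, expressed through Spring {@code @Value} placeholders.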
* @@ -32,10 +31,14 @@ public class TopicConventionFactory { @Value("${METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_VERSIONED + "}") private String metadataChangeLogVersionedTopicName; - @Value("${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}") + @Value( + "${METADATA_CHANGE_LOG_TIMESERIES_TOPIC_NAME:" + Topics.METADATA_CHANGE_LOG_TIMESERIES + "}") private String metadataChangeLogTimeseriesTopicName; - @Value("${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + Topics.FAILED_METADATA_CHANGE_PROPOSAL + "}") + @Value( + "${FAILED_METADATA_CHANGE_PROPOSAL_TOPIC_NAME:" + + Topics.FAILED_METADATA_CHANGE_PROPOSAL + + "}") private String failedMetadataChangeProposalName; @Value("${PLATFORM_EVENT_TOPIC_NAME:" + Topics.PLATFORM_EVENT + "}") @@ -46,10 +49,17 @@ public class TopicConventionFactory { @Bean(name = TOPIC_CONVENTION_BEAN) protected TopicConvention createInstance() { - return new TopicConventionImpl(metadataChangeEventName, metadataAuditEventName, failedMetadataChangeEventName, - metadataChangeProposalName, metadataChangeLogVersionedTopicName, metadataChangeLogTimeseriesTopicName, - failedMetadataChangeProposalName, platformEventTopicName, + return new TopicConventionImpl( + metadataChangeEventName, + metadataAuditEventName, + failedMetadataChangeEventName, + metadataChangeProposalName, + metadataChangeLogVersionedTopicName, + metadataChangeLogTimeseriesTopicName, + failedMetadataChangeProposalName, + platformEventTopicName, // TODO once we start rolling out v5 add support for changing the new event names. - TopicConventionImpl.DEFAULT_EVENT_PATTERN, dataHubUpgradeHistoryTopicName); + TopicConventionImpl.DEFAULT_EVENT_PATTERN, + dataHubUpgradeHistoryTopicName); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java index 465480be344c7..5c7c2370ab337 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/ConfigurationProvider.java @@ -2,6 +2,7 @@ import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authorization.AuthorizationConfiguration; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.metadata.config.DataHubConfiguration; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.config.SystemUpdateConfiguration; @@ -11,76 +12,57 @@ import com.linkedin.metadata.config.cache.CacheConfiguration; import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.config.search.ElasticSearchConfiguration; -import com.linkedin.datahub.graphql.featureflags.FeatureFlags; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.config.telemetry.TelemetryConfiguration; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import lombok.Data; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @ConfigurationProperties @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Data public class ConfigurationProvider { - /** - * Authentication related configs - */ + /** 
Authentication related configs */ private AuthenticationConfiguration authentication; - /** - * Authorizer related configs - */ + + /** Authorizer related configs */ private AuthorizationConfiguration authorization; - /** - * Ingestion related configs - */ + + /** Ingestion related configs */ private IngestionConfiguration ingestion; - /** - * Telemetry related configs - */ + + /** Telemetry related configs */ private TelemetryConfiguration telemetry; - /** - * Viz related configs - */ + + /** Viz related configs */ private VisualConfiguration visualConfig; - /** - * Tests related configs - */ + + /** Tests related configs */ private TestsConfiguration metadataTests; - /** - * DataHub top-level server configurations - */ + + /** DataHub top-level server configurations */ private DataHubConfiguration datahub; - /** - * Views feature related configs - */ + + /** Views feature related configs */ private ViewsConfiguration views; - /** - * Feature flags indicating what is turned on vs turned off - */ + + /** Feature flags indicating what is turned on vs turned off */ private FeatureFlags featureFlags; - /** - * Kafka related configs. - */ + + /** Kafka related configs. */ private KafkaConfiguration kafka; - /** - * ElasticSearch configurations - */ + + /** ElasticSearch configurations */ private ElasticSearchConfiguration elasticSearch; - /** - * System Update configurations - */ + + /** System Update configurations */ private SystemUpdateConfiguration systemUpdate; - /** - * Configuration for caching - */ + /** Configuration for caching */ private CacheConfiguration cache; - /** - * Configuration for the health check server - */ + /** Configuration for the health check server */ private HealthCheckConfiguration healthCheck; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java index 6eadf06288d29..23b7ec9edd306 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/config/HealthCheckConfiguration.java @@ -2,7 +2,6 @@ import lombok.Data; - @Data public class HealthCheckConfiguration { private int cacheDurationSeconds; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java index 6eab711603c52..739211855cacd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/dataproduct/DataProductServiceFactory.java @@ -1,9 +1,10 @@ package com.linkedin.gms.factory.dataproduct; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; @@ -11,8 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; 
-import javax.annotation.Nonnull; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class DataProductServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java index ae20f7e96ba40..326537ee07cbd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/CassandraSessionFactory.java @@ -2,6 +2,13 @@ import com.datastax.oss.driver.api.core.CqlSession; import com.datastax.oss.driver.api.core.CqlSessionBuilder; +import java.net.InetSocketAddress; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.net.ssl.SSLContext; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; @@ -9,14 +16,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; -import javax.net.ssl.SSLContext; -import java.net.InetSocketAddress; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - @Configuration public class CassandraSessionFactory { @@ -30,20 +29,22 @@ public class CassandraSessionFactory { @Nonnull protected CqlSession createSession() { int port = Integer.parseInt(sessionConfig.get("port")); - List addresses = Arrays.stream(sessionConfig.get("hosts").split(",")) - .map(host -> new InetSocketAddress(host, port)) - .collect(Collectors.toList()); + List addresses = + Arrays.stream(sessionConfig.get("hosts").split(",")) + .map(host -> new InetSocketAddress(host, port)) + .collect(Collectors.toList()); String dc = sessionConfig.get("datacenter"); String ks = sessionConfig.get("keyspace"); String username = sessionConfig.get("username"); String password = sessionConfig.get("password"); - CqlSessionBuilder csb = CqlSession.builder() - .addContactPoints(addresses) - .withLocalDatacenter(dc) - .withKeyspace(ks) - .withAuthCredentials(username, password); + CqlSessionBuilder csb = + CqlSession.builder() + .addContactPoints(addresses) + .withLocalDatacenter(dc) + .withKeyspace(ks) + .withAuthCredentials(username, password); if (sessionConfig.containsKey("useSsl") && sessionConfig.get("useSsl").equals("true")) { try { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java index 9feb7e469d018..2bfe7bff1b45a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EbeanServerFactory.java @@ -12,14 +12,12 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - @Configuration @Slf4j public class EbeanServerFactory { public static final String EBEAN_MODEL_PACKAGE = EbeanAspectV2.class.getPackage().getName(); - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext 
applicationContext; @Bean(name = "ebeanServer") @DependsOn({"gmsEbeanServiceConfig"}) diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java index 925689c8609db..94aebb2a39efa 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectDaoFactory.java @@ -5,13 +5,12 @@ import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import io.ebean.Database; +import javax.annotation.Nonnull; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; - @Configuration public class EntityAspectDaoFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java index 4000f7d6ed058..9123714de5bc8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityAspectMigrationsDaoFactory.java @@ -5,13 +5,12 @@ import com.linkedin.metadata.entity.cassandra.CassandraAspectDao; import com.linkedin.metadata.entity.ebean.EbeanAspectDao; import io.ebean.Database; +import javax.annotation.Nonnull; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; - @Configuration public class EntityAspectMigrationsDaoFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java index f1c1a7b743714..e75ec0c0dc44a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/EntityServiceFactory.java @@ -11,6 +11,7 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.service.UpdateIndicesService; import com.linkedin.mxe.TopicConvention; +import javax.annotation.Nonnull; import org.apache.avro.generic.IndexedRecord; import org.apache.kafka.clients.producer.Producer; import org.springframework.beans.factory.annotation.Qualifier; @@ -19,9 +20,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import javax.annotation.Nonnull; - - @Configuration public class EntityServiceFactory { @@ -29,22 +27,35 @@ public class EntityServiceFactory { private Integer _ebeanMaxTransactionRetry; @Bean(name = "entityService") - @DependsOn({"entityAspectDao", "kafkaEventProducer", "kafkaHealthChecker", - TopicConventionFactory.TOPIC_CONVENTION_BEAN, "entityRegistry"}) + @DependsOn({ + "entityAspectDao", + 
"kafkaEventProducer", + "kafkaHealthChecker", + TopicConventionFactory.TOPIC_CONVENTION_BEAN, + "entityRegistry" + }) @Nonnull protected EntityService createInstance( - Producer producer, - TopicConvention convention, - KafkaHealthChecker kafkaHealthChecker, - @Qualifier("entityAspectDao") AspectDao aspectDao, - EntityRegistry entityRegistry, - ConfigurationProvider configurationProvider, - UpdateIndicesService updateIndicesService) { - - final KafkaEventProducer eventProducer = new KafkaEventProducer(producer, convention, kafkaHealthChecker); + Producer producer, + TopicConvention convention, + KafkaHealthChecker kafkaHealthChecker, + @Qualifier("entityAspectDao") AspectDao aspectDao, + EntityRegistry entityRegistry, + ConfigurationProvider configurationProvider, + UpdateIndicesService updateIndicesService) { + + final KafkaEventProducer eventProducer = + new KafkaEventProducer(producer, convention, kafkaHealthChecker); FeatureFlags featureFlags = configurationProvider.getFeatureFlags(); - EntityService entityService = new EntityServiceImpl(aspectDao, eventProducer, entityRegistry, - featureFlags.isAlwaysEmitChangeLog(), updateIndicesService, featureFlags.getPreProcessHooks(), _ebeanMaxTransactionRetry); + EntityService entityService = + new EntityServiceImpl( + aspectDao, + eventProducer, + entityRegistry, + featureFlags.isAlwaysEmitChangeLog(), + updateIndicesService, + featureFlags.getPreProcessHooks(), + _ebeanMaxTransactionRetry); return entityService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java index 3f2388f4829e3..080845147766f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/JavaEntityClientFactory.java @@ -1,10 +1,10 @@ package com.linkedin.gms.factory.entity; import com.datahub.authentication.Authentication; -import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.entity.client.RestliEntityClient; +import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.DeleteEntityService; import com.linkedin.metadata.entity.EntityService; @@ -21,7 +21,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @ConditionalOnExpression("'${entityClient.preferredImpl:java}'.equals('java')") @Import({DataHubKafkaProducerFactory.class}) @@ -60,7 +59,8 @@ public class JavaEntityClientFactory { private EventProducer _eventProducer; @Bean("javaEntityClient") - public JavaEntityClient getJavaEntityClient(@Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { + public JavaEntityClient getJavaEntityClient( + @Qualifier("restliEntityClient") final RestliEntityClient restliEntityClient) { return new JavaEntityClient( _entityService, _deleteEntityService, @@ -74,10 +74,12 @@ public JavaEntityClient getJavaEntityClient(@Qualifier("restliEntityClient") fin } @Bean("systemJavaEntityClient") - public SystemJavaEntityClient systemJavaEntityClient(@Qualifier("configurationProvider") final 
ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication, - @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) { - SystemJavaEntityClient systemJavaEntityClient = new SystemJavaEntityClient( + public SystemJavaEntityClient systemJavaEntityClient( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Qualifier("systemAuthentication") final Authentication systemAuthentication, + @Qualifier("systemRestliEntityClient") final RestliEntityClient restliEntityClient) { + SystemJavaEntityClient systemJavaEntityClient = + new SystemJavaEntityClient( _entityService, _deleteEntityService, _entitySearchService, diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java index dfc5e835392df..1dee8c4aa4d27 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RestliEntityClientFactory.java @@ -4,19 +4,17 @@ import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.restli.DefaultRestliClientFactory; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.restli.client.Client; +import java.net.URI; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; -import java.net.URI; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RestliEntityClientFactory { @@ -48,21 +46,28 @@ public RestliEntityClient getRestliEntityClient() { if (gmsUri != null) { restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol); } else { - restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); + restClient = + DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); } return new RestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries); } @Bean("systemRestliEntityClient") - public SystemRestliEntityClient systemRestliEntityClient(@Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, - @Qualifier("systemAuthentication") final Authentication systemAuthentication) { + public SystemRestliEntityClient systemRestliEntityClient( + @Qualifier("configurationProvider") final ConfigurationProvider configurationProvider, + @Qualifier("systemAuthentication") final Authentication systemAuthentication) { final Client restClient; if (gmsUri != null) { restClient = DefaultRestliClientFactory.getRestLiClient(URI.create(gmsUri), gmsSslProtocol); } else { - restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol); + restClient = + DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, 
gmsSslProtocol); } - return new SystemRestliEntityClient(restClient, new ExponentialBackoff(retryInterval), numRetries, - systemAuthentication, configurationProvider.getCache().getClient().getEntityClient()); + return new SystemRestliEntityClient( + restClient, + new ExponentialBackoff(retryInterval), + numRetries, + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java index ff56f19e4f8fd..b02541586de49 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RetentionServiceFactory.java @@ -1,12 +1,13 @@ package com.linkedin.gms.factory.entity; import com.datastax.oss.driver.api.core.CqlSession; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; import com.linkedin.metadata.entity.cassandra.CassandraRetentionService; import com.linkedin.metadata.entity.ebean.EbeanRetentionService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import io.ebean.Database; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -16,9 +17,6 @@ import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.PropertySource; -import javax.annotation.Nonnull; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class RetentionServiceFactory { @@ -30,24 +28,24 @@ public class RetentionServiceFactory { @Value("${RETENTION_APPLICATION_BATCH_SIZE:1000}") private Integer _batchSize; - @Bean(name = "retentionService") @DependsOn({"cassandraSession", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") @Nonnull protected RetentionService createCassandraInstance(CqlSession session) { - RetentionService retentionService = new CassandraRetentionService(_entityService, session, _batchSize); + RetentionService retentionService = + new CassandraRetentionService(_entityService, session, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } - @Bean(name = "retentionService") @DependsOn({"ebeanServer", "entityService"}) @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull protected RetentionService createEbeanInstance(Database server) { - RetentionService retentionService = new EbeanRetentionService(_entityService, server, _batchSize); + RetentionService retentionService = + new EbeanRetentionService(_entityService, server, _batchSize); _entityService.setRetentionService(retentionService); return retentionService; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java index a4ea02af94bad..d8c1422f988c2 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/update/indices/UpdateIndicesServiceFactory.java @@ -17,23 +17,32 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import(EntityIndexBuildersFactory.class) public class UpdateIndicesServiceFactory { - @Autowired - private ApplicationContext context; + @Autowired private ApplicationContext context; + @Value("${entityClient.preferredImpl:java}") private String entityClientImpl; @Bean - public UpdateIndicesService updateIndicesService(GraphService graphService, EntitySearchService entitySearchService, - TimeseriesAspectService timeseriesAspectService, - SystemMetadataService systemMetadataService, - EntityRegistry entityRegistry, SearchDocumentTransformer searchDocumentTransformer, - EntityIndexBuilders entityIndexBuilders) { - UpdateIndicesService updateIndicesService = new UpdateIndicesService(graphService, entitySearchService, timeseriesAspectService, - systemMetadataService, entityRegistry, searchDocumentTransformer, entityIndexBuilders); + public UpdateIndicesService updateIndicesService( + GraphService graphService, + EntitySearchService entitySearchService, + TimeseriesAspectService timeseriesAspectService, + SystemMetadataService systemMetadataService, + EntityRegistry entityRegistry, + SearchDocumentTransformer searchDocumentTransformer, + EntityIndexBuilders entityIndexBuilders) { + UpdateIndicesService updateIndicesService = + new UpdateIndicesService( + graphService, + entitySearchService, + timeseriesAspectService, + systemMetadataService, + entityRegistry, + searchDocumentTransformer, + entityIndexBuilders); if ("restli".equals(entityClientImpl)) { updateIndicesService.setSystemEntityClient(context.getBean(SystemRestliEntityClient.class)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java index cda21f8907867..356fb226937dd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/ConfigEntityRegistryFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.entityregistry; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistryException; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.io.IOException; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Value; @@ -11,7 +11,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.core.io.Resource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ConfigEntityRegistryFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java index 962bab56cbbf5..2c65eeafe063b 100644 --- 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/EntityRegistryFactory.java @@ -32,7 +32,8 @@ public class EntityRegistryFactory { @Primary @Nonnull protected EntityRegistry getInstance() throws EntityRegistryException { - MergedEntityRegistry baseEntityRegistry = new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); + MergedEntityRegistry baseEntityRegistry = + new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(configEntityRegistry); pluginEntityRegistryLoader.withBaseRegistry(baseEntityRegistry).start(true); return baseEntityRegistry; } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java index 6dbb07309c7cc..8c6a4ad998aff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entityregistry/PluginEntityRegistryFactory.java @@ -1,7 +1,7 @@ package com.linkedin.gms.factory.entityregistry; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.PluginEntityRegistryLoader; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import java.io.FileNotFoundException; import java.net.MalformedURLException; import javax.annotation.Nonnull; @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class PluginEntityRegistryFactory { @@ -20,7 +19,8 @@ public class PluginEntityRegistryFactory { @Bean(name = "pluginEntityRegistry") @Nonnull - protected PluginEntityRegistryLoader getInstance() throws FileNotFoundException, MalformedURLException { + protected PluginEntityRegistryLoader getInstance() + throws FileNotFoundException, MalformedURLException { return new PluginEntityRegistryLoader(pluginRegistryPath); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index c50b4c9088bc2..723715a13b1c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -2,24 +2,24 @@ import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.post.PostService; import com.datahub.authentication.token.StatefulTokenService; import com.datahub.authentication.user.NativeUserService; import com.datahub.authorization.role.RoleService; -import com.datahub.authentication.post.PostService; import com.linkedin.datahub.graphql.GmsGraphQLEngine; import com.linkedin.datahub.graphql.GmsGraphQLEngineArgs; import com.linkedin.datahub.graphql.GraphQLEngine; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; -import com.linkedin.metadata.client.JavaEntityClient; import 
com.linkedin.gms.factory.auth.DataHubTokenServiceFactory; import com.linkedin.gms.factory.common.GitVersionFactory; import com.linkedin.gms.factory.common.IndexConventionFactory; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; import com.linkedin.gms.factory.common.SiblingGraphServiceFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.entity.RestliEntityClientFactory; +import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.recommendation.RecommendationServiceFactory; +import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.client.SystemJavaEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; @@ -29,11 +29,11 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; -import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; @@ -48,11 +48,17 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration -@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, RestliEntityClientFactory.class, - RecommendationServiceFactory.class, EntityRegistryFactory.class, DataHubTokenServiceFactory.class, - GitVersionFactory.class, SiblingGraphServiceFactory.class}) +@Import({ + RestHighLevelClientFactory.class, + IndexConventionFactory.class, + RestliEntityClientFactory.class, + RecommendationServiceFactory.class, + EntityRegistryFactory.class, + DataHubTokenServiceFactory.class, + GitVersionFactory.class, + SiblingGraphServiceFactory.class +}) public class GraphQLEngineFactory { @Autowired @Qualifier("elasticSearchRestHighLevelClient") @@ -169,7 +175,6 @@ public class GraphQLEngineFactory { @Value("${platformAnalytics.enabled}") // TODO: Migrate to DATAHUB_ANALYTICS_ENABLED private Boolean isAnalyticsEnabled; - @Bean(name = "graphQLEngine") @Nonnull protected GraphQLEngine getInstance() { @@ -211,8 +216,6 @@ protected GraphQLEngine getInstance() { args.setQueryService(_queryService); args.setFeatureFlags(_configProvider.getFeatureFlags()); args.setDataProductService(_dataProductService); - return new GmsGraphQLEngine( - args - ).builder().build(); + return new GmsGraphQLEngine(args).builder().build(); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java index 9beb617c4f6e8..78b9c5d52efdd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ingestion/IngestionSchedulerFactory.java @@ -16,7 +16,6 @@ import org.springframework.context.annotation.PropertySource; import 
org.springframework.context.annotation.Scope;
-
 @Import({SystemAuthenticationFactory.class, RestliEntityClientFactory.class})
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class IngestionSchedulerFactory {
@@ -33,17 +32,23 @@ public class IngestionSchedulerFactory {
   @Qualifier("configurationProvider")
   private ConfigurationProvider _configProvider;
 
-  @Value("${ingestion.scheduler.delayIntervalSeconds:45}") // Boot up ingestion source cache after waiting 45 seconds for startup.
+  @Value("${ingestion.scheduler.delayIntervalSeconds:45}") // Boot up ingestion source cache after
+  // waiting 45 seconds for startup.
   private Integer _delayIntervalSeconds;
 
-  @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}") // By default, refresh ingestion sources 2 times per day.
+  @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}") // By default, refresh ingestion
+  // sources 2 times per day.
   private Integer _refreshIntervalSeconds;
 
   @Bean(name = "ingestionScheduler")
   @Scope("singleton")
   @Nonnull
   protected IngestionScheduler getInstance() {
-    return new IngestionScheduler(_systemAuthentication, _entityClient, _configProvider.getIngestion(),
-        _delayIntervalSeconds, _refreshIntervalSeconds);
+    return new IngestionScheduler(
+        _systemAuthentication,
+        _entityClient,
+        _configProvider.getIngestion(),
+        _delayIntervalSeconds,
+        _refreshIntervalSeconds);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java
index 675f015d9e378..41807d0daaa72 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaEventProducerFactory.java
@@ -1,9 +1,9 @@
 package com.linkedin.gms.factory.kafka;
 
 import com.linkedin.gms.factory.common.TopicConventionFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.dao.producer.KafkaEventProducer;
 import com.linkedin.metadata.dao.producer.KafkaHealthChecker;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.mxe.TopicConvention;
 import org.apache.avro.generic.IndexedRecord;
 import org.apache.kafka.clients.producer.Producer;
@@ -14,7 +14,6 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @Import({DataHubKafkaProducerFactory.class, TopicConventionFactory.class, KafkaHealthChecker.class})
@@ -28,14 +27,10 @@ public class DataHubKafkaEventProducerFactory {
   @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN)
   private TopicConvention topicConvention;
 
-  @Autowired
-  private KafkaHealthChecker kafkaHealthChecker;
+  @Autowired private KafkaHealthChecker kafkaHealthChecker;
 
   @Bean(name = "kafkaEventProducer")
   protected KafkaEventProducer createInstance() {
-    return new KafkaEventProducer(
-        kafkaProducer,
-        topicConvention,
-        kafkaHealthChecker);
+    return new KafkaEventProducer(kafkaProducer, topicConvention, kafkaHealthChecker);
   }
 }
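The @Value fields in the scheduler factory above use Spring's ${property:default} placeholder form: when the property is absent, the literal after the colon applies, so the scheduler boots with a 45-second delay and a twice-daily refresh even when no ingestion.scheduler.* settings are present. A minimal, illustrative sketch of the pattern (the class below is not part of this patch):

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
class SchedulerTimingDefaults {
  // Resolves to 45 when ingestion.scheduler.delayIntervalSeconds is unset.
  @Value("${ingestion.scheduler.delayIntervalSeconds:45}")
  private Integer delayIntervalSeconds;

  // Resolves to 43200 (twice per day) when no refresh interval is configured.
  @Value("${ingestion.scheduler.refreshIntervalSeconds:43200}")
  private Integer refreshIntervalSeconds;
}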
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
index 78b3de501e0e5..0b331ffc40be4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/DataHubKafkaProducerFactory.java
@@ -1,11 +1,11 @@
 package com.linkedin.gms.factory.kafka;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import java.util.Arrays;
 import java.util.Map;
@@ -23,11 +23,14 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 @EnableConfigurationProperties({KafkaProperties.class})
-@Import({KafkaSchemaRegistryFactory.class, AwsGlueSchemaRegistryFactory.class, InternalSchemaRegistryFactory.class})
+@Import({
+  KafkaSchemaRegistryFactory.class,
+  AwsGlueSchemaRegistryFactory.class,
+  InternalSchemaRegistryFactory.class
+})
 public class DataHubKafkaProducerFactory {
 
   @Autowired
@@ -35,20 +38,26 @@ public class DataHubKafkaProducerFactory {
   private SchemaRegistryConfig _schemaRegistryConfig;
 
   @Bean(name = "kafkaProducer")
-  protected Producer<String, IndexedRecord> createInstance(@Qualifier("configurationProvider") ConfigurationProvider
-      provider, KafkaProperties properties) {
+  protected Producer<String, IndexedRecord> createInstance(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties properties) {
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    return new KafkaProducer<>(buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties));
+    return new KafkaProducer<>(
+        buildProducerProperties(_schemaRegistryConfig, kafkaConfiguration, properties));
   }
 
-  public static Map<String, Object> buildProducerProperties(SchemaRegistryConfig schemaRegistryConfig,
-      KafkaConfiguration kafkaConfiguration, KafkaProperties properties) {
+  public static Map<String, Object> buildProducerProperties(
+      SchemaRegistryConfig schemaRegistryConfig,
+      KafkaConfiguration kafkaConfiguration,
+      KafkaProperties properties) {
     KafkaProperties.Producer producerProps = properties.getProducer();
     producerProps.setKeySerializer(StringSerializer.class);
     // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
-    if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
-      producerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
+    if (kafkaConfiguration.getBootstrapServers() != null
+        && kafkaConfiguration.getBootstrapServers().length() > 0) {
+      producerProps.setBootstrapServers(
+          Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
     } // else we rely on KafkaProperties which defaults to localhost:9092
     Map<String, Object> props = properties.buildProducerProperties();
@@ -56,18 +65,27 @@ public static Map buildProducerProperties(SchemaRegistryConfig s
     props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getSerializer());
 
     props.put(ProducerConfig.RETRIES_CONFIG, kafkaConfiguration.getProducer().getRetryCount());
-    props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getDeliveryTimeout());
-    props.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, kafkaConfiguration.getProducer().getRequestTimeout());
-    props.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, kafkaConfiguration.getProducer().getBackoffTimeout());
-    props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, kafkaConfiguration.getProducer().getCompressionType());
-    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, kafkaConfiguration.getProducer().getMaxRequestSize());
+    props.put(
+        ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG,
+        kafkaConfiguration.getProducer().getDeliveryTimeout());
+    props.put(
+        ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG,
+        kafkaConfiguration.getProducer().getRequestTimeout());
+    props.put(
+        ProducerConfig.RETRY_BACKOFF_MS_CONFIG,
+        kafkaConfiguration.getProducer().getBackoffTimeout());
+    props.put(
+        ProducerConfig.COMPRESSION_TYPE_CONFIG,
+        kafkaConfiguration.getProducer().getCompressionType());
+    props.put(
+        ProducerConfig.MAX_REQUEST_SIZE_CONFIG,
+        kafkaConfiguration.getProducer().getMaxRequestSize());
 
     // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
-    schemaRegistryConfig.getProperties().entrySet()
-        .stream()
-        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
-        .forEach(entry -> props.put(entry.getKey(), entry.getValue()));
+    schemaRegistryConfig.getProperties().entrySet().stream()
+        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
+        .forEach(entry -> props.put(entry.getKey(), entry.getValue()));
 
     return props;
   }
-}
\ No newline at end of file
+}
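The producer factory above and the consumer factory below share one override rule: schema-registry properties are layered on top of the Spring-derived defaults only when they are non-empty. A standalone sketch of that merge using plain JDK collections (the helper name is illustrative, not DataHub API):

import java.util.HashMap;
import java.util.Map;

final class NonEmptyOverrideMerger {
  // Copies an override entry onto the base map only when its value is neither
  // null nor an empty string, mirroring the stream filter in the diff above.
  static Map<String, Object> merge(Map<String, Object> base, Map<String, Object> overrides) {
    Map<String, Object> merged = new HashMap<>(base);
    overrides.entrySet().stream()
        .filter(e -> e.getValue() != null && !e.getValue().toString().isEmpty())
        .forEach(e -> merged.put(e.getKey(), e.getValue()));
    return merged;
  }
}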
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
index 7a9e80781d639..2a6338ac15e93 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/KafkaEventConsumerFactory.java
@@ -1,16 +1,14 @@
 package com.linkedin.gms.factory.kafka;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.kafka.schemaregistry.AwsGlueSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.KafkaSchemaRegistryFactory;
 import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig;
-
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import java.time.Duration;
 import java.util.Arrays;
 import java.util.Map;
-
 import lombok.extern.slf4j.Slf4j;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -26,98 +24,109 @@
 @Slf4j
 @Configuration
-@Import({KafkaSchemaRegistryFactory.class, AwsGlueSchemaRegistryFactory.class, InternalSchemaRegistryFactory.class})
+@Import({
+  KafkaSchemaRegistryFactory.class,
+  AwsGlueSchemaRegistryFactory.class,
+  InternalSchemaRegistryFactory.class
+})
 public class KafkaEventConsumerFactory {
-
-  private int kafkaEventConsumerConcurrency;
-
-  @Bean(name = "kafkaConsumerFactory")
-  protected DefaultKafkaConsumerFactory<String, GenericRecord> createConsumerFactory(
-      @Qualifier("configurationProvider") ConfigurationProvider provider,
-      KafkaProperties baseKafkaProperties,
-      SchemaRegistryConfig schemaRegistryConfig) {
-    kafkaEventConsumerConcurrency = provider.getKafka().getListener().getConcurrency();
-
-    KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    Map<String, Object> customizedProperties = buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration,
-        schemaRegistryConfig);
-
-    return new DefaultKafkaConsumerFactory<>(customizedProperties);
-  }
-
-  @Bean(name = "duheKafkaConsumerFactory")
-  protected DefaultKafkaConsumerFactory<String, GenericRecord> duheKafkaConsumerFactory(
-      @Qualifier("configurationProvider") ConfigurationProvider provider,
-      KafkaProperties baseKafkaProperties,
-      @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig schemaRegistryConfig) {
-
-    KafkaConfiguration kafkaConfiguration = provider.getKafka();
-    Map<String, Object> customizedProperties = buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration,
-        schemaRegistryConfig);
-
-    return new DefaultKafkaConsumerFactory<>(customizedProperties);
-  }
-
-  private static Map<String, Object> buildCustomizedProperties(KafkaProperties baseKafkaProperties,
-      KafkaConfiguration kafkaConfiguration,
-      SchemaRegistryConfig schemaRegistryConfig) {
-    KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer();
-
-    // Specify (de)serializers for record keys and for record values.
-    consumerProps.setKeyDeserializer(StringDeserializer.class);
-    // Records will be flushed every 10 seconds.
-    consumerProps.setEnableAutoCommit(true);
-    consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
-
-
-    // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
-    if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
-      consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
-    } // else we rely on KafkaProperties which defaults to localhost:9092
-
-    Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties();
-    customizedProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer());
-
-    // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
-    schemaRegistryConfig.getProperties().entrySet()
-        .stream()
-        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
-        .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue()));
-
-    customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
-        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
-
-    return customizedProperties;
-  }
-
-  @Bean(name = "kafkaEventConsumer")
-  protected KafkaListenerContainerFactory<?> createInstance(
-      @Qualifier("kafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
-
-    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
-        new ConcurrentKafkaListenerContainerFactory<>();
-    factory.setConsumerFactory(kafkaConsumerFactory);
-    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
-    factory.setConcurrency(kafkaEventConsumerConcurrency);
-
-    log.info(String.format("Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s",
-        kafkaEventConsumerConcurrency));
-
-    return factory;
-  }
-
-  @Bean(name = "duheKafkaEventConsumer")
-  protected KafkaListenerContainerFactory<?> duheKafkaEventConsumer(
-      @Qualifier("duheKafkaConsumerFactory") DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
-
-    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
-        new ConcurrentKafkaListenerContainerFactory<>();
-    factory.setConsumerFactory(kafkaConsumerFactory);
-    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
-    factory.setConcurrency(1);
-
-    log.info("Event-based DUHE KafkaListenerContainerFactory built successfully. Consumer concurrency = 1");
-    return factory;
-  }
-}
\ No newline at end of file
+  private int kafkaEventConsumerConcurrency;
+
+  @Bean(name = "kafkaConsumerFactory")
+  protected DefaultKafkaConsumerFactory<String, GenericRecord> createConsumerFactory(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties baseKafkaProperties,
+      SchemaRegistryConfig schemaRegistryConfig) {
+    kafkaEventConsumerConcurrency = provider.getKafka().getListener().getConcurrency();
+
+    KafkaConfiguration kafkaConfiguration = provider.getKafka();
+    Map<String, Object> customizedProperties =
+        buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, schemaRegistryConfig);
+
+    return new DefaultKafkaConsumerFactory<>(customizedProperties);
+  }
+
+  @Bean(name = "duheKafkaConsumerFactory")
+  protected DefaultKafkaConsumerFactory<String, GenericRecord> duheKafkaConsumerFactory(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties baseKafkaProperties,
+      @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig schemaRegistryConfig) {
+
+    KafkaConfiguration kafkaConfiguration = provider.getKafka();
+    Map<String, Object> customizedProperties =
+        buildCustomizedProperties(baseKafkaProperties, kafkaConfiguration, schemaRegistryConfig);
+
+    return new DefaultKafkaConsumerFactory<>(customizedProperties);
+  }
+
+  private static Map<String, Object> buildCustomizedProperties(
+      KafkaProperties baseKafkaProperties,
+      KafkaConfiguration kafkaConfiguration,
+      SchemaRegistryConfig schemaRegistryConfig) {
+    KafkaProperties.Consumer consumerProps = baseKafkaProperties.getConsumer();
+
+    // Specify (de)serializers for record keys and for record values.
+    consumerProps.setKeyDeserializer(StringDeserializer.class);
+    // Records will be flushed every 10 seconds.
+    consumerProps.setEnableAutoCommit(true);
+    consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
+
+    // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
+    if (kafkaConfiguration.getBootstrapServers() != null
+        && kafkaConfiguration.getBootstrapServers().length() > 0) {
+      consumerProps.setBootstrapServers(
+          Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
+    } // else we rely on KafkaProperties which defaults to localhost:9092
+
+    Map<String, Object> customizedProperties = baseKafkaProperties.buildConsumerProperties();
+    customizedProperties.put(
+        ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, schemaRegistryConfig.getDeserializer());
+
+    // Override KafkaProperties with SchemaRegistryConfig only for non-empty values
+    schemaRegistryConfig.getProperties().entrySet().stream()
+        .filter(entry -> entry.getValue() != null && !entry.getValue().toString().isEmpty())
+        .forEach(entry -> customizedProperties.put(entry.getKey(), entry.getValue()));
+
+    customizedProperties.put(
+        ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
+        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
+
+    return customizedProperties;
+  }
+
+  @Bean(name = "kafkaEventConsumer")
+  protected KafkaListenerContainerFactory<?> createInstance(
+      @Qualifier("kafkaConsumerFactory")
+          DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
+
+    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
+        new ConcurrentKafkaListenerContainerFactory<>();
+    factory.setConsumerFactory(kafkaConsumerFactory);
+    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
+    factory.setConcurrency(kafkaEventConsumerConcurrency);
+
+    log.info(
+        String.format(
+            "Event-based KafkaListenerContainerFactory built successfully. Consumer concurrency = %s",
+            kafkaEventConsumerConcurrency));
+
+    return factory;
+  }
+
+  @Bean(name = "duheKafkaEventConsumer")
+  protected KafkaListenerContainerFactory<?> duheKafkaEventConsumer(
+      @Qualifier("duheKafkaConsumerFactory")
+          DefaultKafkaConsumerFactory<String, GenericRecord> kafkaConsumerFactory) {
+
+    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
+        new ConcurrentKafkaListenerContainerFactory<>();
+    factory.setConsumerFactory(kafkaConsumerFactory);
+    factory.setContainerCustomizer(new ThreadPoolContainerCustomizer());
+    factory.setConcurrency(1);
+
+    log.info(
+        "Event-based DUHE KafkaListenerContainerFactory built successfully. Consumer concurrency = 1");
+    return factory;
+  }
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
index 14ffc01d75781..58cb311c526bc 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactory.java
@@ -1,11 +1,10 @@
 package com.linkedin.gms.factory.kafka;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import java.time.Duration;
 import java.util.Arrays;
 import java.util.Map;
-
 import lombok.extern.slf4j.Slf4j;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -19,15 +18,15 @@
 import org.springframework.kafka.config.KafkaListenerContainerFactory;
 import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
 
-
 @Slf4j
 @Configuration
 @EnableConfigurationProperties({KafkaProperties.class})
 public class SimpleKafkaConsumerFactory {
 
   @Bean(name = "simpleKafkaConsumer")
-  protected KafkaListenerContainerFactory<?> createInstance(@Qualifier("configurationProvider") ConfigurationProvider
-      provider, KafkaProperties properties) {
+  protected KafkaListenerContainerFactory<?> createInstance(
+      @Qualifier("configurationProvider") ConfigurationProvider provider,
+      KafkaProperties properties) {
 
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
     KafkaProperties.Consumer consumerProps = properties.getConsumer();
@@ -39,13 +38,16 @@ protected KafkaListenerContainerFactory createInstance(@Qualifier("configurat
     consumerProps.setAutoCommitInterval(Duration.ofSeconds(10));
 
     // KAFKA_BOOTSTRAP_SERVER has precedence over SPRING_KAFKA_BOOTSTRAP_SERVERS
-    if (kafkaConfiguration.getBootstrapServers() != null && kafkaConfiguration.getBootstrapServers().length() > 0) {
-      consumerProps.setBootstrapServers(Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
+    if (kafkaConfiguration.getBootstrapServers() != null
+        && kafkaConfiguration.getBootstrapServers().length() > 0) {
+      consumerProps.setBootstrapServers(
+          Arrays.asList(kafkaConfiguration.getBootstrapServers().split(",")));
     } // else we rely on KafkaProperties which defaults to localhost:9092
 
     Map<String, Object> customizedProperties = properties.buildConsumerProperties();
-    customizedProperties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
-        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
+    customizedProperties.put(
+        ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,
+        kafkaConfiguration.getConsumer().getMaxPartitionFetchBytes());
 
     ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory =
         new ConcurrentKafkaListenerContainerFactory<>();
@@ -56,4 +58,4 @@ protected KafkaListenerContainerFactory createInstance(@Qualifier("configurat
 
     return factory;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java
index 07cbccd93c595..f79026c8ee337 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/ThreadPoolContainerCustomizer.java
@@ -5,13 +5,14 @@
 import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
 
-
 public class ThreadPoolContainerCustomizer
-    implements ContainerCustomizer<String, GenericRecord, ConcurrentMessageListenerContainer<String, GenericRecord>> {
+    implements ContainerCustomizer<
+        String, GenericRecord, ConcurrentMessageListenerContainer<String, GenericRecord>> {
   @Override
   public void configure(ConcurrentMessageListenerContainer<String, GenericRecord> container) {
     ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
-    // Default Queue Capacity is set to max, so we want to allow the thread pool to add concurrent threads up to configured value
+    // Default Queue Capacity is set to max, so we want to allow the thread pool to add concurrent
+    // threads up to configured value
    threadPoolTaskExecutor.setCorePoolSize(container.getConcurrency());
    threadPoolTaskExecutor.setMaxPoolSize(container.getConcurrency());
    threadPoolTaskExecutor.initialize();
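The schema-registry factories in the remainder of this patch are mutually exclusive: each is guarded by @ConditionalOnProperty on kafka.schemaRegistry.type, so only the matching SchemaRegistryConfig bean is instantiated for a given deployment. A minimal sketch of the guard (the bean body is illustrative only):

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = "AWS_GLUE")
class GlueGuardedConfig {
  // Spring skips this whole configuration class unless the property matches.
  @Bean
  String glueRegistryMarker() {
    return "AWS_GLUE registry selected";
  }
}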
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
index ac1cbbc5cc5ff..a88e1d971973b 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/AwsGlueSchemaRegistryFactory.java
@@ -17,17 +17,19 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.PropertySource;
 
-
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = AwsGlueSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = AwsGlueSchemaRegistryFactory.TYPE)
 public class AwsGlueSchemaRegistryFactory {
 
   public static final String TYPE = "AWS_GLUE";
 
   @Value("${kafka.schemaRegistry.awsGlue.region}")
   private String awsRegion;
+
   @Value("${kafka.schemaRegistry.awsGlue.registryName}")
   private Optional<String> registryName;
 
@@ -35,7 +37,8 @@ public class AwsGlueSchemaRegistryFactory {
   @Nonnull
   protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) {
     Map<String, Object> props = new HashMap<>();
-    // FIXME: Properties for this factory should come from ConfigurationProvider object, specifically under the
+    // FIXME: Properties for this factory should come from ConfigurationProvider object,
+    // specifically under the
     // KafkaConfiguration class. See InternalSchemaRegistryFactory as an example.
     props.put(AWSSchemaRegistryConstants.AWS_REGION, awsRegion);
     props.put(AWSSchemaRegistryConstants.DATA_FORMAT, "AVRO");
@@ -43,7 +46,7 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr
     props.put(AWSSchemaRegistryConstants.AVRO_RECORD_TYPE, AvroRecordType.GENERIC_RECORD.getName());
     registryName.ifPresent(s -> props.put(AWSSchemaRegistryConstants.REGISTRY_NAME, s));
     log.info("Creating AWS Glue registry");
-    return new SchemaRegistryConfig(GlueSchemaRegistryKafkaSerializer.class, GlueSchemaRegistryKafkaDeserializer.class,
-        props);
+    return new SchemaRegistryConfig(
+        GlueSchemaRegistryKafkaSerializer.class, GlueSchemaRegistryKafkaDeserializer.class, props);
   }
-}
\ No newline at end of file
+}
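Each of these factories returns a SchemaRegistryConfig, which in practice is just a serializer class, a deserializer class, and the properties both need at construction time. An illustrative stand-in for that shape (not DataHub's actual class; shown only to make the factories' return value concrete):

import java.util.Map;

final class SerdeRegistryConfigSketch {
  final Class<?> serializer;
  final Class<?> deserializer;
  final Map<String, Object> properties;

  SerdeRegistryConfigSketch(
      Class<?> serializer, Class<?> deserializer, Map<String, Object> properties) {
    this.serializer = serializer;
    this.deserializer = deserializer;
    this.properties = properties;
  }
}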
+  @Bean("duheSchemaRegistryConfig")
+  protected SchemaRegistryConfig duheSchemaRegistryConfig(ConfigurationProvider provider) {
+    Map<String, Object> props = new HashMap<>();
+    KafkaConfiguration kafkaConfiguration = provider.getKafka();
 
-        props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaConfiguration
-                .getSchemaRegistry().getUrl());
-        props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName);
+    props.put(
+        AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
+        kafkaConfiguration.getSchemaRegistry().getUrl());
+    props.put(DUHE_SCHEMA_REGISTRY_TOPIC_KEY, duheTopicName);
 
-        log.info("DataHub System Update Registry");
-        return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props);
-    }
+    log.info("DataHub System Update Registry");
+    return new SchemaRegistryConfig(MockDUHESerializer.class, MockDUHEDeserializer.class, props);
+  }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java
index 217dc15bbc3e8..8c814e5054758 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/InternalSchemaRegistryFactory.java
@@ -1,8 +1,8 @@
 package com.linkedin.gms.factory.kafka.schemaregistry;
 
-import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.gms.factory.common.TopicConventionFactory;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.kafka.KafkaConfiguration;
 import com.linkedin.metadata.registry.SchemaRegistryService;
 import com.linkedin.metadata.registry.SchemaRegistryServiceImpl;
 import com.linkedin.mxe.TopicConvention;
@@ -19,27 +19,30 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.DependsOn;
 
-
 @Slf4j
 @Configuration
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = InternalSchemaRegistryFactory.TYPE)
 public class InternalSchemaRegistryFactory {
 
   public static final String TYPE = "INTERNAL";
 
-  /**
-   * Configure Kafka Producer/Consumer processes with a custom schema registry.
-   */
+  /** Configure Kafka Producer/Consumer processes with a custom schema registry. */
   @Bean("schemaRegistryConfig")
   @Nonnull
-  protected SchemaRegistryConfig getInstance(@Qualifier("configurationProvider") ConfigurationProvider provider) {
+  protected SchemaRegistryConfig getInstance(
+      @Qualifier("configurationProvider") ConfigurationProvider provider) {
     Map<String, Object> props = new HashMap<>();
     KafkaConfiguration kafkaConfiguration = provider.getKafka();
 
-    props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaConfiguration
-        .getSchemaRegistry().getUrl());
+    props.put(
+        AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
+        kafkaConfiguration.getSchemaRegistry().getUrl());
 
-    log.info("Creating internal registry configuration for url {}", kafkaConfiguration.getSchemaRegistry().getUrl());
+    log.info(
+        "Creating internal registry configuration for url {}",
+        kafkaConfiguration.getSchemaRegistry().getUrl());
     return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props);
   }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java
index 7b72ba3f3bb88..e6c255b99a9ff 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/KafkaSchemaRegistryFactory.java
@@ -21,7 +21,9 @@
 @Slf4j
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = KafkaSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = KafkaSchemaRegistryFactory.TYPE)
 public class KafkaSchemaRegistryFactory {
 
   public static final String TYPE = "KAFKA";
 
@@ -48,7 +50,8 @@ public class KafkaSchemaRegistryFactory {
   @Nonnull
   protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationProvider) {
     Map<String, Object> props = new HashMap<>();
-    // FIXME: Properties for this factory should come from ConfigurationProvider object, specifically under the
+    // FIXME: Properties for this factory should come from ConfigurationProvider object,
+    // specifically under the
     // KafkaConfiguration class. See InternalSchemaRegistryFactory as an example.
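The withNamespace helper in the hunk below prepends SchemaRegistryClientConfig.CLIENT_NAMESPACE to the SSL keys so that they configure the serializer's embedded schema-registry REST client rather than the Kafka producer/consumer itself. A rough sketch of the effect, assuming the namespace constant resolves to the "schema.registry." prefix (an assumption to verify against the Confluent client version in use):

    import java.util.HashMap;
    import java.util.Map;

    class NamespacedRegistryProps {
      // Assumed value of SchemaRegistryClientConfig.CLIENT_NAMESPACE; not taken from this patch.
      static final String CLIENT_NAMESPACE = "schema.registry.";

      static Map<String, Object> build(String registryUrl, String truststoreLocation) {
        Map<String, Object> props = new HashMap<>();
        props.put("schema.registry.url", registryUrl);
        // A bare "ssl.truststore.location" would target the Kafka client itself;
        // the prefix routes the setting to the schema-registry HTTP client instead.
        props.put(CLIENT_NAMESPACE + "ssl.truststore.location", truststoreLocation);
        return props;
      }
    }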
     props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, kafkaSchemaRegistryUrl);
     props.put(withNamespace(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), sslTruststoreLocation);
@@ -60,8 +63,11 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr
     if (sslKeystoreLocation.isEmpty()) {
       log.info("creating schema registry config using url: {}", kafkaSchemaRegistryUrl);
     } else {
-      log.info("creating schema registry config using url: {}, keystore location: {} and truststore location: {}",
-          kafkaSchemaRegistryUrl, sslTruststoreLocation, sslKeystoreLocation);
+      log.info(
+          "creating schema registry config using url: {}, keystore location: {} and truststore location: {}",
+          kafkaSchemaRegistryUrl,
+          sslTruststoreLocation,
+          sslKeystoreLocation);
     }
 
     return new SchemaRegistryConfig(KafkaAvroSerializer.class, KafkaAvroDeserializer.class, props);
@@ -70,4 +76,4 @@ protected SchemaRegistryConfig getInstance(ConfigurationProvider configurationPr
   private String withNamespace(String configKey) {
     return SchemaRegistryClientConfig.CLIENT_NAMESPACE + configKey;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java
index 1e2962bbda7c8..004a7abb88489 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/kafka/schemaregistry/SchemaRegistryConfig.java
@@ -3,7 +3,6 @@
 import java.util.Map;
 import lombok.Data;
 
-
 @Data
 public class SchemaRegistryConfig {
   private final Class<?> serializer;
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
index 8596a14b7fc24..1589b33862bfe 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/lineage/LineageServiceFactory.java
@@ -1,10 +1,9 @@
 package com.linkedin.gms.factory.lineage;
 
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
 import com.linkedin.metadata.client.JavaEntityClient;
-import javax.annotation.Nonnull;
-
 import com.linkedin.metadata.service.LineageService;
+import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import javax.annotation.Nonnull;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
@@ -12,7 +11,6 @@
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.context.annotation.Scope;
 
-
 @Configuration
 @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class)
 public class LineageServiceFactory {
@@ -26,4 +24,4 @@ public class LineageServiceFactory {
   protected LineageService getInstance() throws Exception {
     return new LineageService(this._javaEntityClient);
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java
index 3a1f18692fdc6..ff48a922adf22
100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/ownership/OwnershipTypeServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.ownership; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.OwnershipTypeService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class OwnershipTypeServiceFactory { @@ -30,4 +29,4 @@ public class OwnershipTypeServiceFactory { protected OwnershipTypeService getInstance() throws Exception { return new OwnershipTypeService(_javaEntityClient, _authentication); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java index f98c5bd50467d..cf81cbf70d5eb 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/query/QueryServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.query; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.QueryService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class QueryServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java index 36b203f677c9c..dc68451c6fce1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/RecommendationServiceFactory.java @@ -10,9 +10,9 @@ import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.recommendation.candidatesource.DomainsCandidateSource; import com.linkedin.metadata.recommendation.candidatesource.MostPopularSource; +import com.linkedin.metadata.recommendation.candidatesource.RecentlyEditedSource; import com.linkedin.metadata.recommendation.candidatesource.RecentlySearchedSource; import com.linkedin.metadata.recommendation.candidatesource.RecentlyViewedSource; -import 
com.linkedin.metadata.recommendation.candidatesource.RecentlyEditedSource;
 import com.linkedin.metadata.recommendation.candidatesource.RecommendationSource;
 import com.linkedin.metadata.recommendation.candidatesource.TopPlatformsSource;
 import com.linkedin.metadata.recommendation.candidatesource.TopTagsSource;
@@ -26,10 +26,15 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({TopPlatformsCandidateSourceFactory.class, RecentlyEditedCandidateSourceFactory.class,
-    MostPopularCandidateSourceFactory.class, TopTagsCandidateSourceFactory.class, TopTermsCandidateSourceFactory.class, DomainsCandidateSourceFactory.class})
+@Import({
+  TopPlatformsCandidateSourceFactory.class,
+  RecentlyEditedCandidateSourceFactory.class,
+  MostPopularCandidateSourceFactory.class,
+  TopTagsCandidateSourceFactory.class,
+  TopTermsCandidateSourceFactory.class,
+  DomainsCandidateSourceFactory.class
+})
 public class RecommendationServiceFactory {
 
   @Autowired
@@ -69,11 +74,16 @@ public class RecommendationServiceFactory {
   protected RecommendationsService getInstance() {
     // TODO: Make this class-name pluggable to minimize merge conflict potential.
     // This is where you can add new recommendation modules.
-    final List<RecommendationSource> candidateSources = ImmutableList.of(
-        topPlatformsCandidateSource,
-        domainsCandidateSource,
-        recentlyViewedCandidateSource, recentlyEditedCandidateSource, _mostPopularCandidateSource,
-        topTagsCandidateSource, topTermsCandidateSource, recentlySearchedCandidateSource);
+    final List<RecommendationSource> candidateSources =
+        ImmutableList.of(
+            topPlatformsCandidateSource,
+            domainsCandidateSource,
+            recentlyViewedCandidateSource,
+            recentlyEditedCandidateSource,
+            _mostPopularCandidateSource,
+            topTagsCandidateSource,
+            topTermsCandidateSource,
+            recentlySearchedCandidateSource);
     return new RecommendationsService(candidateSources, new SimpleRecommendationRanker());
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
index c266b3635b16f..f3be4db147399 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/MostPopularCandidateSourceFactory.java
@@ -14,9 +14,12 @@
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-
 @Configuration
-@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class})
+@Import({
+  RestHighLevelClientFactory.class,
+  IndexConventionFactory.class,
+  EntityServiceFactory.class
+})
 public class MostPopularCandidateSourceFactory {
   @Autowired
   @Qualifier("elasticSearchRestHighLevelClient")
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
index 109cc8dbc82d1..ac227faf06c4c 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java
+++
b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyEditedCandidateSourceFactory.java @@ -14,9 +14,12 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration -@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class}) +@Import({ + RestHighLevelClientFactory.class, + IndexConventionFactory.class, + EntityServiceFactory.class +}) public class RecentlyEditedCandidateSourceFactory { @Autowired @Qualifier("elasticSearchRestHighLevelClient") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java index 5209f65a2ec63..05b6f974eedca 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlySearchedCandidateSourceFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({RestHighLevelClientFactory.class, IndexConventionFactory.class}) public class RecentlySearchedCandidateSourceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java index aea40b4d8eb46..6f17846efc1cd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/RecentlyViewedCandidateSourceFactory.java @@ -14,9 +14,12 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration -@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, EntityServiceFactory.class}) +@Import({ + RestHighLevelClientFactory.class, + IndexConventionFactory.class, + EntityServiceFactory.class +}) public class RecentlyViewedCandidateSourceFactory { @Autowired @Qualifier("elasticSearchRestHighLevelClient") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java index fc04bbcce31ee..ad241e7717545 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopPlatformsCandidateSourceFactory.java @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({EntityServiceFactory.class, EntitySearchServiceFactory.class}) public class TopPlatformsCandidateSourceFactory { diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java index 857a788454c34..fe5c2d03d1907 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTagsCandidateSourceFactory.java @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({EntitySearchServiceFactory.class}) public class TopTagsCandidateSourceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java index b8d50169e49ab..36c53936094ff 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/recommendation/candidatesource/TopTermsCandidateSourceFactory.java @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; - @Configuration @Import({EntitySearchServiceFactory.class}) public class TopTermsCandidateSourceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java index c99d429e986b6..e4e7d04e311da 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/BaseElasticSearchComponentsFactory.java @@ -2,27 +2,28 @@ import com.linkedin.gms.factory.common.IndexConventionFactory; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Value; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - -/** - * Factory for components required for any services using elasticsearch - */ +/** Factory for components required for any services using elasticsearch */ @Configuration -@Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, ElasticSearchBulkProcessorFactory.class, - 
ElasticSearchIndexBuilderFactory.class}) +@Import({ + RestHighLevelClientFactory.class, + IndexConventionFactory.class, + ElasticSearchBulkProcessorFactory.class, + ElasticSearchIndexBuilderFactory.class +}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class BaseElasticSearchComponentsFactory { @lombok.Value @@ -56,6 +57,7 @@ public static class BaseElasticSearchComponents { @Bean(name = "baseElasticSearchComponents") @Nonnull protected BaseElasticSearchComponents getInstance() { - return new BaseElasticSearchComponents(searchClient, indexConvention, bulkProcessor, indexBuilder, numRetries); + return new BaseElasticSearchComponents( + searchClient, indexConvention, bulkProcessor, indexBuilder, numRetries); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java index 845c63c32e0fd..d2292b215e62a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/CachingEntitySearchServiceFactory.java @@ -1,8 +1,8 @@ package com.linkedin.gms.factory.search; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.client.CachingEntitySearchService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -13,7 +13,6 @@ import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class CachingEntitySearchServiceFactory { @@ -22,8 +21,7 @@ public class CachingEntitySearchServiceFactory { @Qualifier("entitySearchService") private EntitySearchService entitySearchService; - @Autowired - private CacheManager cacheManager; + @Autowired private CacheManager cacheManager; @Value("${searchService.resultBatchSize}") private Integer batchSize; @@ -36,9 +34,6 @@ public class CachingEntitySearchServiceFactory { @Nonnull protected CachingEntitySearchService getInstance() { return new CachingEntitySearchService( - cacheManager, - entitySearchService, - batchSize, - enableCache); + cacheManager, entitySearchService, batchSize, enableCache); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java index 5deffdb01d247..64b1fcc2f5695 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactory.java @@ -1,10 +1,9 @@ package com.linkedin.gms.factory.search; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; +import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; - -import 
com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.support.WriteRequest; import org.opensearch.client.RestHighLevelClient; @@ -16,7 +15,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Slf4j @Configuration @Import({RestHighLevelClientFactory.class}) @@ -51,13 +49,13 @@ public class ElasticSearchBulkProcessorFactory { @Nonnull protected ESBulkProcessor getInstance() { return ESBulkProcessor.builder(searchClient) - .async(async) - .bulkFlushPeriod(bulkFlushPeriod) - .bulkRequestsLimit(bulkRequestsLimit) - .retryInterval(retryInterval) - .numRetries(numRetries) - .batchDelete(enableBatchDelete) - .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.valueOf(refreshPolicy)) - .build(); + .async(async) + .bulkFlushPeriod(bulkFlushPeriod) + .bulkRequestsLimit(bulkRequestsLimit) + .retryInterval(retryInterval) + .numRetries(numRetries) + .batchDelete(enableBatchDelete) + .writeRequestRefreshPolicy(WriteRequest.RefreshPolicy.valueOf(refreshPolicy)) + .build(); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java index b619ee9516dce..7bf04b467d205 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactory.java @@ -1,18 +1,23 @@ package com.linkedin.gms.factory.search; +import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; + import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import com.linkedin.gms.factory.common.GitVersionFactory; import com.linkedin.gms.factory.common.IndexConventionFactory; import com.linkedin.gms.factory.common.RestHighLevelClientFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.version.GitVersion; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -22,14 +27,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; - - @Configuration @Import({RestHighLevelClientFactory.class, IndexConventionFactory.class, GitVersionFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -66,30 +63,41 @@ public class ElasticSearchIndexBuilderFactory { @Bean(name = "elasticSearchIndexSettingsOverrides") 
@Nonnull
   protected Map<String, Map<String, String>> getIndexSettingsOverrides(
-      @Qualifier(INDEX_CONVENTION_BEAN) IndexConvention indexConvention) {
+      @Qualifier(INDEX_CONVENTION_BEAN) IndexConvention indexConvention) {
     return Stream.concat(
             parseIndexSettingsMap(indexSettingOverrides).entrySet().stream()
-            .map(e -> Map.entry(indexConvention.getIndexName(e.getKey()), e.getValue())),
-        parseIndexSettingsMap(entityIndexSettingOverrides).entrySet().stream()
-            .map(e -> Map.entry(indexConvention.getEntityIndexName(e.getKey()), e.getValue())))
-        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+                .map(e -> Map.entry(indexConvention.getIndexName(e.getKey()), e.getValue())),
+            parseIndexSettingsMap(entityIndexSettingOverrides).entrySet().stream()
+                .map(e -> Map.entry(indexConvention.getEntityIndexName(e.getKey()), e.getValue())))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
   }
 
   @Bean(name = "elasticSearchIndexBuilder")
   @Nonnull
   protected ESIndexBuilder getInstance(
-      @Qualifier("elasticSearchIndexSettingsOverrides") Map<String, Map<String, String>> overrides,
-      final ConfigurationProvider configurationProvider, final GitVersion gitVersion) {
-    return new ESIndexBuilder(searchClient, numShards, numReplicas, numRetries, refreshIntervalSeconds, overrides,
-        enableSettingsReindex, enableMappingsReindex, configurationProvider.getElasticSearch(), gitVersion);
+      @Qualifier("elasticSearchIndexSettingsOverrides") Map<String, Map<String, String>> overrides,
+      final ConfigurationProvider configurationProvider,
+      final GitVersion gitVersion) {
+    return new ESIndexBuilder(
+        searchClient,
+        numShards,
+        numReplicas,
+        numRetries,
+        refreshIntervalSeconds,
+        overrides,
+        enableSettingsReindex,
+        enableMappingsReindex,
+        configurationProvider.getElasticSearch(),
+        gitVersion);
   }
 
   @Nonnull
   private static Map<String, Map<String, String>> parseIndexSettingsMap(@Nullable String json) {
-    Optional<Map<String, Map<String, String>>> parseOpt = Optional.ofNullable(
-        new Gson().fromJson(json,
-            new TypeToken<Map<String, Map<String, String>>>() { }.getType()));
+    Optional<Map<String, Map<String, String>>> parseOpt =
+        Optional.ofNullable(
+            new Gson()
+                .fromJson(json, new TypeToken<Map<String, Map<String, String>>>() {}.getType()));
     return parseOpt.orElse(Map.of());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
index 6d8a62ac1fd18..2b6d495e4fe33 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ElasticSearchServiceFactory.java
@@ -1,13 +1,12 @@
 package com.linkedin.gms.factory.search;
 
-import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
-import com.linkedin.metadata.config.search.SearchConfiguration;
-import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
 import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory;
-import com.linkedin.metadata.spring.YamlPropertySourceFactory;
+import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
+import com.linkedin.metadata.config.search.SearchConfiguration;
+import com.linkedin.metadata.config.search.custom.CustomSearchConfiguration;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.search.elasticsearch.ElasticSearchService;
 import
com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders; @@ -15,8 +14,9 @@ import com.linkedin.metadata.search.elasticsearch.query.ESBrowseDAO; import com.linkedin.metadata.search.elasticsearch.query.ESSearchDAO; import com.linkedin.metadata.search.elasticsearch.update.ESWriteDAO; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import java.io.IOException; import javax.annotation.Nonnull; - import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -25,9 +25,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; -import java.io.IOException; - - @Slf4j @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -47,30 +44,47 @@ public class ElasticSearchServiceFactory { @Qualifier("settingsBuilder") private SettingsBuilder settingsBuilder; - @Autowired - private EntityIndexBuilders entityIndexBuilders; + @Autowired private EntityIndexBuilders entityIndexBuilders; - @Autowired - private ConfigurationProvider configurationProvider; + @Autowired private ConfigurationProvider configurationProvider; @Bean(name = "elasticSearchService") @Nonnull - protected ElasticSearchService getInstance(ConfigurationProvider configurationProvider) throws IOException { + protected ElasticSearchService getInstance(ConfigurationProvider configurationProvider) + throws IOException { log.info("Search configuration: {}", configurationProvider.getElasticSearch().getSearch()); - ElasticSearchConfiguration elasticSearchConfiguration = configurationProvider.getElasticSearch(); + ElasticSearchConfiguration elasticSearchConfiguration = + configurationProvider.getElasticSearch(); SearchConfiguration searchConfiguration = elasticSearchConfiguration.getSearch(); - CustomSearchConfiguration customSearchConfiguration = searchConfiguration.getCustom() == null ? null + CustomSearchConfiguration customSearchConfiguration = + searchConfiguration.getCustom() == null + ? 
null : searchConfiguration.getCustom().resolve(YAML_MAPPER); ESSearchDAO esSearchDAO = - new ESSearchDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(), - configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(), - elasticSearchConfiguration.getImplementation(), searchConfiguration, customSearchConfiguration); - return new ElasticSearchService(entityIndexBuilders, esSearchDAO, - new ESBrowseDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(), - searchConfiguration, customSearchConfiguration), - new ESWriteDAO(entityRegistry, components.getSearchClient(), components.getIndexConvention(), - components.getBulkProcessor(), components.getNumRetries())); + new ESSearchDAO( + entityRegistry, + components.getSearchClient(), + components.getIndexConvention(), + configurationProvider.getFeatureFlags().isPointInTimeCreationEnabled(), + elasticSearchConfiguration.getImplementation(), + searchConfiguration, + customSearchConfiguration); + return new ElasticSearchService( + entityIndexBuilders, + esSearchDAO, + new ESBrowseDAO( + entityRegistry, + components.getSearchClient(), + components.getIndexConvention(), + searchConfiguration, + customSearchConfiguration), + new ESWriteDAO( + entityRegistry, + components.getSearchClient(), + components.getIndexConvention(), + components.getBulkProcessor(), + components.getNumRetries())); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java index 6bb206ee3ad61..334194b95c162 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntityIndexBuildersFactory.java @@ -10,26 +10,28 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class EntityIndexBuildersFactory { - @Autowired - @Qualifier("baseElasticSearchComponents") - private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components; - - @Autowired - @Qualifier("entityRegistry") - private EntityRegistry entityRegistry; - - @Autowired - @Qualifier("settingsBuilder") - private SettingsBuilder settingsBuilder; - - - @Bean - protected EntityIndexBuilders entityIndexBuilders() { - return new EntityIndexBuilders(components.getIndexBuilder(), entityRegistry, components.getIndexConvention(), settingsBuilder); - } -} \ No newline at end of file + @Autowired + @Qualifier("baseElasticSearchComponents") + private BaseElasticSearchComponentsFactory.BaseElasticSearchComponents components; + + @Autowired + @Qualifier("entityRegistry") + private EntityRegistry entityRegistry; + + @Autowired + @Qualifier("settingsBuilder") + private SettingsBuilder settingsBuilder; + + @Bean + protected EntityIndexBuilders entityIndexBuilders() { + return new EntityIndexBuilders( + components.getIndexBuilder(), + entityRegistry, + components.getIndexConvention(), + settingsBuilder); + } +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java index 49dab31cca1d0..38fd27fb44024 100644 
--- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/EntitySearchServiceFactory.java @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({ElasticSearchServiceFactory.class}) public class EntitySearchServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java index e2eef83bc6e3f..17103240c938b 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/LineageSearchServiceFactory.java @@ -2,10 +2,10 @@ import com.linkedin.gms.factory.common.GraphServiceFactory; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.search.LineageSearchService; import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.cache.CacheManager; import org.springframework.context.annotation.Bean; @@ -14,7 +14,6 @@ import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; - @Configuration @Import({GraphServiceFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -23,11 +22,17 @@ public class LineageSearchServiceFactory { @Bean(name = "relationshipSearchService") @Primary @Nonnull - protected LineageSearchService getInstance(CacheManager cacheManager, GraphService graphService, - SearchService searchService, ConfigurationProvider configurationProvider) { + protected LineageSearchService getInstance( + CacheManager cacheManager, + GraphService graphService, + SearchService searchService, + ConfigurationProvider configurationProvider) { boolean cacheEnabled = configurationProvider.getFeatureFlags().isLineageSearchCacheEnabled(); - return new LineageSearchService(searchService, graphService, - cacheEnabled ? cacheManager.getCache("relationshipSearchService") : null, cacheEnabled, - configurationProvider.getCache().getSearch().getLineage()); + return new LineageSearchService( + searchService, + graphService, + cacheEnabled ? 
cacheManager.getCache("relationshipSearchService") : null, + cacheEnabled, + configurationProvider.getCache().getSearch().getLineage()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java index a186d2de770f3..9d9018bd31f07 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchDocumentTransformerFactory.java @@ -1,13 +1,12 @@ package com.linkedin.gms.factory.search; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.search.transformer.SearchDocumentTransformer; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SearchDocumentTransformerFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java index 64bb0218a0d71..1cb905665e489 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SearchServiceFactory.java @@ -1,13 +1,13 @@ package com.linkedin.gms.factory.search; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.cache.EntityDocCountCache; import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.search.ranker.SearchRanker; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -16,7 +16,6 @@ import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SearchServiceFactory { @@ -42,8 +41,10 @@ public class SearchServiceFactory { @Nonnull protected SearchService getInstance(ConfigurationProvider configurationProvider) { return new SearchService( - new EntityDocCountCache(entityRegistry, entitySearchService, configurationProvider.getCache() - .getHomepage().getEntityCounts()), + new EntityDocCountCache( + entityRegistry, + entitySearchService, + configurationProvider.getCache().getHomepage().getEntityCounts()), cachingEntitySearchService, searchRanker); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java index 
840a370957706..ce1d6f12c58b4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/SettingsBuilderFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.search; import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @Import(EntityRegistryFactory.class) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java index 1040edca30bfb..b010358bad81c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/ranker/SearchRankerFactory.java @@ -7,7 +7,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; - @Configuration public class SearchRankerFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java index 60bcd9ea22be6..32ad2175c9052 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/search/views/ViewServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.search.views; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.ViewService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class ViewServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java index a1cac07e3fb03..64093c54d0410 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java @@ -7,7 +7,6 @@ import 
org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; - @Configuration public class SecretServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java index 2e22d43913493..f0d09a815628d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/settings/SettingsServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.settings; import com.datahub.authentication.Authentication; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.client.JavaEntityClient; import com.linkedin.metadata.service.SettingsService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -12,7 +12,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class SettingsServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java index 2610ebd3528cd..b735e490f583e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/DailyReport.java @@ -1,5 +1,7 @@ package com.linkedin.gms.factory.telemetry; +import static com.linkedin.gms.factory.telemetry.TelemetryUtils.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; import com.linkedin.datahub.graphql.generated.DateRange; @@ -12,13 +14,11 @@ import java.io.IOException; import java.util.Optional; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; import org.joda.time.DateTime; import org.json.JSONObject; +import org.opensearch.client.RestHighLevelClient; import org.springframework.scheduling.annotation.Scheduled; -import static com.linkedin.gms.factory.telemetry.TelemetryUtils.*; - @Slf4j public class DailyReport { @@ -32,8 +32,12 @@ public class DailyReport { private MixpanelAPI mixpanel; private MessageBuilder mixpanelBuilder; - public DailyReport(IndexConvention indexConvention, RestHighLevelClient elasticClient, - ConfigurationProvider configurationProvider, EntityService entityService, GitVersion gitVersion) { + public DailyReport( + IndexConvention indexConvention, + RestHighLevelClient elasticClient, + ConfigurationProvider configurationProvider, + EntityService entityService, + GitVersion gitVersion) { this._indexConvention = indexConvention; this._elasticClient = elasticClient; this._configurationProvider = configurationProvider; @@ -43,7 +47,10 @@ public DailyReport(IndexConvention indexConvention, RestHighLevelClient elasticC String clientId = getClientId(entityService); // initialize MixPanel instance and message builder - mixpanel = new MixpanelAPI("https://track.datahubproject.io/mp/track", 
"https://track.datahubproject.io/mp/engage"); + mixpanel = + new MixpanelAPI( + "https://track.datahubproject.io/mp/track", + "https://track.datahubproject.io/mp/engage"); mixpanelBuilder = new MessageBuilder(MIXPANEL_TOKEN); // set user-level properties @@ -72,24 +79,48 @@ public void dailyReport() { DateTime lastWeek = endDate.minusWeeks(1); DateTime lastMonth = endDate.minusMonths(1); - DateRange dayRange = new DateRange(String.valueOf(yesterday.getMillis()), String.valueOf(endDate.getMillis())); - DateRange weekRange = new DateRange(String.valueOf(lastWeek.getMillis()), String.valueOf(endDate.getMillis())); - DateRange monthRange = new DateRange(String.valueOf(lastMonth.getMillis()), String.valueOf(endDate.getMillis())); + DateRange dayRange = + new DateRange(String.valueOf(yesterday.getMillis()), String.valueOf(endDate.getMillis())); + DateRange weekRange = + new DateRange(String.valueOf(lastWeek.getMillis()), String.valueOf(endDate.getMillis())); + DateRange monthRange = + new DateRange(String.valueOf(lastMonth.getMillis()), String.valueOf(endDate.getMillis())); int dailyActiveUsers = - analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(dayRange), - ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId")); + analyticsService.getHighlights( + analyticsService.getUsageIndexName(), + Optional.of(dayRange), + ImmutableMap.of(), + ImmutableMap.of(), + Optional.of("browserId")); int weeklyActiveUsers = - analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(weekRange), - ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId")); + analyticsService.getHighlights( + analyticsService.getUsageIndexName(), + Optional.of(weekRange), + ImmutableMap.of(), + ImmutableMap.of(), + Optional.of("browserId")); int monthlyActiveUsers = - analyticsService.getHighlights(analyticsService.getUsageIndexName(), Optional.of(monthRange), - ImmutableMap.of(), ImmutableMap.of(), Optional.of("browserId")); + analyticsService.getHighlights( + analyticsService.getUsageIndexName(), + Optional.of(monthRange), + ImmutableMap.of(), + ImmutableMap.of(), + Optional.of("browserId")); // floor to nearest power of 10 - dailyActiveUsers = dailyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(dailyActiveUsers) / Math.log(2))); - weeklyActiveUsers = weeklyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(weeklyActiveUsers) / Math.log(2))); - monthlyActiveUsers = monthlyActiveUsers <= 0 ? 0 : (int) Math.pow(2, (int) (Math.log(monthlyActiveUsers) / Math.log(2))); + dailyActiveUsers = + dailyActiveUsers <= 0 + ? 0 + : (int) Math.pow(2, (int) (Math.log(dailyActiveUsers) / Math.log(2))); + weeklyActiveUsers = + weeklyActiveUsers <= 0 + ? 0 + : (int) Math.pow(2, (int) (Math.log(weeklyActiveUsers) / Math.log(2))); + monthlyActiveUsers = + monthlyActiveUsers <= 0 + ? 
0 + : (int) Math.pow(2, (int) (Math.log(monthlyActiveUsers) / Math.log(2))); // set user-level properties JSONObject report = new JSONObject(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java index 8178ce1399aa3..b9330d5827419 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelApiFactory.java @@ -8,7 +8,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class MixpanelApiFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java index 5385c5e81f804..f64766534469d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/MixpanelMessageBuilderFactory.java @@ -8,10 +8,8 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) - public class MixpanelMessageBuilderFactory { private static final String MIXPANEL_TOKEN = "5ee83d940754d63cacbf7d34daa6f44a"; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java index 7cdca996a8131..4986e705fd7b4 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/ScheduledAnalyticsFactory.java @@ -13,17 +13,20 @@ import org.springframework.context.annotation.Configuration; import org.springframework.scheduling.annotation.EnableScheduling; - @Slf4j @Configuration @EnableScheduling public class ScheduledAnalyticsFactory { - @Bean - @ConditionalOnProperty("telemetry.enabledServer") - public DailyReport dailyReport(@Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient, - @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention, - ConfigurationProvider configurationProvider, EntityService entityService, GitVersion gitVersion) { - return new DailyReport(indexConvention, elasticClient, configurationProvider, entityService, gitVersion); - } + @Bean + @ConditionalOnProperty("telemetry.enabledServer") + public DailyReport dailyReport( + @Qualifier("elasticSearchRestHighLevelClient") RestHighLevelClient elasticClient, + @Qualifier(IndexConventionFactory.INDEX_CONVENTION_BEAN) IndexConvention indexConvention, + ConfigurationProvider configurationProvider, + EntityService entityService, + GitVersion gitVersion) { + return new DailyReport( + indexConvention, elasticClient, configurationProvider, entityService, gitVersion); + } } diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java index 3bbb542b2cf5a..748acb4a9499e 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TelemetryUtils.java @@ -1,45 +1,44 @@ package com.linkedin.gms.factory.telemetry; import com.linkedin.common.AuditStamp; -import com.linkedin.telemetry.TelemetryClientId; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.telemetry.TelemetryClientId; import java.util.UUID; - import lombok.extern.slf4j.Slf4j; - @Slf4j public final class TelemetryUtils { - public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId"; - public static final String CLIENT_ID_ASPECT = "telemetryClientId"; - - private static String _clientId; + public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId"; + public static final String CLIENT_ID_ASPECT = "telemetryClientId"; + private static String _clientId; - public static String getClientId(EntityService entityService) { - if (_clientId == null) { - createClientIdIfNotPresent(entityService); - RecordTemplate clientIdTemplate = entityService.getLatestAspect(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT); - // Should always be present here from above, so no need for null check - _clientId = ((TelemetryClientId) clientIdTemplate).getClientId(); - } - return _clientId; + public static String getClientId(EntityService entityService) { + if (_clientId == null) { + createClientIdIfNotPresent(entityService); + RecordTemplate clientIdTemplate = + entityService.getLatestAspect(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT); + // Should always be present here from above, so no need for null check + _clientId = ((TelemetryClientId) clientIdTemplate).getClientId(); } - - private static void createClientIdIfNotPresent(EntityService entityService) { - String uuid = UUID.randomUUID().toString(); - TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid); - final AuditStamp clientIdStamp = new AuditStamp(); - clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); - clientIdStamp.setTime(System.currentTimeMillis()); - entityService.ingestAspectIfNotPresent(UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); - } - private TelemetryUtils() { - throw new UnsupportedOperationException(); - } - + return _clientId; + } + + private static void createClientIdIfNotPresent(EntityService entityService) { + String uuid = UUID.randomUUID().toString(); + TelemetryClientId clientId = new TelemetryClientId().setClientId(uuid); + final AuditStamp clientIdStamp = new AuditStamp(); + clientIdStamp.setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)); + clientIdStamp.setTime(System.currentTimeMillis()); + entityService.ingestAspectIfNotPresent( + UrnUtils.getUrn(CLIENT_ID_URN), CLIENT_ID_ASPECT, clientId, clientIdStamp, null); + } + + private TelemetryUtils() { + throw new UnsupportedOperationException(); + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java index 
bb166af5501b3..4e858fb5cdefd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java @@ -1,9 +1,9 @@ package com.linkedin.gms.factory.telemetry; import com.datahub.telemetry.TrackingService; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.version.GitVersion; import com.mixpanel.mixpanelapi.MessageBuilder; import com.mixpanel.mixpanelapi.MixpanelAPI; @@ -15,19 +15,21 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class TrackingServiceFactory { @Autowired(required = false) @Qualifier("mixpanelApi") private MixpanelAPI _mixpanelAPI; + @Autowired(required = false) @Qualifier("mixpanelMessageBuilder") private MessageBuilder _mixpanelMessageBuilder; + @Autowired @Qualifier("dataHubSecretService") private SecretService _secretService; + @Autowired @Qualifier("entityService") private EntityService _entityService; @@ -40,7 +42,11 @@ public class TrackingServiceFactory { @ConditionalOnProperty("telemetry.enabledServer") @Scope("singleton") protected TrackingService getInstance() throws Exception { - return new TrackingService(this._mixpanelAPI, this._mixpanelMessageBuilder, this._secretService, - this._entityService, this._gitVersion); + return new TrackingService( + this._mixpanelAPI, + this._mixpanelMessageBuilder, + this._secretService, + this._entityService, + this._gitVersion); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java index 89a7e7dd8d71a..f1b040ed78f86 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/EntityChangeEventGeneratorRegistryFactory.java @@ -1,16 +1,18 @@ package com.linkedin.gms.factory.timeline; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemRestliEntityClient; import com.linkedin.metadata.timeline.eventgenerator.AssertionRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DataProcessInstanceRunEventChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DatasetPropertiesChangeEventGenerator; -import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermInfoChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.DeprecationChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.EditableDatasetPropertiesChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.EditableSchemaMetadataChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.EntityKeyChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.GlobalTagsChangeEventGenerator; +import 
com.linkedin.metadata.timeline.eventgenerator.GlossaryTermInfoChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.GlossaryTermsChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.InstitutionalMemoryChangeEventGenerator; import com.linkedin.metadata.timeline.eventgenerator.OwnershipChangeEventGenerator; @@ -25,35 +27,38 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.metadata.Constants.*; - - @Configuration public class EntityChangeEventGeneratorRegistryFactory { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "entityChangeEventGeneratorRegistry") @DependsOn({"restliEntityClient", "systemAuthentication"}) @Singleton @Nonnull - protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry entityChangeEventGeneratorRegistry() { - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry + entityChangeEventGeneratorRegistry() { + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final Authentication systemAuthentication = applicationContext.getBean(Authentication.class); - final com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry registry = - new com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry(); + final com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry + registry = + new com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGeneratorRegistry(); registry.register(SCHEMA_METADATA_ASPECT_NAME, new SchemaMetadataChangeEventGenerator()); - registry.register(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataChangeEventGenerator()); + registry.register( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, new EditableSchemaMetadataChangeEventGenerator()); registry.register(GLOBAL_TAGS_ASPECT_NAME, new GlobalTagsChangeEventGenerator()); registry.register(GLOSSARY_TERMS_ASPECT_NAME, new GlossaryTermsChangeEventGenerator()); registry.register(OWNERSHIP_ASPECT_NAME, new OwnershipChangeEventGenerator()); - registry.register(INSTITUTIONAL_MEMORY_ASPECT_NAME, new InstitutionalMemoryChangeEventGenerator()); + registry.register( + INSTITUTIONAL_MEMORY_ASPECT_NAME, new InstitutionalMemoryChangeEventGenerator()); registry.register(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesChangeEventGenerator()); registry.register(GLOSSARY_TERM_INFO_ASPECT_NAME, new GlossaryTermInfoChangeEventGenerator()); registry.register(DOMAINS_ASPECT_NAME, new SingleDomainChangeEventGenerator()); registry.register(DATASET_PROPERTIES_ASPECT_NAME, new DatasetPropertiesChangeEventGenerator()); - registry.register(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, new EditableDatasetPropertiesChangeEventGenerator()); + registry.register( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + new EditableDatasetPropertiesChangeEventGenerator()); // Entity Lifecycle Differs registry.register(DATASET_KEY_ASPECT_NAME, new EntityKeyChangeEventGenerator<>()); @@ -73,7 +78,8 @@ protected com.linkedin.metadata.timeline.eventgenerator.EntityChangeEventGenerat registry.register(ASSERTION_RUN_EVENT_ASPECT_NAME, new AssertionRunEventChangeEventGenerator()); // Data Process Instance differs - 
registry.register(DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + registry.register( + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, new DataProcessInstanceRunEventChangeEventGenerator(entityClient)); // TODO: Add ML models. diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java index baa22d401387f..bc121da4e43dd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeline/TimelineServiceFactory.java @@ -1,19 +1,17 @@ package com.linkedin.gms.factory.timeline; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeline.TimelineServiceImpl; +import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; import org.springframework.context.annotation.PropertySource; -import javax.annotation.Nonnull; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class TimelineServiceFactory { @@ -21,7 +19,8 @@ public class TimelineServiceFactory { @Bean(name = "timelineService") @DependsOn({"entityAspectDao", "entityService", "entityRegistry"}) @Nonnull - protected TimelineService timelineService(@Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry) { + protected TimelineService timelineService( + @Qualifier("entityAspectDao") AspectDao aspectDao, EntityRegistry entityRegistry) { return new TimelineServiceImpl(aspectDao, entityRegistry); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java index e3cc772f21c40..bba82bb5d0569 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/ElasticSearchTimeseriesAspectServiceFactory.java @@ -2,8 +2,8 @@ import com.linkedin.gms.factory.entityregistry.EntityRegistryFactory; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.timeseries.elastic.ElasticSearchTimeseriesAspectService; import com.linkedin.metadata.timeseries.elastic.indexbuilder.TimeseriesAspectIndexBuilders; import javax.annotation.Nonnull; @@ -14,7 +14,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @Import({BaseElasticSearchComponentsFactory.class, 
EntityRegistryFactory.class}) @@ -30,8 +29,13 @@ public class ElasticSearchTimeseriesAspectServiceFactory { @Bean(name = "elasticSearchTimeseriesAspectService") @Nonnull protected ElasticSearchTimeseriesAspectService getInstance() { - return new ElasticSearchTimeseriesAspectService(components.getSearchClient(), components.getIndexConvention(), - new TimeseriesAspectIndexBuilders(components.getIndexBuilder(), entityRegistry, - components.getIndexConvention()), entityRegistry, components.getBulkProcessor(), components.getNumRetries()); + return new ElasticSearchTimeseriesAspectService( + components.getSearchClient(), + components.getIndexConvention(), + new TimeseriesAspectIndexBuilders( + components.getIndexBuilder(), entityRegistry, components.getIndexConvention()), + entityRegistry, + components.getBulkProcessor(), + components.getNumRetries()); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java index 76090770ace11..7d4afa661aba0 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/timeseries/TimeseriesAspectServiceFactory.java @@ -10,7 +10,6 @@ import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; - @Configuration @Import({ElasticSearchTimeseriesAspectServiceFactory.class}) public class TimeseriesAspectServiceFactory { diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java index d2bd89de8767a..03e066a912e44 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java @@ -2,12 +2,14 @@ import com.datahub.authentication.Authentication; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.restli.DefaultRestliClientFactory; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.r2.transport.http.client.HttpClientFactory; import com.linkedin.restli.client.Client; import com.linkedin.usage.UsageClient; +import java.util.HashMap; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -15,10 +17,6 @@ import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; -import java.util.HashMap; -import java.util.Map; - - @Configuration @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class UsageClientFactory { @@ -49,13 +47,19 @@ public class UsageClientFactory { private ConfigurationProvider configurationProvider; @Bean("usageClient") - public UsageClient getUsageClient(@Qualifier("systemAuthentication") final Authentication systemAuthentication) { + public UsageClient getUsageClient( + @Qualifier("systemAuthentication") final 
Authentication systemAuthentication) {      Map<String, String> params = new HashMap<>();      params.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, String.valueOf(timeoutMs));  -    Client restClient = DefaultRestliClientFactory.getRestLiClient(gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); -    return new UsageClient(restClient, new ExponentialBackoff(retryInterval), numRetries, systemAuthentication, -        configurationProvider.getCache().getClient().getUsageClient()); +    Client restClient = +        DefaultRestliClientFactory.getRestLiClient( +            gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); +    return new UsageClient( +        restClient, +        new ExponentialBackoff(retryInterval), +        numRetries, +        systemAuthentication, +        configurationProvider.getCache().getClient().getUsageClient());    } } - diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java index 811ea84bc7240..2d1b79fdace48 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapManager.java @@ -7,10 +7,7 @@ import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component;  - -/** - * Responsible for coordinating boot-time logic. - */ +/** Responsible for coordinating boot-time logic. */ @Slf4j @Component public class BootstrapManager { @@ -30,22 +27,39 @@ public void start() {      for (int i = 0; i < stepsToExecute.size(); i++) {        final BootstrapStep step = stepsToExecute.get(i);        if (step.getExecutionMode() == BootstrapStep.ExecutionMode.BLOCKING) { -        log.info("Executing bootstrap step {}/{} with name {}...", i + 1, stepsToExecute.size(), step.name()); +        log.info( +            "Executing bootstrap step {}/{} with name {}...", +            i + 1, +            stepsToExecute.size(), +            step.name());          try {            step.execute();          } catch (Exception e) { -          log.error(String.format("Caught exception while executing bootstrap step %s. Exiting...", step.name()), e); +          log.error( +              String.format( +                  "Caught exception while executing bootstrap step %s. Exiting...", step.name()), +              e);            System.exit(1);          }        } else { // Async -        log.info("Starting asynchronous bootstrap step {}/{} with name {}...", i + 1, stepsToExecute.size(), step.name()); -        CompletableFuture.runAsync(() -> { -          try { -            step.execute(); -          } catch (Exception e) { -            log.error(String.format("Caught exception while executing bootstrap step %s. Continuing...", step.name()), e); -          } -        }, _asyncExecutor); +        log.info( +            "Starting asynchronous bootstrap step {}/{} with name {}...", +            i + 1, +            stepsToExecute.size(), +            step.name()); +        CompletableFuture.runAsync( +            () -> { +              try { +                step.execute(); +              } catch (Exception e) { +                log.error( +                    String.format( +                        "Caught exception while executing bootstrap step %s. 
Continuing...", + step.name()), + e); + } + }, + _asyncExecutor); } } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java index 876a0871fa4cb..dc82fc4907edc 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/BootstrapStep.java @@ -10,29 +10,19 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.upgrade.DataHubUpgradeResult; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; - -/** - * A single step in the Bootstrap process. - */ +/** A single step in the Bootstrap process. */ public interface BootstrapStep { - /** - * A human-readable name for the boot step. - */ + /** A human-readable name for the boot step. */ String name(); - /** - * Execute a boot-time step, or throw an exception on failure. - */ + /** Execute a boot-time step, or throw an exception on failure. */ void execute() throws Exception; - /** - * Return the execution mode of this step - */ + /** Return the execution mode of this step */ @Nonnull default ExecutionMode getExecutionMode() { return ExecutionMode.BLOCKING; @@ -46,16 +36,17 @@ enum ExecutionMode { } static Urn getUpgradeUrn(String upgradeId) { - return EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(upgradeId), - Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + return EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } static void setUpgradeResult(Urn urn, EntityService entityService) throws URISyntaxException { - final AuditStamp auditStamp = new AuditStamp() + final AuditStamp auditStamp = + new AuditStamp() .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) .setTime(System.currentTimeMillis()); - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult() - .setTimestampMs(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(urn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java index 032b934a7ba87..801a902b7f835 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/OnBootApplicationListener.java @@ -16,24 +16,27 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Configuration; import org.springframework.context.event.ContextRefreshedEvent; import org.springframework.context.event.EventListener; import org.springframework.stereotype.Component; import org.springframework.web.context.WebApplicationContext; -import org.springframework.context.annotation.Configuration; - -/** - * Responsible for coordinating starting steps that happen before the application starts up. 
- */ +/** Responsible for coordinating starting steps that happen before the application starts up. */ @Configuration @Slf4j @Component public class OnBootApplicationListener { -  private static final Set<Integer> ACCEPTED_HTTP_CODES = Set.of(HttpStatus.SC_OK, HttpStatus.SC_MOVED_PERMANENTLY, -      HttpStatus.SC_MOVED_TEMPORARILY, HttpStatus.SC_FORBIDDEN, HttpStatus.SC_UNAUTHORIZED); +  private static final Set<Integer> ACCEPTED_HTTP_CODES = +      Set.of( +          HttpStatus.SC_OK, +          HttpStatus.SC_MOVED_PERMANENTLY, +          HttpStatus.SC_MOVED_TEMPORARILY, +          HttpStatus.SC_FORBIDDEN, +          HttpStatus.SC_UNAUTHORIZED);  -  private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = String.format("%s:", WebApplicationContext.class.getName()); +  private static final String ROOT_WEB_APPLICATION_CONTEXT_ID = +      String.format("%s:", WebApplicationContext.class.getName());    private final CloseableHttpClient httpClient = HttpClients.createDefault(); @@ -52,8 +55,10 @@ public class OnBootApplicationListener {    @EventListener(ContextRefreshedEvent.class)   public void onApplicationEvent(@Nonnull ContextRefreshedEvent event) { -    log.warn("OnBootApplicationListener context refreshed! {} event: {}", -        ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId()), event); +    log.warn( +        "OnBootApplicationListener context refreshed! {} event: {}", +        ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId()), +        event);      String schemaRegistryType = provider.getKafka().getSchemaRegistry().getType();      if (ROOT_WEB_APPLICATION_CONTEXT_ID.equals(event.getApplicationContext().getId())) {        if (InternalSchemaRegistryFactory.TYPE.equals(schemaRegistryType)) { @@ -66,29 +71,31 @@ public void onApplicationEvent(@Nonnull ContextRefreshedEvent event) {    public Runnable isSchemaRegistryAPIServletReady() {      return () -> { -      final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); -      int timeouts = _servletsWaitTimeout; -      boolean openAPIServeletReady = false; -      while (!openAPIServeletReady && timeouts > 0) { -        try { -          log.info("Sleeping for 1 second"); -          Thread.sleep(1000); -          StatusLine statusLine = httpClient.execute(request).getStatusLine(); -          if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { -            log.info("Connected! Authentication not tested."); -            openAPIServeletReady = true; -          } -        } catch (IOException | InterruptedException e) { -          log.info("Failed to connect to open servlet: {}", e.getMessage()); +      final HttpGet request = new HttpGet(provider.getKafka().getSchemaRegistry().getUrl()); +      int timeouts = _servletsWaitTimeout; +      boolean openAPIServeletReady = false; +      while (!openAPIServeletReady && timeouts > 0) { +        try { +          log.info("Sleeping for 1 second"); +          Thread.sleep(1000); +          StatusLine statusLine = httpClient.execute(request).getStatusLine(); +          if (ACCEPTED_HTTP_CODES.contains(statusLine.getStatusCode())) { +            log.info("Connected! 
Authentication not tested."); + openAPIServeletReady = true; } - timeouts--; + } catch (IOException | InterruptedException e) { + log.info("Failed to connect to open servlet: {}", e.getMessage()); } - if (!openAPIServeletReady) { - log.error("Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", timeouts); - System.exit(1); - } else { + timeouts--; + } + if (!openAPIServeletReady) { + log.error( + "Failed to bootstrap DataHub, OpenAPI servlet was not ready after {} seconds", + timeouts); + System.exit(1); + } else { _bootstrapManager.start(); - } + } }; } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java index dbbcf3a139bf1..9ccb2c3f650bd 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/UpgradeStep.java @@ -17,7 +17,6 @@ import java.util.Collections; import lombok.extern.slf4j.Slf4j; - @Slf4j public abstract class UpgradeStep implements BootstrapStep { @@ -30,8 +29,9 @@ public UpgradeStep(EntityService entityService, String version, String upgradeId this._entityService = entityService; this._version = version; this._upgradeId = upgradeId; - this._upgradeUrn = EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(upgradeId), - Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + this._upgradeUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(upgradeId), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); } @Override @@ -47,7 +47,8 @@ public void execute() throws Exception { upgrade(); ingestUpgradeResultAspect(); } catch (Exception e) { - String errorMessage = String.format("Error when running %s for version %s", _upgradeId, _version); + String errorMessage = + String.format("Error when running %s for version %s", _upgradeId, _version); cleanUpgradeAfterError(e, errorMessage); throw new RuntimeException(errorMessage, e); } @@ -62,18 +63,29 @@ public String name() { private boolean hasUpgradeRan() { try { - EntityResponse response = _entityService.getEntityV2(Constants.DATA_HUB_UPGRADE_ENTITY_NAME, _upgradeUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); - - if (response != null && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { - DataMap dataMap = response.getAspects().get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME).getValue().data(); + EntityResponse response = + _entityService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + _upgradeUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); + + if (response != null + && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { + DataMap dataMap = + response + .getAspects() + .get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) + .getValue() + .data(); DataHubUpgradeRequest request = new DataHubUpgradeRequest(dataMap); if (request.hasVersion() && request.getVersion().equals(_version)) { return true; } } } catch (Exception e) { - log.error("Error when checking to see if datahubUpgrade entity exists. Commencing with upgrade...", e); + log.error( + "Error when checking to see if datahubUpgrade entity exists. 
Commencing with upgrade...", + e); return false; } return false; @@ -81,7 +93,9 @@ private boolean hasUpgradeRan() { private void ingestUpgradeRequestAspect() throws URISyntaxException { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final DataHubUpgradeRequest upgradeRequest = new DataHubUpgradeRequest().setTimestampMs(System.currentTimeMillis()).setVersion(_version); @@ -97,8 +111,11 @@ private void ingestUpgradeRequestAspect() throws URISyntaxException { private void ingestUpgradeResultAspect() throws URISyntaxException { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(_upgradeUrn); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java index 4aed7791470da..8b0c72c4c91d5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/dependencies/BootstrapDependency.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.boot.dependencies; -/** - * Empty interface for passing named bean references to bootstrap steps - */ +/** Empty interface for passing named bean references to bootstrap steps */ public interface BootstrapDependency { /** * Execute any dependent methods, avoids increasing module dependencies + * * @return true if the dependency has successfully executed its expected methods, false otherwise */ boolean waitForBootstrap(); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java index c4e6c941303c8..70fa91ae61861 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/BootstrapManagerFactory.java @@ -31,7 +31,6 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.transformer.SearchDocumentTransformer; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; @@ -44,10 +43,13 @@ import org.springframework.context.annotation.Scope; import org.springframework.core.io.Resource; - @Configuration -@Import({EntityServiceFactory.class, EntityRegistryFactory.class, EntitySearchServiceFactory.class, - SearchDocumentTransformerFactory.class}) +@Import({ + EntityServiceFactory.class, + EntityRegistryFactory.class, + EntitySearchServiceFactory.class, + SearchDocumentTransformerFactory.class +}) public class 
BootstrapManagerFactory {    @Autowired @@ -82,8 +84,7 @@ public class BootstrapManagerFactory {   @Qualifier("dataHubUpgradeKafkaListener")   private BootstrapDependency _dataHubUpgradeKafkaListener;  -  @Autowired -  private ConfigurationProvider _configurationProvider; +  @Autowired private ConfigurationProvider _configurationProvider;    @Value("${bootstrap.upgradeDefaultBrowsePaths.enabled}")   private Boolean _upgradeDefaultBrowsePathsEnabled; @@ -103,9 +104,15 @@ public class BootstrapManagerFactory {   protected BootstrapManager createInstance() {      final IngestRootUserStep ingestRootUserStep = new IngestRootUserStep(_entityService);      final IngestPoliciesStep ingestPoliciesStep = -        new IngestPoliciesStep(_entityRegistry, _entityService, _entitySearchService, _searchDocumentTransformer, _policiesResource); +        new IngestPoliciesStep( +            _entityRegistry, +            _entityService, +            _entitySearchService, +            _searchDocumentTransformer, +            _policiesResource);      final IngestRolesStep ingestRolesStep = new IngestRolesStep(_entityService, _entityRegistry); -    final IngestDataPlatformsStep ingestDataPlatformsStep = new IngestDataPlatformsStep(_entityService); +    final IngestDataPlatformsStep ingestDataPlatformsStep = +        new IngestDataPlatformsStep(_entityService);      final IngestDataPlatformInstancesStep ingestDataPlatformInstancesStep =         new IngestDataPlatformInstancesStep(_entityService, _migrationsDao);      final RestoreGlossaryIndices restoreGlossaryIndicesStep = @@ -114,28 +121,34 @@ protected BootstrapManager createInstance() {        new IndexDataPlatformsStep(_entityService, _entitySearchService, _entityRegistry);      final RestoreDbtSiblingsIndices restoreDbtSiblingsIndices =         new RestoreDbtSiblingsIndices(_entityService, _entityRegistry); -    final RemoveClientIdAspectStep removeClientIdAspectStep = new RemoveClientIdAspectStep(_entityService); -    final RestoreColumnLineageIndices restoreColumnLineageIndices = new RestoreColumnLineageIndices(_entityService, _entityRegistry); -    final IngestDefaultGlobalSettingsStep ingestSettingsStep = new IngestDefaultGlobalSettingsStep(_entityService); -    final WaitForSystemUpdateStep waitForSystemUpdateStep = new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, -        _configurationProvider); -    final IngestOwnershipTypesStep ingestOwnershipTypesStep = new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); - -    final List<BootstrapStep> finalSteps = new ArrayList<>(ImmutableList.of( -        waitForSystemUpdateStep, -        ingestRootUserStep, -        ingestPoliciesStep, -        ingestRolesStep, -        ingestDataPlatformsStep, -        ingestDataPlatformInstancesStep, -        _ingestRetentionPoliciesStep, -        ingestOwnershipTypesStep, -        ingestSettingsStep, -        restoreGlossaryIndicesStep, -        removeClientIdAspectStep, -        restoreDbtSiblingsIndices, -        indexDataPlatformsStep, -        restoreColumnLineageIndices)); +    final RemoveClientIdAspectStep removeClientIdAspectStep = +        new RemoveClientIdAspectStep(_entityService); +    final RestoreColumnLineageIndices restoreColumnLineageIndices = +        new RestoreColumnLineageIndices(_entityService, _entityRegistry); +    final IngestDefaultGlobalSettingsStep ingestSettingsStep = +        new IngestDefaultGlobalSettingsStep(_entityService); +    final WaitForSystemUpdateStep waitForSystemUpdateStep = +        new WaitForSystemUpdateStep(_dataHubUpgradeKafkaListener, _configurationProvider); +    final IngestOwnershipTypesStep ingestOwnershipTypesStep = +        new IngestOwnershipTypesStep(_entityService, _ownershipTypesResource); + +    final List<BootstrapStep> finalSteps = +        new ArrayList<>( +            ImmutableList.of( +                waitForSystemUpdateStep, +                ingestRootUserStep, + 
ingestPoliciesStep, + ingestRolesStep, + ingestDataPlatformsStep, + ingestDataPlatformInstancesStep, + _ingestRetentionPoliciesStep, + ingestOwnershipTypesStep, + ingestSettingsStep, + restoreGlossaryIndicesStep, + removeClientIdAspectStep, + restoreDbtSiblingsIndices, + indexDataPlatformsStep, + restoreColumnLineageIndices)); if (_upgradeDefaultBrowsePathsEnabled) { finalSteps.add(new UpgradeDefaultBrowsePathsStep(_entityService)); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java index e038cb230c458..2436938c6c026 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/factories/IngestRetentionPoliciesStepFactory.java @@ -1,10 +1,10 @@ package com.linkedin.metadata.boot.factories; import com.linkedin.gms.factory.entity.RetentionServiceFactory; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.boot.steps.IngestRetentionPoliciesStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RetentionService; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -15,7 +15,6 @@ import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.Scope; - @Configuration @Import({RetentionServiceFactory.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) @@ -42,6 +41,11 @@ public class IngestRetentionPoliciesStepFactory { @Scope("singleton") @Nonnull protected IngestRetentionPoliciesStep createInstance() { - return new IngestRetentionPoliciesStep(_retentionService, _entityService, _enableRetention, _applyOnBootstrap, _pluginRegistryPath); + return new IngestRetentionPoliciesStep( + _retentionService, + _entityService, + _enableRetention, + _applyOnBootstrap, + _pluginRegistryPath); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java index 11d12072e12b7..263cc335a8a40 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/DataHubUpgradeKafkaListener.java @@ -7,11 +7,9 @@ import com.linkedin.metadata.version.GitVersion; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; import com.linkedin.mxe.Topics; - import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; - import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.GenericRecord; import org.apache.kafka.clients.consumer.Consumer; @@ -27,7 +25,8 @@ import org.springframework.kafka.listener.MessageListenerContainer; import org.springframework.stereotype.Component; -// We don't disable this on GMS since we want GMS to also wait until the system is ready to read in case of +// We don't disable this on GMS since we want GMS to also wait until the system is ready to 
read in +// case of // backwards incompatible query logic dependent on system updates. @Component("dataHubUpgradeKafkaListener") @Slf4j @@ -36,14 +35,17 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap private final KafkaListenerEndpointRegistry registry; - private static final String CONSUMER_GROUP = "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}"; + private static final String CONSUMER_GROUP = + "${DATAHUB_UPGRADE_HISTORY_KAFKA_CONSUMER_GROUP_ID:generic-duhe-consumer-job-client}"; private static final String SUFFIX = "temp"; - public static final String TOPIC_NAME = "${DATAHUB_UPGRADE_HISTORY_TOPIC_NAME:" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME + "}"; + public static final String TOPIC_NAME = + "${DATAHUB_UPGRADE_HISTORY_TOPIC_NAME:" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME + "}"; private final DefaultKafkaConsumerFactory _defaultKafkaConsumerFactory; @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") private String revision; + private final GitVersion _gitVersion; private final ConfigurationProvider _configurationProvider; @@ -53,35 +55,48 @@ public class DataHubUpgradeKafkaListener implements ConsumerSeekAware, Bootstrap @Value(TOPIC_NAME) private String topicName; - private final static AtomicBoolean IS_UPDATED = new AtomicBoolean(false); + private static final AtomicBoolean IS_UPDATED = new AtomicBoolean(false); - public DataHubUpgradeKafkaListener(KafkaListenerEndpointRegistry registry, - @Qualifier("duheKafkaConsumerFactory") DefaultKafkaConsumerFactory defaultKafkaConsumerFactory, - GitVersion gitVersion, - ConfigurationProvider configurationProvider) { + public DataHubUpgradeKafkaListener( + KafkaListenerEndpointRegistry registry, + @Qualifier("duheKafkaConsumerFactory") + DefaultKafkaConsumerFactory defaultKafkaConsumerFactory, + GitVersion gitVersion, + ConfigurationProvider configurationProvider) { this.registry = registry; this._defaultKafkaConsumerFactory = defaultKafkaConsumerFactory; this._gitVersion = gitVersion; this._configurationProvider = configurationProvider; } - // Constructs a consumer to read determine final offset to assign, prevents re-reading whole topic to get the latest version + // Constructs a consumer to read determine final offset to assign, prevents re-reading whole topic + // to get the latest version @Override - public void onPartitionsAssigned(Map assignments, ConsumerSeekCallback callback) { + public void onPartitionsAssigned( + Map assignments, ConsumerSeekCallback callback) { try (Consumer kafkaConsumer = _defaultKafkaConsumerFactory.createConsumer(consumerGroup, SUFFIX)) { final Map offsetMap = kafkaConsumer.endOffsets(assignments.keySet()); assignments.entrySet().stream() .filter(entry -> topicName.equals(entry.getKey().topic())) - .forEach(entry -> { - log.info("Partition: {} Current Offset: {}", entry.getKey(), offsetMap.get(entry.getKey())); - long newOffset = offsetMap.get(entry.getKey()) - 1; - callback.seek(entry.getKey().topic(), entry.getKey().partition(), Math.max(0, newOffset)); - }); + .forEach( + entry -> { + log.info( + "Partition: {} Current Offset: {}", + entry.getKey(), + offsetMap.get(entry.getKey())); + long newOffset = offsetMap.get(entry.getKey()) - 1; + callback.seek( + entry.getKey().topic(), entry.getKey().partition(), Math.max(0, newOffset)); + }); } } - @KafkaListener(id = CONSUMER_GROUP, topics = {TOPIC_NAME}, containerFactory = "duheKafkaEventConsumer", concurrency = "1") + @KafkaListener( + id = CONSUMER_GROUP, + topics = 
{TOPIC_NAME}, +      containerFactory = "duheKafkaEventConsumer", +      concurrency = "1")    public void checkSystemVersion(final ConsumerRecord<String, GenericRecord> consumerRecord) {      final GenericRecord record = consumerRecord.value();      final String expectedVersion = String.format("%s-%s", _gitVersion.getVersion(), revision); @@ -96,7 +111,9 @@ public void checkSystemVersion(final ConsumerRecord consu          log.warn("Wait for system update is disabled. Proceeding with startup.");          IS_UPDATED.getAndSet(true);        } else { -        log.warn("System version is not up to date: {}. Waiting for datahub-upgrade to complete...", expectedVersion); +        log.warn( +            "System version is not up to date: {}. Waiting for datahub-upgrade to complete...", +            expectedVersion);        }      } catch (Exception e) { @@ -113,15 +130,19 @@ public void waitForUpdate() {        IS_UPDATED.getAndSet(true);      }      int maxBackOffs = Integer.parseInt(_configurationProvider.getSystemUpdate().getMaxBackOffs()); -    long initialBackOffMs = Long.parseLong(_configurationProvider.getSystemUpdate().getInitialBackOffMs()); -    int backOffFactor = Integer.parseInt(_configurationProvider.getSystemUpdate().getBackOffFactor()); +    long initialBackOffMs = +        Long.parseLong(_configurationProvider.getSystemUpdate().getInitialBackOffMs()); +    int backOffFactor = +        Integer.parseInt(_configurationProvider.getSystemUpdate().getBackOffFactor());      long backOffMs = initialBackOffMs;      for (int i = 0; i < maxBackOffs; i++) {        if (IS_UPDATED.get()) {          log.debug("Finished waiting for updated indices.");          try { -          log.info("Containers: {}", registry.getListenerContainers().stream() +          log.info( +              "Containers: {}", +              registry.getListenerContainers().stream()                .map(MessageListenerContainer::getListenerId)                .collect(Collectors.toList()));            registry.getListenerContainer(consumerGroup).stop(); @@ -142,8 +163,9 @@ public void waitForUpdate()      if (!IS_UPDATED.get()) { -      throw new IllegalStateException("Indices are not updated after exponential backoff." 
+ + " Please try restarting and consider increasing back off settings.");      }    } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java index 408b212d52f48..e631f776abd08 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHEDeserializer.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.kafka;  +import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; +import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; + import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; @@ -7,61 +10,55 @@ import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroDeserializer; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.util.Map; +import lombok.extern.slf4j.Slf4j;  -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; -import static com.linkedin.metadata.boot.kafka.MockDUHESerializer.topicToSubjectName; - -/** - * Used for early bootstrap to avoid contact with not yet existing schema registry - */ +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ @Slf4j public class MockDUHEDeserializer extends KafkaAvroDeserializer {  -    private String topicName; +  private String topicName;  -    public MockDUHEDeserializer() { -        this.schemaRegistry = buildMockSchemaRegistryClient(); -    } +  public MockDUHEDeserializer() { +    this.schemaRegistry = buildMockSchemaRegistryClient(); +  }  -    public MockDUHEDeserializer(SchemaRegistryClient client) { -        super(client); -        this.schemaRegistry = buildMockSchemaRegistryClient(); -    } +  public MockDUHEDeserializer(SchemaRegistryClient client) { +    super(client); +    this.schemaRegistry = buildMockSchemaRegistryClient(); +  }  -    public MockDUHEDeserializer(SchemaRegistryClient client, Map<String, ?> props) { -        super(client, props); -        this.schemaRegistry = buildMockSchemaRegistryClient(); -    } +  public MockDUHEDeserializer(SchemaRegistryClient client, Map<String, ?> props) { +    super(client, props); +    this.schemaRegistry = buildMockSchemaRegistryClient(); +  }  -    @Override -    public void configure(Map<String, ?> configs, boolean isKey) { -        super.configure(configs, isKey); -        topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); -    } +  @Override +  public void configure(Map<String, ?> configs, boolean isKey) { +    super.configure(configs, isKey); +    topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); +  }  -    private MockSchemaRegistryClient buildMockSchemaRegistryClient() { -        MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); -        try { -            schemaRegistry.register(topicToSubjectName(topicName), -                new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); -            return schemaRegistry; -        } catch (IOException | RestClientException e) { -            throw new RuntimeException(e); -        } +  private MockSchemaRegistryClient buildMockSchemaRegistryClient() { +    MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient2(); +    try { +      schemaRegistry.register( 
topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); +      return schemaRegistry; +    } catch (IOException | RestClientException e) { +      throw new RuntimeException(e);     }   }  -  public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { -    /** -     * Previously used topics can have schema ids > 1 which fully match; -     * however, we are replacing that registry, so force the schema id to 1 -     */ -    @Override -    public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { -      return super.getSchemaById(1); -    } +  public static class MockSchemaRegistryClient2 extends MockSchemaRegistryClient { +    /** +     * Previously used topics can have schema ids > 1 which fully match; however, we are replacing +     * that registry, so force the schema id to 1 +     */ +    @Override +    public synchronized ParsedSchema getSchemaById(int id) throws IOException, RestClientException { +      return super.getSchemaById(1);     }   } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java index 1421f952289b3..36fe514d5536f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/kafka/MockDUHESerializer.java @@ -1,60 +1,57 @@ package com.linkedin.metadata.boot.kafka;  +import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; + import com.linkedin.metadata.EventUtils; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import io.confluent.kafka.serializers.KafkaAvroSerializer; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.util.Map; +import lombok.extern.slf4j.Slf4j;  -import static com.linkedin.gms.factory.kafka.schemaregistry.DUHESchemaRegistryFactory.DUHE_SCHEMA_REGISTRY_TOPIC_KEY; - -/** - * Used for early bootstrap to avoid contact with not yet existing schema registry - */ +/** Used for early bootstrap to avoid contact with not yet existing schema registry */ @Slf4j public class MockDUHESerializer extends KafkaAvroSerializer {  -    private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; - -    private String topicName; - -    public MockDUHESerializer() { -        this.schemaRegistry = buildMockSchemaRegistryClient(); +  private static final String DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX = "-value"; + +  private String topicName; + +  public MockDUHESerializer() { +    this.schemaRegistry = buildMockSchemaRegistryClient(); +  } + +  public MockDUHESerializer(SchemaRegistryClient client) { +    super(client); +    this.schemaRegistry = buildMockSchemaRegistryClient(); +  } + +  public MockDUHESerializer(SchemaRegistryClient client, Map<String, ?> props) { +    super(client, props); +    this.schemaRegistry = buildMockSchemaRegistryClient(); +  } + +  @Override +  public void configure(Map<String, ?> configs, boolean isKey) { +    super.configure(configs, isKey); +    topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); +  } + +  private MockSchemaRegistryClient buildMockSchemaRegistryClient() { +    MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient(); +    try { 
schemaRegistry.register( +          topicToSubjectName(topicName), new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); +      return schemaRegistry; +    } catch (IOException | RestClientException e) { +      throw new RuntimeException(e);     }   }  -    public MockDUHESerializer(SchemaRegistryClient client) { -        super(client); -        this.schemaRegistry = buildMockSchemaRegistryClient(); -    } - -    public MockDUHESerializer(SchemaRegistryClient client, Map<String, ?> props) { -        super(client, props); -        this.schemaRegistry = buildMockSchemaRegistryClient(); -    } - -    @Override -    public void configure(Map<String, ?> configs, boolean isKey) { -        super.configure(configs, isKey); -        topicName = configs.get(DUHE_SCHEMA_REGISTRY_TOPIC_KEY).toString(); -    } - -    private MockSchemaRegistryClient buildMockSchemaRegistryClient() { -        MockSchemaRegistryClient schemaRegistry = new MockSchemaRegistryClient(); -        try { -            schemaRegistry.register(topicToSubjectName(topicName), -                new AvroSchema(EventUtils.ORIGINAL_DUHE_AVRO_SCHEMA)); -            return schemaRegistry; -        } catch (IOException | RestClientException e) { -            throw new RuntimeException(e); -        } -    } - -    public static String topicToSubjectName(String topicName) { -        return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; -    } +  public static String topicToSubjectName(String topicName) { +    return topicName + DATAHUB_UPGRADE_HISTORY_EVENT_SUBJECT_SUFFIX; +  } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java index ea9ac57778550..770c0d2840fe8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps;  +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -21,28 +23,24 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class BackfillBrowsePathsV2Step extends UpgradeStep {  -  private static final Set<String> ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( -      Constants.DATASET_ENTITY_NAME, -      Constants.DASHBOARD_ENTITY_NAME, -      Constants.CHART_ENTITY_NAME, -      Constants.DATA_JOB_ENTITY_NAME, -      Constants.DATA_FLOW_ENTITY_NAME, -      Constants.ML_MODEL_ENTITY_NAME, -      Constants.ML_MODEL_GROUP_ENTITY_NAME, -      Constants.ML_FEATURE_TABLE_ENTITY_NAME, -      Constants.ML_FEATURE_ENTITY_NAME -  ); +  private static final Set<String> ENTITY_TYPES_TO_MIGRATE = +      ImmutableSet.of( +          Constants.DATASET_ENTITY_NAME, +          Constants.DASHBOARD_ENTITY_NAME, +          Constants.CHART_ENTITY_NAME, +          Constants.DATA_JOB_ENTITY_NAME, +          Constants.DATA_FLOW_ENTITY_NAME, +          Constants.ML_MODEL_ENTITY_NAME, +          Constants.ML_MODEL_GROUP_ENTITY_NAME, +          Constants.ML_FEATURE_TABLE_ENTITY_NAME, +          Constants.ML_FEATURE_ENTITY_NAME);    private static final String VERSION = "2";   private static final String UPGRADE_ID = "backfill-default-browse-paths-v2-step";   private static final Integer BATCH_SIZE = 5000; @@ -63,14 +61,18 @@ public ExecutionMode getExecutionMode() { 
@Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -78,7 +80,7 @@ public void upgrade() throws Exception { } private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, String scrollId) - throws Exception { + throws Exception { // Condition: has `browsePaths` AND does NOT have `browsePathV2` Criterion missingBrowsePathV2 = new Criterion(); @@ -102,16 +104,9 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S Filter filter = new Filter(); filter.setOr(conjunctiveCriterionArray); - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - "5m", - BATCH_SIZE, - null - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), "*", filter, null, scrollId, "5m", BATCH_SIZE, null); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -121,7 +116,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -136,12 +135,9 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - false - ); + _entityService.ingestProposal(proposal, auditStamp, false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java index b26eb67465c0d..c46cfdd61158d 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IndexDataPlatformsStep.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import 
com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.search.EntitySearchService; - import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -25,7 +24,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IndexDataPlatformsStep extends UpgradeStep { private static final String VERSION = "1"; @@ -35,7 +33,9 @@ public class IndexDataPlatformsStep extends UpgradeStep { private final EntitySearchService _entitySearchService; private final EntityRegistry _entityRegistry; - public IndexDataPlatformsStep(EntityService entityService, EntitySearchService entitySearchService, + public IndexDataPlatformsStep( + EntityService entityService, + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entitySearchService = entitySearchService; @@ -44,11 +44,15 @@ public IndexDataPlatformsStep(EntityService entityService, EntitySearchService e @Override public void upgrade() throws Exception { - final AspectSpec dataPlatformSpec = _entityRegistry.getEntitySpec(Constants.DATA_PLATFORM_ENTITY_NAME) - .getAspectSpec(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); + final AspectSpec dataPlatformSpec = + _entityRegistry + .getEntitySpec(Constants.DATA_PLATFORM_ENTITY_NAME) + .getAspectSpec(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); getAndReIndexDataPlatforms(auditStamp, dataPlatformSpec); @@ -61,8 +65,8 @@ public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPlatformInfoAspectSpec) - throws Exception { + private int getAndReIndexDataPlatforms( + AuditStamp auditStamp, AspectSpec dataPlatformInfoAspectSpec) throws Exception { ListUrnsResult listResult = _entityService.listUrns(Constants.DATA_PLATFORM_ENTITY_NAME, 0, BATCH_SIZE); @@ -73,9 +77,10 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla } final Map dataPlatformInfoResponses = - _entityService.getEntitiesV2(Constants.DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), - Collections.singleton(Constants.DATA_PLATFORM_INFO_ASPECT_NAME) - ); + _entityService.getEntitiesV2( + Constants.DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + Collections.singleton(Constants.DATA_PLATFORM_INFO_ASPECT_NAME)); // Loop over Data platforms and produce changelog List> futures = new LinkedList<>(); @@ -92,26 +97,32 @@ private int getAndReIndexDataPlatforms(AuditStamp auditStamp, AspectSpec dataPla continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - dpUrn, - Constants.DATA_PLATFORM_ENTITY_NAME, - Constants.DATA_PLATFORM_INFO_ASPECT_NAME, - dataPlatformInfoAspectSpec, - null, - dpInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + dpUrn, + Constants.DATA_PLATFORM_ENTITY_NAME, + Constants.DATA_PLATFORM_INFO_ASPECT_NAME, + dataPlatformInfoAspectSpec, + null, + dpInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + 
.filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return listResult.getTotal(); } @@ -122,6 +133,7 @@ private DataPlatformInfo mapDpInfo(EntityResponse entityResponse) { return null; } - return new DataPlatformInfo(aspectMap.get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); + return new DataPlatformInfo( + aspectMap.get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME).getValue().data()); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java index 30608e984a0f2..ae4baee37c822 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; @@ -13,16 +15,12 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.utils.DataPlatformInstanceUtils; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.LinkedList; import java.util.List; import java.util.Optional; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -62,7 +60,9 @@ public void execute() throws Exception { int start = 0; while (start < numEntities) { - log.info("Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", start, + log.info( + "Reading urns {} to {} from the aspects table to generate dataplatform instance aspects", + start, start + BATCH_SIZE); List items = new LinkedList<>(); @@ -71,7 +71,8 @@ public void execute() throws Exception { Urn urn = Urn.createFromString(urnStr); Optional dataPlatformInstance = getDataPlatformInstance(urn); if (dataPlatformInstance.isPresent()) { - items.add(UpsertBatchItem.builder() + items.add( + UpsertBatchItem.builder() .urn(urn) .aspectName(DATA_PLATFORM_INSTANCE_ASPECT_NAME) .aspect(dataPlatformInstance.get()) @@ -80,10 +81,14 @@ public void execute() throws Exception { } final AuditStamp aspectAuditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - _entityService.ingestAspects(AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true); - - log.info("Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + _entityService.ingestAspects( + AspectsBatchImpl.builder().items(items).build(), aspectAuditStamp, true, true); + + log.info( + "Finished ingesting DataPlatformInstance for urn {} to {}", start, start + BATCH_SIZE); start += BATCH_SIZE; } log.info("Finished ingesting DataPlatformInstance for all entities"); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java 
b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java index e4ad215eec864..db8cad65caa8a 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDataPlatformsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -10,6 +12,8 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.boot.BootstrapStep; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; @@ -17,16 +21,10 @@ import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestDataPlatformsStep implements BootstrapStep { @@ -44,45 +42,60 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 1. Read from the file into JSON. - final JsonNode dataPlatforms = mapper.readTree(new ClassPathResource("./boot/data_platforms.json").getFile()); + final JsonNode dataPlatforms = + mapper.readTree(new ClassPathResource("./boot/data_platforms.json").getFile()); if (!dataPlatforms.isArray()) { - throw new RuntimeException(String.format("Found malformed data platforms file, expected an Array but found %s", - dataPlatforms.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed data platforms file, expected an Array but found %s", + dataPlatforms.getNodeType())); } // 2. For each JSON object, cast into a DataPlatformSnapshot object. 
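The parsing code on both sides of this hunk leans on two Jackson idioms shared by all of these bootstrap steps: raising the maximum JSON string length (Jackson 2.15+ enforces a cap by default, 20 million characters in recent versions) before reading a potentially large classpath resource, and failing fast when the root node is not the expected container type. A compact, runnable sketch under those assumptions; the environment variable name and sample payload here are illustrative:

import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

class BootJsonSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Lift Jackson's default string-length cap so large bootstrap files still parse.
    int maxSize = Integer.parseInt(System.getenv().getOrDefault("MAX_STRING_LENGTH", "16000000"));
    mapper
        .getFactory()
        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());

    JsonNode platforms = mapper.readTree("[{\"urn\": \"urn:li:dataPlatform:example\"}]");
    if (!platforms.isArray()) { // fail fast on malformed bootstrap data
      throw new RuntimeException("Expected an Array but found " + platforms.getNodeType());
    }
    for (JsonNode platform : platforms) {
      System.out.println(platform.get("urn").asText()); // each element carries its own urn
    }
  }
}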
-    List<UpsertBatchItem> dataPlatformAspects = StreamSupport.stream(
-        Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED), false)
-        .map(dataPlatform -> {
-          final String urnString;
-          final Urn urn;
-          try {
-            urnString = dataPlatform.get("urn").asText();
-            urn = Urn.createFromString(urnString);
-          } catch (URISyntaxException e) {
-            log.error("Malformed urn: {}", dataPlatform.get("urn").asText());
-            throw new RuntimeException("Malformed urn", e);
-          }
-
-          final DataPlatformInfo info =
-              RecordUtils.toRecordTemplate(DataPlatformInfo.class, dataPlatform.get("aspect").toString());
-
-          return UpsertBatchItem.builder()
+    List<UpsertBatchItem> dataPlatformAspects =
+        StreamSupport.stream(
+                Spliterators.spliteratorUnknownSize(dataPlatforms.iterator(), Spliterator.ORDERED),
+                false)
+            .map(
+                dataPlatform -> {
+                  final String urnString;
+                  final Urn urn;
+                  try {
+                    urnString = dataPlatform.get("urn").asText();
+                    urn = Urn.createFromString(urnString);
+                  } catch (URISyntaxException e) {
+                    log.error("Malformed urn: {}", dataPlatform.get("urn").asText());
+                    throw new RuntimeException("Malformed urn", e);
+                  }
+
+                  final DataPlatformInfo info =
+                      RecordUtils.toRecordTemplate(
+                          DataPlatformInfo.class, dataPlatform.get("aspect").toString());
+
+                  return UpsertBatchItem.builder()
                       .urn(urn)
                       .aspectName(PLATFORM_ASPECT_NAME)
                       .aspect(info)
                       .build(_entityService.getEntityRegistry());
-        }).collect(Collectors.toList());
-
-    _entityService.ingestAspects(AspectsBatchImpl.builder().items(dataPlatformAspects).build(),
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
-        true,
-        false);
+                })
+            .collect(Collectors.toList());
+
+    _entityService.ingestAspects(
+        AspectsBatchImpl.builder().items(dataPlatformAspects).build(),
+        new AuditStamp()
+            .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis()),
+        true,
+        false);
   }
 }
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java
index 5bc80f46e6478..0b812a6f818f4 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStep.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.boot.steps;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -28,14 +30,12 @@
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
- * This bootstrap step is responsible for ingesting a default Global Settings object if it does not already exist.
+ * This bootstrap step is responsible for ingesting a default Global Settings object if it does not
+ * already exist.
  *
- * If settings already exist, we merge the defaults and the existing settings such that the container will also
- * get new settings when they are added.
+ * <p>If settings already exist, we merge the defaults and the existing settings such that the
+ * container will also get new settings when they are added.
  */
 @Slf4j
 public class IngestDefaultGlobalSettingsStep implements BootstrapStep {
@@ -49,8 +49,7 @@ public IngestDefaultGlobalSettingsStep(@Nonnull final EntityService entityServic
   }
 
   public IngestDefaultGlobalSettingsStep(
-      @Nonnull final EntityService entityService,
-      @Nonnull final String resourcePath) {
+      @Nonnull final EntityService entityService, @Nonnull final String resourcePath) {
     _entityService = Objects.requireNonNull(entityService);
     _resourcePath = Objects.requireNonNull(resourcePath);
   }
@@ -64,9 +63,13 @@ public String name() {
   public void execute() throws IOException, URISyntaxException {
     final ObjectMapper mapper = new ObjectMapper();
 
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder()
-        .maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    mapper
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
 
     log.info("Ingesting default global settings...");
 
@@ -76,37 +79,45 @@ public void execute() throws IOException, URISyntaxException {
       defaultSettingsObj = mapper.readTree(new ClassPathResource(_resourcePath).getFile());
     } catch (Exception e) {
       throw new RuntimeException(
-          String.format("Failed to parse global settings file. Could not parse valid json at resource path %s",
-              _resourcePath),
-          e);
+          String.format(
+              "Failed to parse global settings file. Could not parse valid json at resource path %s",
+              _resourcePath),
+          e);
     }
 
     if (!defaultSettingsObj.isObject()) {
-      throw new RuntimeException(String.format("Found malformed global settings info file, expected an Object but found %s",
-          defaultSettingsObj.getNodeType()));
+      throw new RuntimeException(
+          String.format(
+              "Found malformed global settings info file, expected an Object but found %s",
+              defaultSettingsObj.getNodeType()));
     }
 
     // 2. Bind the global settings json into a GlobalSettingsInfo aspect.
     GlobalSettingsInfo defaultSettings;
-    defaultSettings = RecordUtils.toRecordTemplate(GlobalSettingsInfo.class, defaultSettingsObj.toString());
-    ValidationResult result = ValidateDataAgainstSchema.validate(
-        defaultSettings,
-        new ValidationOptions(
-            RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT,
-            CoercionMode.NORMAL,
-            UnrecognizedFieldMode.DISALLOW
-        ));
+    defaultSettings =
+        RecordUtils.toRecordTemplate(GlobalSettingsInfo.class, defaultSettingsObj.toString());
+    ValidationResult result =
+        ValidateDataAgainstSchema.validate(
+            defaultSettings,
+            new ValidationOptions(
+                RequiredMode.CAN_BE_ABSENT_IF_HAS_DEFAULT,
+                CoercionMode.NORMAL,
+                UnrecognizedFieldMode.DISALLOW));
     if (!result.isValid()) {
-      throw new RuntimeException(String.format(
-          "Failed to parse global settings file. Provided JSON does not match GlobalSettingsInfo.pdl model. %s", result.getMessages()));
+      throw new RuntimeException(
+          String.format(
+              "Failed to parse global settings file. Provided JSON does not match GlobalSettingsInfo.pdl model. %s",
+              result.getMessages()));
     }
 
     // 3. Get existing settings or empty settings object
    final GlobalSettingsInfo existingSettings = getExistingGlobalSettingsOrEmpty();
 
-    // 4. Merge existing settings onto previous settings.
Be careful - if we change the settings schema dramatically in future we may need to account for that.
-    final GlobalSettingsInfo newSettings = new GlobalSettingsInfo(mergeDataMaps(defaultSettings.data(), existingSettings.data()));
+    // 4. Merge existing settings onto previous settings. Be careful - if we change the settings
+    // schema dramatically in future we may need to account for that.
+    final GlobalSettingsInfo newSettings =
+        new GlobalSettingsInfo(mergeDataMaps(defaultSettings.data(), existingSettings.data()));
 
     // 5. Ingest into DataHub.
     final MetadataChangeProposal proposal = new MetadataChangeProposal();
@@ -118,12 +129,15 @@ public void execute() throws IOException, URISyntaxException {
 
     _entityService.ingestProposal(
         proposal,
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()),
+        new AuditStamp()
+            .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis()),
         false);
   }
 
-  private GlobalSettingsInfo getExistingGlobalSettingsOrEmpty() {
-    RecordTemplate aspect = _entityService.getAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, 0);
+  private GlobalSettingsInfo getExistingGlobalSettingsOrEmpty() {
+    RecordTemplate aspect =
+        _entityService.getAspect(GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, 0);
     return aspect != null ? (GlobalSettingsInfo) aspect : new GlobalSettingsInfo();
   }
 
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
index 6d64ceea32339..f5a76b5f75778 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestOwnershipTypesStep.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.boot.steps;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
@@ -16,22 +18,16 @@
 import com.linkedin.mxe.GenericAspect;
 import com.linkedin.mxe.MetadataChangeProposal;
 import com.linkedin.ownership.OwnershipTypeInfo;
-
+import java.util.List;
 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.Resource;
 
-import java.util.List;
-
-import static com.linkedin.metadata.Constants.*;
-
-
 /**
  * This bootstrap step is responsible for ingesting default ownership types.
- * <p>
- * If system has never bootstrapped this step will:
- * For each ownership type defined in the yaml file, it checks whether the urn exists.
- * If not, it ingests the ownership type into DataHub.
+ *
+ * <p>If system has never bootstrapped this step will: For each ownership type defined in the yaml
+ * file, it checks whether the urn exists. If not, it ingests the ownership type into DataHub.
  */
 @Slf4j
 @RequiredArgsConstructor
 public class IngestOwnershipTypesStep implements BootstrapStep {
@@ -54,19 +50,23 @@ public void execute() throws Exception {
     final JsonNode ownershipTypesObj = JSON_MAPPER.readTree(_ownershipTypesResource.getFile());
 
     if (!ownershipTypesObj.isArray()) {
-      throw new RuntimeException(String.format("Found malformed ownership file, expected an Array but found %s",
-          ownershipTypesObj.getNodeType()));
+      throw new RuntimeException(
+          String.format(
+              "Found malformed ownership file, expected an Array but found %s",
+              ownershipTypesObj.getNodeType()));
     }
 
     final AuditStamp auditStamp =
-        new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis());
+        new AuditStamp()
+            .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+            .setTime(System.currentTimeMillis());
 
     log.info("Ingesting {} ownership types", ownershipTypesObj.size());
     int numIngested = 0;
     for (final JsonNode roleObj : ownershipTypesObj) {
       final Urn urn = Urn.createFromString(roleObj.get("urn").asText());
-      final OwnershipTypeInfo info = RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info")
-          .toString());
+      final OwnershipTypeInfo info =
+          RecordUtils.toRecordTemplate(OwnershipTypeInfo.class, roleObj.get("info").toString());
       log.info(String.format("Ingesting default ownership type with urn %s", urn));
       ingestOwnershipType(urn, info, auditStamp);
       numIngested++;
@@ -74,13 +74,15 @@ public void execute() throws Exception {
     log.info("Ingested {} new ownership types", numIngested);
   }
 
-  private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) {
+  private void ingestOwnershipType(
+      final Urn ownershipTypeUrn, final OwnershipTypeInfo info, final AuditStamp auditStamp) {
 
     // 3. Write key & aspect MCPs.
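As the comment above notes, each of these ingest helpers writes two proposals in a single batch: the entity's key aspect, derived from the urn, and the info aspect carrying the actual payload. A self-contained sketch of that pairing with hypothetical stand-in types; DataHub's real MetadataChangeProposal and AspectsBatchImpl carry more fields than shown:

import java.util.List;

class KeyPlusInfoSketch {
  // Hypothetical stand-in for a MetadataChangeProposal: urn, aspect name, payload, change type.
  record Proposal(String urn, String aspectName, String payload, String changeType) {}

  // Pairing the key aspect with the value aspect lets one batch create and populate the entity.
  static List<Proposal> keyAndInfo(String urn, String infoJson) {
    Proposal key = new Proposal(urn, "ownershipTypeKey", urn, "UPSERT");
    Proposal info = new Proposal(urn, "ownershipTypeInfo", infoJson, "UPSERT");
    return List.of(key, info); // both proposals are ingested together, in order
  }
}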
final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(ownershipTypeUrn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(ownershipTypeUrn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(ownershipTypeUrn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(OWNERSHIP_TYPE_ENTITY_NAME); @@ -96,8 +98,11 @@ private void ingestOwnershipType(final Urn ownershipTypeUrn, final OwnershipType proposal.setAspect(GenericRecordUtils.serializeAspect(info)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()).build(), auditStamp, - false); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityService.getEntityRegistry()) + .build(), + auditStamp, + false); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java index cf29645214466..2aa5fe4f46b65 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; @@ -25,7 +27,6 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.policy.DataHubPolicyInfo; - import java.io.IOException; import java.net.URISyntaxException; import java.util.Collections; @@ -33,15 +34,10 @@ import java.util.List; import java.util.Map; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.Resource; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestPoliciesStep implements BootstrapStep { @@ -65,9 +61,13 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 0. 
Execute preflight check to see whether we need to ingest policies log.info("Ingesting default access policies from: {}...", _policiesResource); @@ -77,14 +77,17 @@ public void execute() throws IOException, URISyntaxException { if (!policiesObj.isArray()) { throw new RuntimeException( - String.format("Found malformed policies file, expected an Array but found %s", policiesObj.getNodeType())); + String.format( + "Found malformed policies file, expected an Array but found %s", + policiesObj.getNodeType())); } // 2. For each JSON object, cast into a DataHub Policy Info object. for (final JsonNode policyObj : policiesObj) { final Urn urn = Urn.createFromString(policyObj.get("urn").asText()); - // If the info is not there, it means that the policy was there before, but must now be removed + // If the info is not there, it means that the policy was there before, but must now be + // removed if (!policyObj.has("info")) { _entityService.deleteUrn(urn); continue; @@ -107,7 +110,8 @@ public void execute() throws IOException, URISyntaxException { } } } - // If search index for policies is empty, update the policy index with the ingested policies from previous step. + // If search index for policies is empty, update the policy index with the ingested policies + // from previous step. // Directly update the ES index, does not produce MCLs if (_entitySearchService.docCount(Constants.POLICY_ENTITY_NAME) == 0) { updatePolicyIndex(); @@ -115,31 +119,37 @@ public void execute() throws IOException, URISyntaxException { log.info("Successfully ingested default access policies."); } - /** - * Update policy index and push in the relevant search documents into the search index - */ + /** Update policy index and push in the relevant search documents into the search index */ private void updatePolicyIndex() throws URISyntaxException { log.info("Pushing documents to the policy index"); - AspectSpec policyInfoAspectSpec = _entityRegistry.getEntitySpec(Constants.POLICY_ENTITY_NAME) - .getAspectSpec(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); + AspectSpec policyInfoAspectSpec = + _entityRegistry + .getEntitySpec(Constants.POLICY_ENTITY_NAME) + .getAspectSpec(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); int start = 0; int count = 30; int total = 100; while (start < total) { - ListUrnsResult listUrnsResult = _entityService.listUrns(Constants.POLICY_ENTITY_NAME, start, count); + ListUrnsResult listUrnsResult = + _entityService.listUrns(Constants.POLICY_ENTITY_NAME, start, count); total = listUrnsResult.getTotal(); start = start + count; final Map policyEntities = - _entityService.getEntitiesV2(POLICY_ENTITY_NAME, new HashSet<>(listUrnsResult.getEntities()), + _entityService.getEntitiesV2( + POLICY_ENTITY_NAME, + new HashSet<>(listUrnsResult.getEntities()), Collections.singleton(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME)); - policyEntities.values().forEach(entityResponse -> insertPolicyDocument(entityResponse, policyInfoAspectSpec)); + policyEntities + .values() + .forEach(entityResponse -> insertPolicyDocument(entityResponse, policyInfoAspectSpec)); } log.info("Successfully updated the policy index"); } private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspectSpec) { - EnvelopedAspect aspect = entityResponse.getAspects().get(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); + EnvelopedAspect aspect = + entityResponse.getAspects().get(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME); if (aspect == null) { log.info("Missing policy info aspect for urn {}", entityResponse.getUrn()); return; @@ -147,10 +157,15 @@ 
private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe Optional searchDocument; try { - searchDocument = _searchDocumentTransformer.transformAspect(entityResponse.getUrn(), - new DataHubPolicyInfo(aspect.getValue().data()), aspectSpec, false); + searchDocument = + _searchDocumentTransformer.transformAspect( + entityResponse.getUrn(), + new DataHubPolicyInfo(aspect.getValue().data()), + aspectSpec, + false); } catch (Exception e) { - log.error("Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); + log.error( + "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); return; } @@ -164,7 +179,8 @@ private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe return; } - _entitySearchService.upsertDocument(Constants.POLICY_ENTITY_NAME, searchDocument.get(), docId.get()); + _entitySearchService.upsertDocument( + Constants.POLICY_ENTITY_NAME, searchDocument.get(), docId.get()); } private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws URISyntaxException { @@ -172,7 +188,8 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(urn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(POLICY_ENTITY_NAME); @@ -186,11 +203,14 @@ private void ingestPolicy(final Urn urn, final DataHubPolicyInfo info) throws UR proposal.setAspect(GenericRecordUtils.serializeAspect(info)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) - .build(), - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - false); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) + .build(), + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); } private boolean hasPolicy(Urn policyUrn) { diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java index 9aed445a967b3..b24acc61ff6c1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRetentionPoliciesStep.java @@ -1,12 +1,14 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.boot.BootstrapStep; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; import 
com.linkedin.metadata.entity.RetentionService;
 import com.linkedin.metadata.key.DataHubRetentionKey;
@@ -22,9 +24,6 @@
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class IngestRetentionPoliciesStep implements BootstrapStep {
@@ -36,10 +35,17 @@ public class IngestRetentionPoliciesStep implements BootstrapStep {
   private final String pluginPath;
 
   private static final ObjectMapper YAML_MAPPER = new ObjectMapper(new YAMLFactory());
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    YAML_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    YAML_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
   }
+
   private static final String UPGRADE_ID = "ingest-retention-policies";
   private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID);
 
@@ -80,7 +86,8 @@ public void execute() throws IOException, URISyntaxException {
     log.info("Setting {} policies", retentionPolicyMap.size());
     boolean hasUpdate = false;
     for (DataHubRetentionKey key : retentionPolicyMap.keySet()) {
-      if (_retentionService.setRetention(key.getEntityName(), key.getAspectName(), retentionPolicyMap.get(key))) {
+      if (_retentionService.setRetention(
+          key.getEntityName(), key.getAspectName(), retentionPolicyMap.get(key))) {
         hasUpdate = true;
       }
     }
@@ -95,7 +102,8 @@ public void execute() throws IOException, URISyntaxException {
   }
 
   // Parse input yaml file or yaml files in the input directory to generate a retention policy map
-  private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File retentionFileOrDir) throws IOException {
+  private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File retentionFileOrDir)
+      throws IOException {
     // If path does not exist return empty
     if (!retentionFileOrDir.exists()) {
       return Collections.emptyMap();
     }
@@ -107,7 +115,9 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret
 
       for (File retentionFile : retentionFileOrDir.listFiles()) {
         if (!retentionFile.isFile()) {
-          log.info("Element {} in plugin directory {} is not a file. Skipping", retentionFile.getPath(),
+          log.info(
+              "Element {} in plugin directory {} is not a file. Skipping",
+              retentionFile.getPath(),
               retentionFileOrDir.getPath());
           continue;
         }
@@ -116,7 +126,8 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret
       return result;
     }
    // If file, parse the yaml file and return result; skip anything that is not .yaml or .yml
-    if (!retentionFileOrDir.getPath().endsWith(".yaml") && retentionFileOrDir.getPath().endsWith(".yml")) {
+    if (!retentionFileOrDir.getPath().endsWith(".yaml")
+        && !retentionFileOrDir.getPath().endsWith(".yml")) {
       log.info("File {} is not a YAML file. Skipping", retentionFileOrDir.getPath());
       return Collections.emptyMap();
     }
@@ -126,15 +137,16 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseFileOrDir(File ret
 
   /**
    * Parse yaml retention config
    *
-   * The structure of yaml must be a list of retention policies where each element specifies the entity, aspect
-   * to apply the policy to and the policy definition. The policy definition is converted into the
-   * {@link com.linkedin.retention.DataHubRetentionConfig} class.
+   * <p>The structure of yaml must be a list of retention policies where each element specifies the
+   * entity, aspect to apply the policy to and the policy definition. The policy definition is
+   * converted into the {@link com.linkedin.retention.DataHubRetentionConfig} class.
    */
-  private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfig(File retentionConfigFile)
-      throws IOException {
+  private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfig(
+      File retentionConfigFile) throws IOException {
     final JsonNode retentionPolicies = YAML_MAPPER.readTree(retentionConfigFile);
     if (!retentionPolicies.isArray()) {
-      throw new IllegalArgumentException("Retention config file must contain an array of retention policies");
+      throw new IllegalArgumentException(
+          "Retention config file must contain an array of retention policies");
     }
 
     Map<DataHubRetentionKey, DataHubRetentionConfig> retentionPolicyMap = new HashMap<>();
 
@@ -158,9 +170,11 @@ private Map<DataHubRetentionKey, DataHubRetentionConfig> parseYamlRetentionConfi
       DataHubRetentionConfig retentionInfo;
       if (retentionPolicy.has("config")) {
         retentionInfo =
-            RecordUtils.toRecordTemplate(DataHubRetentionConfig.class, retentionPolicy.get("config").toString());
+            RecordUtils.toRecordTemplate(
+                DataHubRetentionConfig.class, retentionPolicy.get("config").toString());
       } else {
-        throw new IllegalArgumentException("Each element in the retention config must contain field config");
+        throw new IllegalArgumentException(
+            "Each element in the retention config must contain field config");
       }
 
       retentionPolicyMap.put(key, retentionInfo);
diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
index 99be185113968..f3c395abdfc3a 100644
--- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
+++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRolesStep.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.boot.steps;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.util.RecordUtils;
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -25,9 +27,6 @@
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.io.ClassPathResource;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 @Slf4j
 @RequiredArgsConstructor
 public class IngestRolesStep implements BootstrapStep {
@@ -49,9 +48,13 @@ public ExecutionMode getExecutionMode() {
   @Override
   public void execute() throws Exception {
     final ObjectMapper mapper = new ObjectMapper();
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder()
-        .maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    mapper
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
 
     // Sleep to ensure deployment process finishes.
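One detail in parseFileOrDir worth underlining: a retention file should be skipped only when it ends with neither .yaml nor .yml, so the skip condition must negate both suffix checks; negating only the first one would skip exactly the .yml files the step means to accept. A tiny self-contained sketch of the intended predicate:

class YamlFilterSketch {
  // A retention file is parseable YAML only if it carries one of the two YAML suffixes.
  static boolean isYamlFile(String path) {
    return path.endsWith(".yaml") || path.endsWith(".yml");
  }

  public static void main(String[] args) {
    // Skip a file when it matches neither suffix: !isYamlFile(path) is the skip condition.
    for (String path : new String[] {"retention.yaml", "retention.yml", "retention.json"}) {
      System.out.println(path + " -> " + (isYamlFile(path) ? "parse" : "skip"));
    }
  }
}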
Thread.sleep(SLEEP_SECONDS * 1000); @@ -64,13 +67,19 @@ public void execute() throws Exception { if (!rolesObj.isArray()) { throw new RuntimeException( - String.format("Found malformed roles file, expected an Array but found %s", rolesObj.getNodeType())); + String.format( + "Found malformed roles file, expected an Array but found %s", + rolesObj.getNodeType())); } final AspectSpec roleInfoAspectSpec = - _entityRegistry.getEntitySpec(DATAHUB_ROLE_ENTITY_NAME).getAspectSpec(DATAHUB_ROLE_INFO_ASPECT_NAME); + _entityRegistry + .getEntitySpec(DATAHUB_ROLE_ENTITY_NAME) + .getAspectSpec(DATAHUB_ROLE_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); for (final JsonNode roleObj : rolesObj) { final Urn urn = Urn.createFromString(roleObj.get("urn").asText()); @@ -81,20 +90,26 @@ public void execute() throws Exception { continue; } - final DataHubRoleInfo info = RecordUtils.toRecordTemplate(DataHubRoleInfo.class, roleObj.get("info").toString()); + final DataHubRoleInfo info = + RecordUtils.toRecordTemplate(DataHubRoleInfo.class, roleObj.get("info").toString()); ingestRole(urn, info, auditStamp, roleInfoAspectSpec); } log.info("Successfully ingested default Roles."); } - private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo, final AuditStamp auditStamp, - final AspectSpec roleInfoAspectSpec) throws URISyntaxException { + private void ingestRole( + final Urn roleUrn, + final DataHubRoleInfo dataHubRoleInfo, + final AuditStamp auditStamp, + final AspectSpec roleInfoAspectSpec) + throws URISyntaxException { // 3. Write key & aspect final MetadataChangeProposal keyAspectProposal = new MetadataChangeProposal(); final AspectSpec keyAspectSpec = _entityService.getKeyAspectSpec(roleUrn); GenericAspect aspect = - GenericRecordUtils.serializeAspect(EntityKeyUtils.convertUrnToEntityKey(roleUrn, keyAspectSpec)); + GenericRecordUtils.serializeAspect( + EntityKeyUtils.convertUrnToEntityKey(roleUrn, keyAspectSpec)); keyAspectProposal.setAspect(aspect); keyAspectProposal.setAspectName(keyAspectSpec.getName()); keyAspectProposal.setEntityType(DATAHUB_ROLE_ENTITY_NAME); @@ -108,12 +123,25 @@ private void ingestRole(final Urn roleUrn, final DataHubRoleInfo dataHubRoleInfo proposal.setAspect(GenericRecordUtils.serializeAspect(dataHubRoleInfo)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(AspectsBatchImpl.builder() - .mcps(List.of(keyAspectProposal, proposal), _entityRegistry).build(), - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), - false); - - _entityService.alwaysProduceMCLAsync(roleUrn, DATAHUB_ROLE_ENTITY_NAME, DATAHUB_ROLE_INFO_ASPECT_NAME, - roleInfoAspectSpec, null, dataHubRoleInfo, null, null, auditStamp, ChangeType.RESTATE); + _entityService.ingestProposal( + AspectsBatchImpl.builder() + .mcps(List.of(keyAspectProposal, proposal), _entityRegistry) + .build(), + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); + + _entityService.alwaysProduceMCLAsync( + roleUrn, + DATAHUB_ROLE_ENTITY_NAME, + DATAHUB_ROLE_INFO_ASPECT_NAME, + roleInfoAspectSpec, + null, + dataHubRoleInfo, + null, + null, + auditStamp, + ChangeType.RESTATE); } } diff --git 
a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java index febcb9d4ec8a4..9e00b960482c5 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestRootUserStep.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.util.RecordUtils; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -7,25 +10,19 @@ import com.linkedin.common.urn.Urn; import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.boot.BootstrapStep; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.EntityService; - import com.linkedin.metadata.key.CorpUserKey; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.util.Pair; import java.io.IOException; import java.net.URISyntaxException; import java.util.List; - -import com.linkedin.util.Pair; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.core.io.ClassPathResource; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class IngestRootUserStep implements BootstrapStep { @@ -43,16 +40,23 @@ public String name() { public void execute() throws IOException, URISyntaxException { final ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); // 1. Read from the file into JSON. - final JsonNode userObj = mapper.readTree(new ClassPathResource("./boot/root_user.json").getFile()); + final JsonNode userObj = + mapper.readTree(new ClassPathResource("./boot/root_user.json").getFile()); if (!userObj.isObject()) { - throw new RuntimeException(String.format("Found malformed root user file, expected an Object but found %s", - userObj.getNodeType())); + throw new RuntimeException( + String.format( + "Found malformed root user file, expected an Object but found %s", + userObj.getNodeType())); } // 2. 
Ingest the user info @@ -66,18 +70,22 @@ public void execute() throws IOException, URISyntaxException { final CorpUserInfo info = RecordUtils.toRecordTemplate(CorpUserInfo.class, userObj.get("info").toString()); - final CorpUserKey key = (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec()); + final CorpUserKey key = + (CorpUserKey) EntityKeyUtils.convertUrnToEntityKey(urn, getUserKeyAspectSpec()); final AuditStamp aspectAuditStamp = - new AuditStamp().setActor(Urn.createFromString(SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - _entityService.ingestAspects(urn, List.of( - Pair.of(CORP_USER_KEY_ASPECT_NAME, key), - Pair.of(USER_INFO_ASPECT_NAME, info) - ), aspectAuditStamp, null); + new AuditStamp() + .setActor(Urn.createFromString(SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + _entityService.ingestAspects( + urn, + List.of(Pair.of(CORP_USER_KEY_ASPECT_NAME, key), Pair.of(USER_INFO_ASPECT_NAME, info)), + aspectAuditStamp, + null); } private AspectSpec getUserKeyAspectSpec() { final EntitySpec spec = _entityService.getEntityRegistry().getEntitySpec(CORP_USER_ENTITY_NAME); return spec.getKeyAspectSpec(); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java index 34147b166ecd7..3c62f695ddd5f 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java @@ -9,7 +9,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements BootstrapStep { @@ -33,7 +32,8 @@ public void execute() throws Exception { return; } // Remove invalid telemetry aspect - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_TELEMETRY_ASPECT_NAME, new HashMap<>(), true); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_TELEMETRY_ASPECT_NAME, new HashMap<>(), true); BootstrapStep.setUpgradeResult(REMOVE_UNKNOWN_ASPECTS_URN, _entityService); } catch (Exception e) { @@ -48,5 +48,4 @@ public void execute() throws Exception { public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java index 1f5f7f26ed89b..333928999f453 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndices.java @@ -13,14 +13,13 @@ import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.ExtraInfo; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.LinkedList; import java.util.List; import java.util.Objects; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class RestoreColumnLineageIndices extends UpgradeStep { @@ -30,7 +29,8 @@ public class RestoreColumnLineageIndices 
extends UpgradeStep { private final EntityRegistry _entityRegistry; - public RestoreColumnLineageIndices(@Nonnull final EntityService entityService, @Nonnull final EntityRegistry entityRegistry) { + public RestoreColumnLineageIndices( + @Nonnull final EntityService entityService, @Nonnull final EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entityRegistry = Objects.requireNonNull(entityRegistry, "entityRegistry must not be null"); } @@ -38,7 +38,9 @@ public RestoreColumnLineageIndices(@Nonnull final EntityService entityService, @ @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final int totalUpstreamLineageCount = getAndRestoreUpstreamLineageIndices(0, auditStamp); int upstreamLineageCount = BATCH_SIZE; @@ -47,17 +49,21 @@ public void upgrade() throws Exception { upstreamLineageCount += BATCH_SIZE; } - final int totalChartInputFieldsCount = getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, 0, auditStamp); + final int totalChartInputFieldsCount = + getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, 0, auditStamp); int chartInputFieldsCount = BATCH_SIZE; while (chartInputFieldsCount < totalChartInputFieldsCount) { - getAndRestoreInputFieldsIndices(Constants.CHART_ENTITY_NAME, chartInputFieldsCount, auditStamp); + getAndRestoreInputFieldsIndices( + Constants.CHART_ENTITY_NAME, chartInputFieldsCount, auditStamp); chartInputFieldsCount += BATCH_SIZE; } - final int totalDashboardInputFieldsCount = getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, 0, auditStamp); + final int totalDashboardInputFieldsCount = + getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, 0, auditStamp); int dashboardInputFieldsCount = BATCH_SIZE; while (dashboardInputFieldsCount < totalDashboardInputFieldsCount) { - getAndRestoreInputFieldsIndices(Constants.DASHBOARD_ENTITY_NAME, dashboardInputFieldsCount, auditStamp); + getAndRestoreInputFieldsIndices( + Constants.DASHBOARD_ENTITY_NAME, dashboardInputFieldsCount, auditStamp); dashboardInputFieldsCount += BATCH_SIZE; } } @@ -69,23 +75,29 @@ public ExecutionMode getExecutionMode() { } private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp) { - final AspectSpec upstreamLineageAspectSpec = _entityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME) - .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); - - final ListResult latestAspects = _entityService.listLatestAspects( - Constants.DATASET_ENTITY_NAME, - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - start, - BATCH_SIZE); - - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { + final AspectSpec upstreamLineageAspectSpec = + _entityRegistry + .getEntitySpec(Constants.DATASET_ENTITY_NAME) + .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + + final ListResult latestAspects = + _entityService.listLatestAspects( + Constants.DATASET_ENTITY_NAME, + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + start, + BATCH_SIZE); + + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { log.debug("Found 0 upstreamLineage aspects for datasets. 
Skipping migration."); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of upstreamLineages. - log.warn("Failed to match upstreamLineage aspects with corresponding urns. Found mismatched length between aspects ({})" + log.warn( + "Failed to match upstreamLineage aspects with corresponding urns. Found mismatched length between aspects ({})" + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), @@ -104,48 +116,56 @@ private int getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - urn, - Constants.DATASET_ENTITY_NAME, - Constants.UPSTREAM_LINEAGE_ASPECT_NAME, - upstreamLineageAspectSpec, - null, - upstreamLineage, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + urn, + Constants.DATASET_ENTITY_NAME, + Constants.UPSTREAM_LINEAGE_ASPECT_NAME, + upstreamLineageAspectSpec, + null, + upstreamLineage, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return latestAspects.getTotalCount(); } - private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditStamp auditStamp) throws Exception { - final AspectSpec inputFieldsAspectSpec = _entityRegistry.getEntitySpec(entityName) - .getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME); + private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditStamp auditStamp) + throws Exception { + final AspectSpec inputFieldsAspectSpec = + _entityRegistry.getEntitySpec(entityName).getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME); - final ListResult latestAspects = _entityService.listLatestAspects( - entityName, - Constants.INPUT_FIELDS_ASPECT_NAME, - start, - BATCH_SIZE); + final ListResult latestAspects = + _entityService.listLatestAspects( + entityName, Constants.INPUT_FIELDS_ASPECT_NAME, start, BATCH_SIZE); - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { log.debug("Found 0 inputFields aspects. Skipping migration."); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of inputFields. - log.warn("Failed to match inputFields aspects with corresponding urns. Found mismatched length between aspects ({})" + log.warn( + "Failed to match inputFields aspects with corresponding urns. 
Found mismatched length between aspects ({})" + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), @@ -164,26 +184,32 @@ private int getAndRestoreInputFieldsIndices(String entityName, int start, AuditS continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - urn, - entityName, - Constants.INPUT_FIELDS_ASPECT_NAME, - inputFieldsAspectSpec, - null, - inputFields, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + urn, + entityName, + Constants.INPUT_FIELDS_ASPECT_NAME, + inputFieldsAspectSpec, + null, + inputFields, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return latestAspects.getTotalCount(); } diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java index 355936fe1994c..bb7ad80ef73d2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreDbtSiblingsIndices.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -33,16 +35,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class RestoreDbtSiblingsIndices implements BootstrapStep { private static final String VERSION = "0"; private static final String UPGRADE_ID = "restore-dbt-siblings-indices"; private static final Urn SIBLING_UPGRADE_URN = - EntityKeyUtils.convertEntityKeyToUrn(new DataHubUpgradeKey().setId(UPGRADE_ID), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); + EntityKeyUtils.convertEntityKeyToUrn( + new DataHubUpgradeKey().setId(UPGRADE_ID), Constants.DATA_HUB_UPGRADE_ENTITY_NAME); private static final Integer BATCH_SIZE = 1000; private static final Integer SLEEP_SECONDS = 120; @@ -65,12 +65,19 @@ public void execute() throws Exception { log.info("Attempting to run RestoreDbtSiblingsIndices upgrade.."); log.info(String.format("Waiting %s seconds..", SLEEP_SECONDS)); - EntityResponse response = _entityService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, SIBLING_UPGRADE_URN, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - ); - if (response != null && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { - DataMap dataMap = response.getAspects().get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME).getValue().data(); + EntityResponse response = + _entityService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + SIBLING_UPGRADE_URN, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)); + if (response != null + && response.getAspects().containsKey(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) { + DataMap dataMap 
= + response + .getAspects() + .get(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) + .getValue() + .data(); DataHubUpgradeRequest request = new DataHubUpgradeRequest(dataMap); if (request.hasVersion() && request.getVersion().equals(VERSION)) { log.info("RestoreDbtSiblingsIndices has run before with this version. Skipping"); @@ -89,11 +96,20 @@ public void execute() throws Exception { log.info("Found {} dataset entities to attempt to bootstrap", rowCount); final AspectSpec datasetAspectSpec = - _entityRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME).getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); - final AuditStamp auditStamp = new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - final DataHubUpgradeRequest upgradeRequest = new DataHubUpgradeRequest().setTimestampMs(System.currentTimeMillis()).setVersion(VERSION); - ingestUpgradeAspect(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, upgradeRequest, auditStamp); + _entityRegistry + .getEntitySpec(Constants.DATASET_ENTITY_NAME) + .getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + final DataHubUpgradeRequest upgradeRequest = + new DataHubUpgradeRequest() + .setTimestampMs(System.currentTimeMillis()) + .setVersion(VERSION); + ingestUpgradeAspect( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, upgradeRequest, auditStamp); int indexedCount = 0; while (indexedCount < rowCount) { @@ -101,19 +117,23 @@ public void execute() throws Exception { indexedCount += BATCH_SIZE; } - final DataHubUpgradeResult upgradeResult = new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); + final DataHubUpgradeResult upgradeResult = + new DataHubUpgradeResult().setTimestampMs(System.currentTimeMillis()); ingestUpgradeAspect(Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, upgradeResult, auditStamp); log.info("Successfully restored sibling aspects"); } catch (Exception e) { log.error("Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); _entityService.deleteUrn(SIBLING_UPGRADE_URN); - throw new RuntimeException("Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); + throw new RuntimeException( + "Error when running the RestoreDbtSiblingsIndices Bootstrap Step", e); } } - private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStamp, AspectSpec upstreamAspectSpec) { - ListUrnsResult datasetUrnsResult = _entityService.listUrns(DATASET_ENTITY_NAME, start, BATCH_SIZE); + private void getAndRestoreUpstreamLineageIndices( + int start, AuditStamp auditStamp, AspectSpec upstreamAspectSpec) { + ListUrnsResult datasetUrnsResult = + _entityService.listUrns(DATASET_ENTITY_NAME, start, BATCH_SIZE); List datasetUrns = datasetUrnsResult.getEntities(); log.info("Re-indexing upstreamLineage aspect from {} with batch size {}", start, BATCH_SIZE); @@ -121,12 +141,16 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam return; } - final Map upstreamLineageResponse; + final Map upstreamLineageResponse; try { upstreamLineageResponse = - _entityService.getEntitiesV2(DATASET_ENTITY_NAME, new HashSet<>(datasetUrns), Collections.singleton(UPSTREAM_LINEAGE_ASPECT_NAME)); + _entityService.getEntitiesV2( + DATASET_ENTITY_NAME, + new HashSet<>(datasetUrns), + Collections.singleton(UPSTREAM_LINEAGE_ASPECT_NAME)); } catch (URISyntaxException e) { - throw new 
RuntimeException(String.format("Error fetching upstream lineage history: %s", e.toString())); + throw new RuntimeException( + String.format("Error fetching upstream lineage history: %s", e.toString())); } // Loop over datasets and produce changelog @@ -142,26 +166,32 @@ private void getAndRestoreUpstreamLineageIndices(int start, AuditStamp auditStam continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - datasetUrn, - DATASET_ENTITY_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - upstreamAspectSpec, - null, - upstreamLineage, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + datasetUrn, + DATASET_ENTITY_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + upstreamAspectSpec, + null, + upstreamLineage, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); } private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) { @@ -170,10 +200,12 @@ private UpstreamLineage getUpstreamLineage(EntityResponse entityResponse) { return null; } - return new UpstreamLineage(aspectMap.get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); + return new UpstreamLineage( + aspectMap.get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data()); } - private void ingestUpgradeAspect(String aspectName, RecordTemplate aspect, AuditStamp auditStamp) { + private void ingestUpgradeAspect( + String aspectName, RecordTemplate aspect, AuditStamp auditStamp) { final MetadataChangeProposal upgradeProposal = new MetadataChangeProposal(); upgradeProposal.setEntityUrn(SIBLING_UPGRADE_URN); upgradeProposal.setEntityType(Constants.DATA_HUB_UPGRADE_ENTITY_NAME); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java index 4de2bea9a76a9..319bbd084e05c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndices.java @@ -16,7 +16,6 @@ import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; - import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; @@ -29,7 +28,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class RestoreGlossaryIndices extends UpgradeStep { private static final String VERSION = "1"; @@ -39,7 +37,9 @@ public class RestoreGlossaryIndices extends UpgradeStep { private final EntitySearchService _entitySearchService; private final EntityRegistry _entityRegistry; - public RestoreGlossaryIndices(EntityService entityService, EntitySearchService entitySearchService, + public RestoreGlossaryIndices( + EntityService entityService, + EntitySearchService entitySearchService, EntityRegistry entityRegistry) { super(entityService, VERSION, UPGRADE_ID); _entitySearchService = entitySearchService; @@ -48,12 +48,18 @@ public RestoreGlossaryIndices(EntityService entityService, 
EntitySearchService e @Override public void upgrade() throws Exception { - final AspectSpec termAspectSpec = _entityRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) - .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); - final AspectSpec nodeAspectSpec = _entityRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) - .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); + final AspectSpec termAspectSpec = + _entityRegistry + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME) + .getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + final AspectSpec nodeAspectSpec = + _entityRegistry + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME) + .getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME); final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); final int totalTermsCount = getAndRestoreTermAspectIndices(0, auditStamp, termAspectSpec); int termsCount = BATCH_SIZE; @@ -76,20 +82,29 @@ public ExecutionMode getExecutionMode() { return ExecutionMode.ASYNC; } - private int getAndRestoreTermAspectIndices(int start, AuditStamp auditStamp, AspectSpec termAspectSpec) - throws Exception { + private int getAndRestoreTermAspectIndices( + int start, AuditStamp auditStamp, AspectSpec termAspectSpec) throws Exception { SearchResult termsResult = - _entitySearchService.search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, - null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - List termUrns = termsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + _entitySearchService.search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + start, + BATCH_SIZE, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + List termUrns = + termsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); if (termUrns.size() == 0) { return 0; } final Map termInfoResponses = - _entityService.getEntitiesV2(Constants.GLOSSARY_TERM_ENTITY_NAME, new HashSet<>(termUrns), - Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) - ); + _entityService.getEntitiesV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(termUrns), + Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)); // Loop over Terms and produce changelog List> futures = new LinkedList<>(); @@ -105,43 +120,59 @@ null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - termUrn, - Constants.GLOSSARY_TERM_ENTITY_NAME, - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - termAspectSpec, - null, - termInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + termUrn, + Constants.GLOSSARY_TERM_ENTITY_NAME, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + termAspectSpec, + null, + termInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw 
new RuntimeException(e); + } + }); return termsResult.getNumEntities(); } - private int getAndRestoreNodeAspectIndices(int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception { - SearchResult nodesResult = _entitySearchService.search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "", - null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - List nodeUrns = nodesResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList()); + private int getAndRestoreNodeAspectIndices( + int start, AuditStamp auditStamp, AspectSpec nodeAspectSpec) throws Exception { + SearchResult nodesResult = + _entitySearchService.search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + start, + BATCH_SIZE, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + List nodeUrns = + nodesResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); if (nodeUrns.size() == 0) { return 0; } - final Map nodeInfoResponses = _entityService.getEntitiesV2( - Constants.GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(nodeUrns), - Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) - ); + final Map nodeInfoResponses = + _entityService.getEntitiesV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(nodeUrns), + Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)); // Loop over Nodes and produce changelog List> futures = new LinkedList<>(); @@ -157,26 +188,32 @@ null, null, start, BATCH_SIZE, new SearchFlags().setFulltext(false) continue; } - futures.add(_entityService.alwaysProduceMCLAsync( - nodeUrn, - Constants.GLOSSARY_NODE_ENTITY_NAME, - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - nodeAspectSpec, - null, - nodeInfo, - null, - null, - auditStamp, - ChangeType.RESTATE).getFirst()); + futures.add( + _entityService + .alwaysProduceMCLAsync( + nodeUrn, + Constants.GLOSSARY_NODE_ENTITY_NAME, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + nodeAspectSpec, + null, + nodeInfo, + null, + null, + auditStamp, + ChangeType.RESTATE) + .getFirst()); } - futures.stream().filter(Objects::nonNull).forEach(f -> { - try { - f.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }); + futures.stream() + .filter(Objects::nonNull) + .forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); return nodesResult.getNumEntities(); } @@ -187,7 +224,8 @@ private GlossaryTermInfo mapTermInfo(EntityResponse entityResponse) { return null; } - return new GlossaryTermInfo(aspectMap.get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); + return new GlossaryTermInfo( + aspectMap.get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); } private GlossaryNodeInfo mapNodeInfo(EntityResponse entityResponse) { @@ -196,6 +234,7 @@ private GlossaryNodeInfo mapNodeInfo(EntityResponse entityResponse) { return null; } - return new GlossaryNodeInfo(aspectMap.get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); + return new GlossaryNodeInfo( + aspectMap.get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java index 
7fcafa24d7b45..e2d59b505a568 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStep.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; @@ -19,9 +21,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * This is an opt-in optional upgrade step to migrate your browse paths to the new truncated form. * It is idempotent, can be retried as many times as necessary. @@ -29,13 +28,13 @@ @Slf4j public class UpgradeDefaultBrowsePathsStep extends UpgradeStep { - private static final Set ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME - ); + private static final Set ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME); private static final String VERSION = "1"; private static final String UPGRADE_ID = "upgrade-default-browse-paths-step"; private static final Integer BATCH_SIZE = 5000; @@ -47,14 +46,18 @@ public UpgradeDefaultBrowsePathsStep(EntityService entityService) { @Override public void upgrade() throws Exception { final AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); int total = 0; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s out of %s of browse paths for entity type %s", - migratedCount, migratedCount + BATCH_SIZE, total, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s out of %s of browse paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, total, entityType)); total = getAndMigrateBrowsePaths(entityType, migratedCount, auditStamp); migratedCount += BATCH_SIZE; } while (migratedCount < total); @@ -71,21 +74,24 @@ public ExecutionMode getExecutionMode() { private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp auditStamp) throws Exception { - final ListResult latestAspects = _entityService.listLatestAspects( - entityType, - Constants.BROWSE_PATHS_ASPECT_NAME, - start, - BATCH_SIZE); + final ListResult latestAspects = + _entityService.listLatestAspects( + entityType, Constants.BROWSE_PATHS_ASPECT_NAME, start, BATCH_SIZE); - if (latestAspects.getTotalCount() == 0 || latestAspects.getValues() == null || latestAspects.getMetadata() == null) { - log.debug(String.format("Found 0 browse paths for entity with type %s. Skipping migration!", entityType)); + if (latestAspects.getTotalCount() == 0 + || latestAspects.getValues() == null + || latestAspects.getMetadata() == null) { + log.debug( + String.format( + "Found 0 browse paths for entity with type %s. 
Skipping migration!", entityType)); return 0; } if (latestAspects.getValues().size() != latestAspects.getMetadata().getExtraInfos().size()) { // Bad result -- we should log that we cannot migrate this batch of paths. - log.warn("Failed to match browse path aspects with corresponding urns. Found mismatched length between aspects ({})" - + "and metadata ({}) for metadata {}", + log.warn( + "Failed to match browse path aspects with corresponding urns. Found mismatched length between aspects ({})" + + "and metadata ({}) for metadata {}", latestAspects.getValues().size(), latestAspects.getMetadata().getExtraInfos().size(), latestAspects.getMetadata()); @@ -107,7 +113,8 @@ private int getAndMigrateBrowsePaths(String entityType, int start, AuditStamp au log.debug(String.format("Inspecting browse path for urn %s, value %s", urn, browsePaths)); if (browsePaths.hasPaths() && browsePaths.getPaths().size() == 1) { - String legacyBrowsePath = BrowsePathUtils.getLegacyDefaultBrowsePath(urn, _entityService.getEntityRegistry()); + String legacyBrowsePath = + BrowsePathUtils.getLegacyDefaultBrowsePath(urn, _entityService.getEntityRegistry()); log.debug(String.format("Legacy browse path for urn %s, value %s", urn, legacyBrowsePath)); if (legacyBrowsePath.equals(browsePaths.getPaths().get(0))) { migrateBrowsePath(urn, auditStamp); @@ -126,13 +133,9 @@ private void migrateBrowsePath(Urn urn, AuditStamp auditStamp) throws Exception proposal.setEntityType(urn.getEntityType()); proposal.setAspectName(Constants.BROWSE_PATHS_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(newPaths)); - _entityService.ingestProposal( - proposal, - auditStamp, - false - ); + _entityService.ingestProposal(proposal, auditStamp, false); } - -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java index 5cac32cfe1a42..409285fc8f1e9 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/WaitForSystemUpdateStep.java @@ -19,7 +19,8 @@ public String name() { @Override public void execute() throws Exception { if (!_dataHubUpgradeKafkaListener.waitForBootstrap()) { - throw new IllegalStateException("Build indices was unsuccessful, stopping bootstrap process."); + throw new IllegalStateException( + "Build indices was unsuccessful, stopping bootstrap process."); } } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java index 52fee1342755c..67d0976a1b0a8 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RAPServletFactory.java @@ -1,5 +1,7 @@ package com.linkedin.restli.server; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.linkedin.data.codec.AbstractJacksonDataCodec; 
import com.linkedin.metadata.filter.RestliLoggingFilter; @@ -10,59 +12,62 @@ import com.linkedin.r2.transport.http.server.RAPServlet; import com.linkedin.restli.docgen.DefaultDocumentationRequestHandler; import com.linkedin.restli.server.spring.SpringInjectResourceFactory; +import java.util.concurrent.Executors; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.util.concurrent.Executors; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j @Configuration public class RAPServletFactory { - @Value("#{systemEnvironment['RESTLI_SERVLET_THREADS']}") - private Integer environmentThreads; + @Value("#{systemEnvironment['RESTLI_SERVLET_THREADS']}") + private Integer environmentThreads; + + @Value("${" + INGESTION_MAX_SERIALIZED_STRING_LENGTH + ":16000000}") + private int maxSerializedStringLength; - @Value("${" + INGESTION_MAX_SERIALIZED_STRING_LENGTH + ":16000000}") - private int maxSerializedStringLength; + @Bean(name = "restliSpringInjectResourceFactory") + public SpringInjectResourceFactory springInjectResourceFactory() { + return new SpringInjectResourceFactory(); + } - @Bean(name = "restliSpringInjectResourceFactory") - public SpringInjectResourceFactory springInjectResourceFactory() { - return new SpringInjectResourceFactory(); - } + @Bean("parseqEngineThreads") + public int parseqEngineThreads() { + return environmentThreads != null + ? environmentThreads + : (Runtime.getRuntime().availableProcessors() + 1); + } - @Bean("parseqEngineThreads") - public int parseqEngineThreads() { - return environmentThreads != null ? environmentThreads : (Runtime.getRuntime().availableProcessors() + 1); - } - @Bean - public RAPServlet rapServlet( - @Qualifier("restliSpringInjectResourceFactory") SpringInjectResourceFactory springInjectResourceFactory, - @Qualifier("parseqEngineThreads") int threads) { - log.info("Starting restli servlet with {} threads.", threads); - Engine parseqEngine = new EngineBuilder() - .setTaskExecutor(Executors.newFixedThreadPool(threads)) - .setTimerScheduler(Executors.newSingleThreadScheduledExecutor()) - .build(); + @Bean + public RAPServlet rapServlet( + @Qualifier("restliSpringInjectResourceFactory") + SpringInjectResourceFactory springInjectResourceFactory, + @Qualifier("parseqEngineThreads") int threads) { + log.info("Starting restli servlet with {} threads.", threads); + Engine parseqEngine = + new EngineBuilder() + .setTaskExecutor(Executors.newFixedThreadPool(threads)) + .setTimerScheduler(Executors.newSingleThreadScheduledExecutor()) + .build(); - // !!!!!!! IMPORTANT !!!!!!! - // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. Without this the limit is - // whatever Jackson is defaulting to (5 MB currently). - AbstractJacksonDataCodec.JSON_FACTORY.setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSerializedStringLength).build()); - // !!!!!!! IMPORTANT !!!!!!! + // !!!!!!! IMPORTANT !!!!!!! + // This effectively sets the max aspect size to 16 MB. Used in deserialization of messages. + // Without this the limit is + // whatever Jackson is defaulting to (5 MB currently). + AbstractJacksonDataCodec.JSON_FACTORY.setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSerializedStringLength).build()); + // !!!!!!! IMPORTANT !!!!!!! 
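
[Editor's illustration] The StreamReadConstraints block above is the load-bearing part of this file: Jackson 2.15 began enforcing a per-document maximum string length (5 MB on the releases current when this patch was written), so large serialized aspects would fail to deserialize without raising the cap. A standalone sketch of the same knob against a plain ObjectMapper (StreamLimitDemo and the payload size are made up; the builder calls and the 16000000 limit mirror the factory code above):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.StreamReadConstraints;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class StreamLimitDemo {
  public static void main(String[] args) throws Exception {
    // Raise the per-string cap to 16 MB, matching the factory's default of
    // 16000000 for INGESTION_MAX_SERIALIZED_STRING_LENGTH.
    JsonFactory factory =
        JsonFactory.builder()
            .streamReadConstraints(
                StreamReadConstraints.builder().maxStringLength(16_000_000).build())
            .build();
    ObjectMapper mapper = new ObjectMapper(factory);

    // A 6M-character value would trip the old 5 MB default but parses here.
    String bigValue = "x".repeat(6_000_000);
    String json = mapper.writeValueAsString(new String[] {bigValue});
    String[] roundTripped = mapper.readValue(json, String[].class);
    System.out.println("parsed string of length " + roundTripped[0].length());
  }
}

The factory above instead mutates the shared AbstractJacksonDataCodec.JSON_FACTORY, presumably because the rest.li codecs construct their parsers internally and can only be reconfigured through that static instance.
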
- RestLiConfig config = new RestLiConfig(); - config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler()); - config.setResourcePackageNames("com.linkedin.metadata.resources"); - config.addFilter(new RestliLoggingFilter()); + RestLiConfig config = new RestLiConfig(); + config.setDocumentationRequestHandler(new DefaultDocumentationRequestHandler()); + config.setResourcePackageNames("com.linkedin.metadata.resources"); + config.addFilter(new RestliLoggingFilter()); - RestLiServer restLiServer = new RestLiServer(config, springInjectResourceFactory, parseqEngine); - return new RAPServlet(new FilterChainDispatcher(new DelegatingTransportDispatcher(restLiServer, restLiServer), - FilterChains.empty())); - } + RestLiServer restLiServer = new RestLiServer(config, springInjectResourceFactory, parseqEngine); + return new RAPServlet( + new FilterChainDispatcher( + new DelegatingTransportDispatcher(restLiServer, restLiServer), FilterChains.empty())); + } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java index 723f0333999dd..29211d295a2a1 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java +++ b/metadata-service/factories/src/main/java/com/linkedin/restli/server/RestliHandlerServlet.java @@ -1,28 +1,28 @@ package com.linkedin.restli.server; import com.linkedin.r2.transport.http.server.RAPServlet; +import java.io.IOException; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.springframework.web.HttpRequestHandler; import org.springframework.web.context.support.HttpRequestHandlerServlet; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; -import java.io.IOException; - @Component public class RestliHandlerServlet extends HttpRequestHandlerServlet implements HttpRequestHandler { - @Autowired - private RAPServlet _r2Servlet; + @Autowired private RAPServlet _r2Servlet; - @Override - public void service(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException { - _r2Servlet.service(req, res); - } + @Override + public void service(HttpServletRequest req, HttpServletResponse res) + throws ServletException, IOException { + _r2Servlet.service(req, res); + } - @Override - public void handleRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { - service(request, response); - } + @Override + public void handleRequest(HttpServletRequest request, HttpServletResponse response) + throws ServletException, IOException { + service(request, response); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java index 408c7b67b25f0..19a2a19fcaa4c 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/kafka/SimpleKafkaConsumerFactoryTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.kafka; +import static 
org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -8,25 +11,18 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @SpringBootTest( - properties = { - "spring.kafka.properties.security.protocol=SSL" - }, - classes = { - SimpleKafkaConsumerFactory.class, - ConfigurationProvider.class - }) + properties = {"spring.kafka.properties.security.protocol=SSL"}, + classes = {SimpleKafkaConsumerFactory.class, ConfigurationProvider.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class SimpleKafkaConsumerFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ConcurrentKafkaListenerContainerFactory testFactory; + @Autowired ConcurrentKafkaListenerContainerFactory testFactory; - @Test - void testInitialization() { - assertNotNull(testFactory); - assertEquals(testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), "SSL"); - } + @Test + void testInitialization() { + assertNotNull(testFactory); + assertEquals( + testFactory.getConsumerFactory().getConfigurationProperties().get("security.protocol"), + "SSL"); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java index 017e8f32886af..6cc1d293e24e6 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/CacheTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static com.datahub.util.RecordUtils.*; +import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; + import com.google.common.collect.ImmutableList; import com.hazelcast.config.Config; import com.hazelcast.core.HazelcastInstance; @@ -34,136 +37,163 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.datahub.util.RecordUtils.*; -import static com.linkedin.metadata.search.client.CachingEntitySearchService.*; - - public class CacheTest extends JetTestSupport { - HazelcastCacheManager cacheManager1; - HazelcastCacheManager cacheManager2; - HazelcastInstance instance1; - HazelcastInstance instance2; + HazelcastCacheManager cacheManager1; + HazelcastCacheManager cacheManager2; + HazelcastInstance instance1; + HazelcastInstance instance2; - public CacheTest() { - Config config = new Config(); + public CacheTest() { + Config config = new Config(); - instance1 = createHazelcastInstance(config); - instance2 = createHazelcastInstance(config); + instance1 = createHazelcastInstance(config); + instance2 = createHazelcastInstance(config); - cacheManager1 = new HazelcastCacheManager(instance1); - cacheManager2 = new HazelcastCacheManager(instance2); - } + cacheManager1 = new HazelcastCacheManager(instance1); + cacheManager2 = new HazelcastCacheManager(instance2); + } - @Test - public void hazelcastTest() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); - SearchResult searchResult = new SearchResult() + @Test + public void 
hazelcastTest() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); + SearchResult searchResult = + new SearchResult() .setEntities(new SearchEntityArray(List.of(searchEntity))) .setNumEntities(1) .setFrom(0) .setPageSize(1) .setMetadata(new SearchResultMetadata()); - Quintet, String, Filter, SortCriterion, CacheableSearcher.QueryPagination> - quintet = Quintet.with(List.of(corpuserUrn.toString()), "*", null, null, - new CacheableSearcher.QueryPagination(0, 1)); - - CacheableSearcher, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> cacheableSearcher1 = - new CacheableSearcher<>(cacheManager1.getCache("test"), 10, - querySize -> searchResult, - querySize -> quintet, null, true); - - CacheableSearcher, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> cacheableSearcher2 = - new CacheableSearcher<>(cacheManager2.getCache("test"), 10, + Quintet, String, Filter, SortCriterion, CacheableSearcher.QueryPagination> + quintet = + Quintet.with( + List.of(corpuserUrn.toString()), + "*", + null, + null, + new CacheableSearcher.QueryPagination(0, 1)); + + CacheableSearcher< + Quintet, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> + cacheableSearcher1 = + new CacheableSearcher<>( + cacheManager1.getCache("test"), + 10, querySize -> searchResult, - querySize -> quintet, null, true); - - // Cache result - SearchResult result = cacheableSearcher1.getSearchResults(0, 1); - Assert.assertNotEquals(result, null); - - Assert.assertEquals(instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); - Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); - } - - @Test - public void hazelcastTestScroll() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); - ScrollResult scrollResult = new ScrollResult() + querySize -> quintet, + null, + true); + + CacheableSearcher< + Quintet, String, Filter, SortCriterion, CacheableSearcher.QueryPagination>> + cacheableSearcher2 = + new CacheableSearcher<>( + cacheManager2.getCache("test"), + 10, + querySize -> searchResult, + querySize -> quintet, + null, + true); + + // Cache result + SearchResult result = cacheableSearcher1.getSearchResults(0, 1); + Assert.assertNotEquals(result, null); + + Assert.assertEquals( + instance1.getMap("test").get(quintet), instance2.getMap("test").get(quintet)); + Assert.assertEquals(cacheableSearcher1.getSearchResults(0, 1), searchResult); + Assert.assertEquals( + cacheableSearcher1.getSearchResults(0, 1), cacheableSearcher2.getSearchResults(0, 1)); + } + + @Test + public void hazelcastTestScroll() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + SearchEntity searchEntity = new SearchEntity().setEntity(corpuserUrn); + ScrollResult scrollResult = + new ScrollResult() .setEntities(new SearchEntityArray(List.of(searchEntity))) .setNumEntities(1) .setPageSize(1) .setMetadata(new SearchResultMetadata()); - final Criterion filterCriterion = new Criterion() + final Criterion filterCriterion = + new Criterion() .setField("platform") .setCondition(Condition.EQUAL) .setValue("hive") .setValues(new StringArray(ImmutableList.of("hive"))); - final Criterion subtypeCriterion = new Criterion() + final Criterion subtypeCriterion = + new Criterion() .setField("subtypes") .setCondition(Condition.EQUAL) 
.setValue("") .setValues(new StringArray(ImmutableList.of("view"))); - final Filter filterWithCondition = new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(filterCriterion))), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of(subtypeCriterion))) - )); - - Sextet, String, String, String, String, Integer> - sextet = Sextet.with(List.of(corpuserUrn.toString()), "*", toJsonString(filterWithCondition), null, null, 1); - - Cache cache1 = cacheManager1.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); - Cache cache2 = cacheManager2.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); - - // Cache result - String json = toJsonString(scrollResult); - cache1.put(sextet, json); - Assert.assertEquals(instance1.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet), - instance2.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet)); - String cachedResult1 = cache1.get(sextet, String.class); - String cachedResult2 = cache2.get(sextet, String.class); - Assert.assertEquals(cachedResult1, cachedResult2); - Assert.assertEquals(cache1.get(sextet, String.class), json); - Assert.assertEquals(cache2.get(sextet, String.class), json); - } - - @Test - public void testLineageCaching() { - CorpuserUrn corpuserUrn = new CorpuserUrn("user"); - EntityLineageResult lineageResult = new EntityLineageResult(); - LineageRelationshipArray array = new LineageRelationshipArray(); - LineageRelationship lineageRelationship = new LineageRelationship().setEntity(corpuserUrn).setType("type"); - for (int i = 0; i < 10000; i++) { - array.add(lineageRelationship); - } - lineageResult.setRelationships(array).setCount(1).setStart(0).setTotal(1); - CachedEntityLineageResult cachedEntityLineageResult = new CachedEntityLineageResult(lineageResult, - System.currentTimeMillis()); - - Cache cache1 = cacheManager1.getCache("relationshipSearchService"); - Cache cache2 = cacheManager2.getCache("relationshipSearchService"); - - EntityLineageResultCacheKey key = new EntityLineageResultCacheKey(corpuserUrn, LineageDirection.DOWNSTREAM, - 0L, 1L, 1, ChronoUnit.DAYS); - - cache1.put(key, cachedEntityLineageResult); - - Assert.assertEquals(instance1.getMap("relationshipSearchService").get(key), - instance2.getMap("relationshipSearchService").get(key)); - CachedEntityLineageResult cachedResult1 = cache1.get(key, CachedEntityLineageResult.class); - CachedEntityLineageResult cachedResult2 = cache2.get(key, CachedEntityLineageResult.class); - Assert.assertEquals(cachedResult1, cachedResult2); - Assert.assertEquals(cache1.get(key, CachedEntityLineageResult.class), cachedEntityLineageResult); - Assert.assertEquals(cache2.get(key, CachedEntityLineageResult.class).getEntityLineageResult(), lineageResult); + final Filter filterWithCondition = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(filterCriterion))), + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(subtypeCriterion))))); + + Sextet, String, String, String, String, Integer> sextet = + Sextet.with( + List.of(corpuserUrn.toString()), "*", toJsonString(filterWithCondition), null, null, 1); + + Cache cache1 = cacheManager1.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); + Cache cache2 = cacheManager2.getCache(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME); + + // Cache result + String json = toJsonString(scrollResult); + cache1.put(sextet, json); + Assert.assertEquals( + 
instance1.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet), + instance2.getMap(ENTITY_SEARCH_SERVICE_SCROLL_CACHE_NAME).get(sextet)); + String cachedResult1 = cache1.get(sextet, String.class); + String cachedResult2 = cache2.get(sextet, String.class); + Assert.assertEquals(cachedResult1, cachedResult2); + Assert.assertEquals(cache1.get(sextet, String.class), json); + Assert.assertEquals(cache2.get(sextet, String.class), json); + } + + @Test + public void testLineageCaching() { + CorpuserUrn corpuserUrn = new CorpuserUrn("user"); + EntityLineageResult lineageResult = new EntityLineageResult(); + LineageRelationshipArray array = new LineageRelationshipArray(); + LineageRelationship lineageRelationship = + new LineageRelationship().setEntity(corpuserUrn).setType("type"); + for (int i = 0; i < 10000; i++) { + array.add(lineageRelationship); } + lineageResult.setRelationships(array).setCount(1).setStart(0).setTotal(1); + CachedEntityLineageResult cachedEntityLineageResult = + new CachedEntityLineageResult(lineageResult, System.currentTimeMillis()); + + Cache cache1 = cacheManager1.getCache("relationshipSearchService"); + Cache cache2 = cacheManager2.getCache("relationshipSearchService"); + + EntityLineageResultCacheKey key = + new EntityLineageResultCacheKey( + corpuserUrn, LineageDirection.DOWNSTREAM, 0L, 1L, 1, ChronoUnit.DAYS); + + cache1.put(key, cachedEntityLineageResult); + + Assert.assertEquals( + instance1.getMap("relationshipSearchService").get(key), + instance2.getMap("relationshipSearchService").get(key)); + CachedEntityLineageResult cachedResult1 = cache1.get(key, CachedEntityLineageResult.class); + CachedEntityLineageResult cachedResult2 = cache2.get(key, CachedEntityLineageResult.class); + Assert.assertEquals(cachedResult1, cachedResult2); + Assert.assertEquals( + cache1.get(key, CachedEntityLineageResult.class), cachedEntityLineageResult); + Assert.assertEquals( + cache2.get(key, CachedEntityLineageResult.class).getEntityLineageResult(), lineageResult); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java index 266039afb45d5..f910f7981b138 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchBulkProcessorFactoryTest.java @@ -1,5 +1,8 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.update.ESBulkProcessor; import org.opensearch.action.support.WriteRequest; @@ -10,19 +13,15 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {ElasticSearchBulkProcessorFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchBulkProcessorFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ESBulkProcessor test; + @Autowired ESBulkProcessor test; - @Test - void testInjection() { - assertNotNull(test); - 
assertEquals(WriteRequest.RefreshPolicy.NONE, test.getWriteRequestRefreshPolicy()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(WriteRequest.RefreshPolicy.NONE, test.getWriteRequestRefreshPolicy()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java index 6ef623648640a..a3f3f469ea611 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryEmptyTest.java @@ -1,33 +1,31 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.util.Map; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @SpringBootTest( - properties = { - "elasticsearch.index.settingsOverrides=", - "elasticsearch.index.entitySettingsOverrides=", - "elasticsearch.index.prefix=test_prefix" - }, - classes = {ElasticSearchIndexBuilderFactory.class}) + properties = { + "elasticsearch.index.settingsOverrides=", + "elasticsearch.index.entitySettingsOverrides=", + "elasticsearch.index.prefix=test_prefix" + }, + classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchIndexBuilderFactoryEmptyTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(Map.of(), test.getIndexSettingOverrides()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), test.getIndexSettingOverrides()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java index 21c3265753ac5..fa4575c1e4142 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryOverridesTest.java @@ -1,31 +1,36 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.*; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; 
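
[Editor's illustration] These factory tests all follow the same slice pattern: @SpringBootTest is pointed at just the factory class under test (plus ConfigurationProvider), with inline properties standing in for application configuration. A minimal self-contained version of the pattern (GreetingFactory and the demo.greeting property are invented for illustration; the scaffolding matches the surrounding tests):

import static org.testng.Assert.assertEquals;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.testng.AbstractTestNGSpringContextTests;
import org.testng.annotations.Test;

/** Hypothetical factory standing in for the Kafka/Elasticsearch factories above. */
@Configuration
class GreetingFactory {
  @Value("${demo.greeting:hello}")
  private String greeting;

  @Bean
  String greeting() {
    return greeting;
  }
}

@SpringBootTest(
    properties = {"demo.greeting=bonjour"}, // inline override, as the tests above do
    classes = {GreetingFactory.class}) // load only the slice under test
public class GreetingFactoryTest extends AbstractTestNGSpringContextTests {
  @Autowired String greeting;

  @Test
  void testInjection() {
    // The inline test property wins over the @Value default of "hello".
    assertEquals(greeting, "bonjour");
  }
}

Restricting classes to the slice under test keeps these tests fast and avoids booting the full GMS application context.
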
import org.testng.annotations.Test; -import org.springframework.beans.factory.annotation.Autowired; - -import static org.testng.Assert.*; @SpringBootTest( - properties = { - "elasticsearch.index.settingsOverrides={\"my_index\":{\"number_of_shards\":\"10\"}}", - "elasticsearch.index.entitySettingsOverrides={\"my_entity\":{\"number_of_shards\":\"5\"}}", - "elasticsearch.index.prefix=test_prefix" - }, - classes = {ElasticSearchIndexBuilderFactory.class}) + properties = { + "elasticsearch.index.settingsOverrides={\"my_index\":{\"number_of_shards\":\"10\"}}", + "elasticsearch.index.entitySettingsOverrides={\"my_entity\":{\"number_of_shards\":\"5\"}}", + "elasticsearch.index.prefix=test_prefix" + }, + classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) -public class ElasticSearchIndexBuilderFactoryOverridesTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; +public class ElasticSearchIndexBuilderFactoryOverridesTest + extends AbstractTestNGSpringContextTests { + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals("10", test.getIndexSettingOverrides().get("test_prefix_my_index").get("number_of_shards")); - assertEquals("5", test.getIndexSettingOverrides().get("test_prefix_my_entityindex_v2").get("number_of_shards")); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals( + "10", test.getIndexSettingOverrides().get("test_prefix_my_index").get("number_of_shards")); + assertEquals( + "5", + test.getIndexSettingOverrides() + .get("test_prefix_my_entityindex_v2") + .get("number_of_shards")); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java index 4d63d18f370eb..2c309cb44b04e 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/search/ElasticSearchIndexBuilderFactoryTest.java @@ -1,7 +1,11 @@ package com.linkedin.gms.factory.search; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.test.context.SpringBootTest; @@ -9,21 +13,15 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.util.Map; - -import static org.testng.Assert.assertNotNull; -import static org.testng.Assert.assertEquals; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {ElasticSearchIndexBuilderFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class ElasticSearchIndexBuilderFactoryTest extends AbstractTestNGSpringContextTests { - @Autowired - ESIndexBuilder test; + @Autowired ESIndexBuilder test; - @Test - void testInjection() { - assertNotNull(test); - assertEquals(Map.of(), test.getIndexSettingOverrides()); - } + @Test + void testInjection() { + assertNotNull(test); + assertEquals(Map.of(), 
test.getIndexSettingOverrides()); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java index 6461df2894326..a8e6b50089602 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java @@ -1,7 +1,12 @@ package com.linkedin.gms.factory.secret; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.secret.SecretService; +import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.context.properties.EnableConfigurationProperties; @@ -10,28 +15,22 @@ import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - @TestPropertySource(locations = "classpath:/application.yml") @SpringBootTest(classes = {SecretServiceFactory.class}) @EnableConfigurationProperties(ConfigurationProvider.class) public class SecretServiceFactoryTest extends AbstractTestNGSpringContextTests { - @Value("${secretService.encryptionKey}") - private String encryptionKey; + @Value("${secretService.encryptionKey}") + private String encryptionKey; - @Autowired - SecretService test; + @Autowired SecretService test; - @Test - void testInjection() throws IOException { - assertEquals(encryptionKey, "ENCRYPTION_KEY"); - assertNotNull(test); - assertEquals(test.getHashedPassword("".getBytes(StandardCharsets.UTF_8), "password"), - "XohImNooBHFR0OVvjcYpJ3NgPQ1qq73WKhHvch0VQtg="); - } + @Test + void testInjection() throws IOException { + assertEquals(encryptionKey, "ENCRYPTION_KEY"); + assertNotNull(test); + assertEquals( + test.getHashedPassword("".getBytes(StandardCharsets.UTF_8), "password"), + "XohImNooBHFR0OVvjcYpJ3NgPQ1qq73WKhHvch0VQtg="); + } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java index 49fce75ab7c61..8268eeff48c5e 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/BackfillBrowsePathsV2StepTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePathEntry; @@ -20,56 +22,57 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchService; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; - -import static 
com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class BackfillBrowsePathsV2StepTest { private static final String VERSION = "2"; - private static final String UPGRADE_URN = String.format( - "urn:li:%s:%s", - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - "backfill-default-browse-paths-v2-step"); + private static final String UPGRADE_URN = + String.format( + "urn:li:%s:%s", + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "backfill-default-browse-paths-v2-step"); - private static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; + private static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; - private static final String DATA_JOB_URN = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATA_JOB_URN = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; private static final String DATA_FLOW_URN = "urn:li:dataFlow:(orchestrator,flowId,cluster)"; - private static final String ML_MODEL_URN = "urn:li:mlModel:(urn:li:dataPlatform:sagemaker,trustmodel,PROD)"; - private static final String ML_MODEL_GROUP_URN = "urn:li:mlModelGroup:(urn:li:dataPlatform:sagemaker,a-model-package-group,PROD)"; - private static final String ML_FEATURE_TABLE_URN = "urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,user_features)"; + private static final String ML_MODEL_URN = + "urn:li:mlModel:(urn:li:dataPlatform:sagemaker,trustmodel,PROD)"; + private static final String ML_MODEL_GROUP_URN = + "urn:li:mlModelGroup:(urn:li:dataPlatform:sagemaker,a-model-package-group,PROD)"; + private static final String ML_FEATURE_TABLE_URN = + "urn:li:mlFeatureTable:(urn:li:dataPlatform:feast,user_features)"; private static final String ML_FEATURE_URN = "urn:li:mlFeature:(test,feature_1)"; - private static final List ENTITY_TYPES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); - private static final List ENTITY_URNS = ImmutableList.of( - UrnUtils.getUrn(DATASET_URN), - UrnUtils.getUrn(DASHBOARD_URN), - UrnUtils.getUrn(CHART_URN), - UrnUtils.getUrn(DATA_JOB_URN), - UrnUtils.getUrn(DATA_FLOW_URN), - UrnUtils.getUrn(ML_MODEL_URN), - UrnUtils.getUrn(ML_MODEL_GROUP_URN), - UrnUtils.getUrn(ML_FEATURE_TABLE_URN), - UrnUtils.getUrn(ML_FEATURE_URN) - ); - + private static final List ENTITY_TYPES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); + private static final List ENTITY_URNS = + ImmutableList.of( + UrnUtils.getUrn(DATASET_URN), + UrnUtils.getUrn(DASHBOARD_URN), + UrnUtils.getUrn(CHART_URN), + UrnUtils.getUrn(DATA_JOB_URN), + UrnUtils.getUrn(DATA_FLOW_URN), + UrnUtils.getUrn(ML_MODEL_URN), + UrnUtils.getUrn(ML_MODEL_GROUP_URN), + UrnUtils.getUrn(ML_FEATURE_TABLE_URN), + UrnUtils.getUrn(ML_FEATURE_URN)); @Test public void 
testExecuteNoExistingBrowsePaths() throws Exception { @@ -77,31 +80,32 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); - - BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = new BackfillBrowsePathsV2Step(mockService, mockSearchService); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); + + BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = + new BackfillBrowsePathsV2Step(mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); - Mockito.verify(mockSearchService, Mockito.times(9)).scrollAcrossEntities( - Mockito.any(), - Mockito.eq("*"), - Mockito.any(Filter.class), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null) - ); - // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of each entity type - Mockito.verify(mockService, Mockito.times(11)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockSearchService, Mockito.times(9)) + .scrollAcrossEntities( + Mockito.any(), + Mockito.eq("*"), + Mockito.any(Filter.class), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq("5m"), + Mockito.eq(5000), + Mockito.eq(null)); + // Verify that 11 aspects are ingested, 2 for the upgrade request / result, 9 for ingesting 1 of + // each entity type + Mockito.verify(mockService, Mockito.times(11)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test @@ -110,42 +114,51 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { final SearchService mockSearchService = initMockSearchService(); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION); Map upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(response); - - BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = new BackfillBrowsePathsV2Step(mockService, mockSearchService); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + 
Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(response); + + BackfillBrowsePathsV2Step backfillBrowsePathsV2Step = + new BackfillBrowsePathsV2Step(mockService, mockSearchService); backfillBrowsePathsV2Step.execute(); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } - private EntityService initMockService() throws URISyntaxException { - final EntityService mockService = Mockito.mock(EntityService.class); + final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new UpgradeDefaultBrowsePathsStepTest.TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when(mockService.buildDefaultBrowsePathV2(Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))).thenReturn( - new BrowsePathsV2().setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); - - Mockito.when(mockService.getEntityV2( - Mockito.any(), - Mockito.eq(ENTITY_URNS.get(i)), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)) - )).thenReturn(null); + Mockito.when( + mockService.buildDefaultBrowsePathV2( + Mockito.eq(ENTITY_URNS.get(i)), Mockito.eq(true))) + .thenReturn( + new BrowsePathsV2() + .setPath(new BrowsePathEntryArray(new BrowsePathEntry().setId("test")))); + + Mockito.when( + mockService.getEntityV2( + Mockito.any(), + Mockito.eq(ENTITY_URNS.get(i)), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(null); } return mockService; @@ -155,16 +168,21 @@ private SearchService initMockSearchService() { final SearchService mockSearchService = Mockito.mock(SearchService.class); for (int i = 0; i < ENTITY_TYPES.size(); i++) { - Mockito.when(mockSearchService.scrollAcrossEntities( - Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), - Mockito.eq("*"), - Mockito.any(Filter.class), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq("5m"), - Mockito.eq(5000), - Mockito.eq(null) - )).thenReturn(new ScrollResult().setNumEntities(1).setEntities(new SearchEntityArray(new SearchEntity().setEntity(ENTITY_URNS.get(i))))); + Mockito.when( + mockSearchService.scrollAcrossEntities( + Mockito.eq(ImmutableList.of(ENTITY_TYPES.get(i))), + Mockito.eq("*"), + Mockito.any(Filter.class), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq("5m"), + Mockito.eq(5000), + Mockito.eq(null))) + .thenReturn( + new ScrollResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray(new SearchEntity().setEntity(ENTITY_URNS.get(i))))); } return mockSearchService; diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java index 0ae8eb2cba808..976698f3032d2 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDataPlatformInstancesStepTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import 
com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,31 +21,31 @@ import org.jetbrains.annotations.NotNull; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - /** * Test the behavior of IngestDataPlatformInstancesStep. * - * We expect it to check if any data platform instance aspects already exist in the database and if none are found, - * to go through all the stored entities and ingest a data platform instance aspect for any that are compatible with it. + *
<p>We expect it to check if any data platform instance aspects already exist in the database and + * if none are found, to go through all the stored entities and ingest a data platform instance + * aspect for any that are compatible with it. * - * CorpUser is used as an example of an entity that is not compatible with data platform instance and therefore should be ignored. - * Char is used as an example of an entity that should get adorned with a data platform instance. + *
<p>CorpUser is used as an example of an entity that is not compatible with data platform instance + * and therefore should be ignored. Chart is used as an example of an entity that should get adorned + * with a data platform instance. * - * See {@link DataPlatformInstanceUtils} for the compatibility rules. + *
<p>
See {@link DataPlatformInstanceUtils} for the compatibility rules. */ public class IngestDataPlatformInstancesStepTest { @Test - public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() throws Exception { + public void testExecuteDoesNothingWhenDataPlatformInstanceAspectsAlreadyExists() + throws Exception { final EntityService entityService = mock(EntityService.class); final AspectMigrationsDao migrationsDao = mock(AspectMigrationsDao.class); mockDBWithDataPlatformInstanceAspects(migrationsDao); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(migrationsDao, times(1)).checkIfAspectExists(anyString()); @@ -57,7 +60,8 @@ public void testExecuteCopesWithEmptyDB() throws Exception { mockEmptyDB(migrationsDao); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(migrationsDao, times(1)).checkIfAspectExists(anyString()); @@ -75,9 +79,15 @@ public void testExecuteChecksKeySpecForAllUrns() throws Exception { final int countOfChartEntities = 4; final int totalUrnsInDB = countOfCorpUserEntities + countOfChartEntities; - mockDBWithWorkToDo(entityRegistry, entityService, migrationsDao, countOfCorpUserEntities, countOfChartEntities); + mockDBWithWorkToDo( + entityRegistry, + entityService, + migrationsDao, + countOfCorpUserEntities, + countOfChartEntities); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(entityService, times(totalUrnsInDB)).getKeyAspectSpec(any(Urn.class)); @@ -91,35 +101,55 @@ public void testExecuteWhenSomeEntitiesShouldReceiveDataPlatformInstance() throw final int countOfCorpUserEntities = 5; final int countOfChartEntities = 7; - mockDBWithWorkToDo(entityRegistry, entityService, migrationsDao, countOfCorpUserEntities, countOfChartEntities); + mockDBWithWorkToDo( + entityRegistry, + entityService, + migrationsDao, + countOfCorpUserEntities, + countOfChartEntities); - final IngestDataPlatformInstancesStep step = new IngestDataPlatformInstancesStep(entityService, migrationsDao); + final IngestDataPlatformInstancesStep step = + new IngestDataPlatformInstancesStep(entityService, migrationsDao); step.execute(); verify(entityService, times(1)) .ingestAspects( - argThat(arg -> - arg.getItems().stream() - .allMatch(item -> item.getUrn().getEntityType().equals("chart") - && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((UpsertBatchItem) item).getAspect() instanceof DataPlatformInstance) - ), + argThat( + arg -> + arg.getItems().stream() + .allMatch( + item -> + item.getUrn().getEntityType().equals("chart") + && item.getAspectName() + .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) + && ((UpsertBatchItem) item).getAspect() + instanceof DataPlatformInstance)), any(), anyBoolean(), anyBoolean()); verify(entityService, times(0)) - .ingestAspects(argThat(arg -> - !arg.getItems().stream() - .allMatch(item -> item.getUrn().getEntityType().equals("chart") - && item.getAspectName().equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) - && ((UpsertBatchItem) 
item).getAspect() instanceof DataPlatformInstance) - ), any(), anyBoolean(), anyBoolean()); + .ingestAspects( + argThat( + arg -> + !arg.getItems().stream() + .allMatch( + item -> + item.getUrn().getEntityType().equals("chart") + && item.getAspectName() + .equals(DATA_PLATFORM_INSTANCE_ASPECT_NAME) + && ((UpsertBatchItem) item).getAspect() + instanceof DataPlatformInstance)), + any(), + anyBoolean(), + anyBoolean()); } @NotNull private ConfigEntityRegistry getTestEntityRegistry() { return new ConfigEntityRegistry( - IngestDataPlatformInstancesStepTest.class.getClassLoader().getResourceAsStream("test-entity-registry.yaml")); + IngestDataPlatformInstancesStepTest.class + .getClassLoader() + .getResourceAsStream("test-entity-registry.yaml")); } private void mockDBWithDataPlatformInstanceAspects(AspectMigrationsDao migrationsDao) { @@ -137,18 +167,36 @@ private void mockDBWithWorkToDo( AspectMigrationsDao migrationsDao, int countOfCorpUserEntities, int countOfChartEntities) { - List corpUserUrns = insertMockEntities(countOfCorpUserEntities, "corpuser", "urn:li:corpuser:test%d", entityRegistry, - entityService); - List charUrns = insertMockEntities(countOfChartEntities, "chart", "urn:li:chart:(looker,test%d)", entityRegistry, - entityService); - List allUrnsInDB = Stream.concat(corpUserUrns.stream(), charUrns.stream()).map(Urn::toString).collect(Collectors.toList()); + List corpUserUrns = + insertMockEntities( + countOfCorpUserEntities, + "corpuser", + "urn:li:corpuser:test%d", + entityRegistry, + entityService); + List charUrns = + insertMockEntities( + countOfChartEntities, + "chart", + "urn:li:chart:(looker,test%d)", + entityRegistry, + entityService); + List allUrnsInDB = + Stream.concat(corpUserUrns.stream(), charUrns.stream()) + .map(Urn::toString) + .collect(Collectors.toList()); when(migrationsDao.checkIfAspectExists(DATA_PLATFORM_INSTANCE_ASPECT_NAME)).thenReturn(false); when(migrationsDao.countEntities()).thenReturn((long) allUrnsInDB.size()); when(migrationsDao.listAllUrns(anyInt(), anyInt())).thenReturn(allUrnsInDB); when(entityService.getEntityRegistry()).thenReturn(entityRegistry); } - private List insertMockEntities(int count, String entity, String urnTemplate, EntityRegistry entityRegistry, EntityService entityService) { + private List insertMockEntities( + int count, + String entity, + String urnTemplate, + EntityRegistry entityRegistry, + EntityService entityService) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entity); AspectSpec keySpec = entitySpec.getKeyAspectSpec(); List urns = new ArrayList<>(); diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java index 24bdd193a39c8..b28a6e9f5cc5b 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/IngestDefaultGlobalSettingsStepTest.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.boot.steps; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.events.metadata.ChangeType; @@ -12,15 +15,11 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; - - /** * 
Test the behavior of IngestDefaultGlobalSettingsStep. * - * We expect it to ingest a JSON file, throwing if the JSON file - * is malformed or does not match the PDL model for GlobalSettings.pdl. + *
<p>
We expect it to ingest a JSON file, throwing if the JSON file is malformed or does not match + * the PDL model for GlobalSettings.pdl. */ public class IngestDefaultGlobalSettingsStepTest { @@ -29,20 +28,21 @@ public void testExecuteValidSettingsNoExistingSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_valid.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_valid.json"); step.execute(); GlobalSettingsInfo expectedResult = new GlobalSettingsInfo(); - expectedResult.setViews(new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:test"))); - - Mockito.verify(entityService, times(1)).ingestProposal( - Mockito.eq(buildUpdateSettingsProposal(expectedResult)), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + expectedResult.setViews( + new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:test"))); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateSettingsProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -50,26 +50,29 @@ public void testExecuteValidSettingsExistingSettings() throws Exception { // Verify that the user provided settings overrides are NOT overwritten. final EntityService entityService = mock(EntityService.class); - final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo() - .setViews(new GlobalViewsSettings() - .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); + final GlobalSettingsInfo existingSettings = + new GlobalSettingsInfo() + .setViews( + new GlobalViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); configureEntityServiceMock(entityService, existingSettings); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_valid.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_valid.json"); step.execute(); // Verify that the merge preserves the user settings. 
GlobalSettingsInfo expectedResult = new GlobalSettingsInfo(); - expectedResult.setViews(new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); - - Mockito.verify(entityService, times(1)).ingestProposal( - Mockito.eq(buildUpdateSettingsProposal(expectedResult)), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + expectedResult.setViews( + new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:custom"))); + + Mockito.verify(entityService, times(1)) + .ingestProposal( + Mockito.eq(buildUpdateSettingsProposal(expectedResult)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -77,9 +80,9 @@ public void testExecuteInvalidJsonSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_invalid_json.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_invalid_json.json"); Assert.assertThrows(RuntimeException.class, step::execute); @@ -92,9 +95,9 @@ public void testExecuteInvalidModelSettings() throws Exception { final EntityService entityService = mock(EntityService.class); configureEntityServiceMock(entityService, null); - final IngestDefaultGlobalSettingsStep step = new IngestDefaultGlobalSettingsStep( - entityService, - "./boot/test_global_settings_invalid_model.json"); + final IngestDefaultGlobalSettingsStep step = + new IngestDefaultGlobalSettingsStep( + entityService, "./boot/test_global_settings_invalid_model.json"); Assert.assertThrows(RuntimeException.class, step::execute); @@ -102,15 +105,18 @@ public void testExecuteInvalidModelSettings() throws Exception { verifyNoInteractions(entityService); } - private static void configureEntityServiceMock(final EntityService mockService, final GlobalSettingsInfo settingsInfo) { - Mockito.when(mockService.getAspect( - Mockito.eq(GLOBAL_SETTINGS_URN), - Mockito.eq(GLOBAL_SETTINGS_INFO_ASPECT_NAME), - Mockito.eq(0L) - )).thenReturn(settingsInfo); + private static void configureEntityServiceMock( + final EntityService mockService, final GlobalSettingsInfo settingsInfo) { + Mockito.when( + mockService.getAspect( + Mockito.eq(GLOBAL_SETTINGS_URN), + Mockito.eq(GLOBAL_SETTINGS_INFO_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(settingsInfo); } - private static MetadataChangeProposal buildUpdateSettingsProposal(final GlobalSettingsInfo settings) { + private static MetadataChangeProposal buildUpdateSettingsProposal( + final GlobalSettingsInfo settings) { final MetadataChangeProposal mcp = new MetadataChangeProposal(); mcp.setEntityUrn(GLOBAL_SETTINGS_URN); mcp.setEntityType(GLOBAL_SETTINGS_ENTITY_NAME); @@ -119,4 +125,4 @@ private static MetadataChangeProposal buildUpdateSettingsProposal(final GlobalSe mcp.setAspect(GenericRecordUtils.serializeAspect(settings)); return mcp; } -} \ No newline at end of file +} diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java index aca5e322567d8..5a9e93f70c952 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java +++ 
b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreColumnLineageIndicesTest.java @@ -22,25 +22,27 @@ import com.linkedin.metadata.query.ListResultMetadata; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; -import org.mockito.Mockito; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.Future; +import javax.annotation.Nonnull; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RestoreColumnLineageIndicesTest { private static final String VERSION_1 = "1"; private static final String VERSION_2 = "2"; private static final String COLUMN_LINEAGE_UPGRADE_URN = - String.format("urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-column-lineage-indices"); - private final Urn datasetUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); + String.format( + "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-column-lineage-indices"); + private final Urn datasetUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hdfs,SampleHdfsDataset,PROD)"); private final Urn chartUrn = UrnUtils.getUrn("urn:li:chart:(looker,dashboard_elements.1)"); - private final Urn dashboardUrn = UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.thelook::web_analytics_overview)"); + private final Urn dashboardUrn = + UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.thelook::web_analytics_overview)"); @Test public void testExecuteFirstTime() throws Exception { @@ -54,54 +56,55 @@ public void testExecuteFirstTime() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), 
- Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test @@ -116,54 +119,55 @@ public void testExecuteWithNewVersion() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + 
Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test @@ -178,106 +182,126 @@ public void testDoesNotExecuteWithSameVersion() throws Exception { final AspectSpec aspectSpec = mockAspectSpecs(mockRegistry); - final RestoreColumnLineageIndices restoreIndicesStep = new RestoreColumnLineageIndices(mockService, mockRegistry); + final RestoreColumnLineageIndices restoreIndicesStep = + new RestoreColumnLineageIndices(mockService, mockRegistry); restoreIndicesStep.execute(); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.DATASET_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.CHART_ENTITY_NAME); Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.DASHBOARD_ENTITY_NAME); // creates upgradeRequest and upgradeResult aspects - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(datasetUrn), - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(chartUrn), - Mockito.eq(Constants.CHART_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(dashboardUrn), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(datasetUrn), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(chartUrn), + 
Mockito.eq(Constants.CHART_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(dashboardUrn), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } private void mockGetUpstreamLineage(@Nonnull Urn datasetUrn, @Nonnull EntityService mockService) { - final List extraInfos = ImmutableList.of( - new ExtraInfo() - .setUrn(datasetUrn) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)) - ); + final List extraInfos = + ImmutableList.of( + new ExtraInfo() + .setUrn(datasetUrn) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(1000) - )).thenReturn(new ListResult<>( - ImmutableList.of(new UpstreamLineage()), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), - 1, - false, - 1, - 1, - 1)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new ListResult<>( + ImmutableList.of(new UpstreamLineage()), + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), + 1, + false, + 1, + 1, + 1)); } - private void mockGetInputFields(@Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { - final List extraInfos = ImmutableList.of( - new ExtraInfo() - .setUrn(entityUrn) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)) - ); + private void mockGetInputFields( + @Nonnull Urn entityUrn, @Nonnull String entityName, @Nonnull EntityService mockService) { + final List extraInfos = + ImmutableList.of( + new ExtraInfo() + .setUrn(entityUrn) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(entityName), - Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(1000) - )).thenReturn(new ListResult<>( - ImmutableList.of(new InputFields()), - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), - 1, - false, - 1, - 1, - 1)); + Mockito.when( + mockService.listLatestAspects( + 
Mockito.eq(entityName), + Mockito.eq(Constants.INPUT_FIELDS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new ListResult<>( + ImmutableList.of(new InputFields()), + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos)), + 1, + false, + 1, + 1, + 1)); } private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { @@ -285,28 +309,39 @@ private AspectSpec mockAspectSpecs(@Nonnull EntityRegistry mockRegistry) { final AspectSpec aspectSpec = Mockito.mock(AspectSpec.class); // Mock for upstreamLineage Mockito.when(mockRegistry.getEntitySpec(Constants.DATASET_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock inputFields for charts Mockito.when(mockRegistry.getEntitySpec(Constants.CHART_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock inputFields for dashboards - Mockito.when(mockRegistry.getEntitySpec(Constants.DASHBOARD_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.DASHBOARD_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.INPUT_FIELDS_ASPECT_NAME)) + .thenReturn(aspectSpec); return aspectSpec; } - private void mockGetUpgradeStep(boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) throws Exception { + private void mockGetUpgradeStep( + boolean shouldReturnResponse, @Nonnull String version, @Nonnull EntityService mockService) + throws Exception { final Urn upgradeEntityUrn = UrnUtils.getUrn(COLUMN_LINEAGE_UPGRADE_URN); - final com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(version); + final com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(version); final Map upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - final EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(shouldReturnResponse ? response : null); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + final EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(shouldReturnResponse ? 
response : null); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java index 3753904053256..a4f0c5e0aaba0 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/RestoreGlossaryIndicesTest.java @@ -13,59 +13,107 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.models.EntitySpec; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; -import java.util.List; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.Future; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RestoreGlossaryIndicesTest { private static final String VERSION_1 = "1"; private static final String VERSION_2 = "2"; - private static final String GLOSSARY_UPGRADE_URN = String.format("urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); + private static final String GLOSSARY_UPGRADE_URN = + String.format( + "urn:li:%s:%s", Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "restore-glossary-indices-ui"); - private void mockGetTermInfo(Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { + private void mockGetTermInfo( + Urn glossaryTermUrn, EntitySearchService mockSearchService, EntityService mockService) + throws Exception { Map termInfoAspects = new HashMap<>(); - termInfoAspects.put(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(new GlossaryTermInfo().setName("test").data()))); + termInfoAspects.put( + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(new GlossaryTermInfo().setName("test").data()))); Map termInfoResponses = new HashMap<>(); - termInfoResponses.put(glossaryTermUrn, new EntityResponse().setUrn(glossaryTermUrn).setAspects(new EnvelopedAspectMap(termInfoAspects))); - Mockito.when(mockSearchService.search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), "", null, null, 0, 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true))) - .thenReturn(new SearchResult().setNumEntities(1).setEntities(new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(glossaryTermUrn))))); - Mockito.when(mockService.getEntitiesV2( - Constants.GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(Collections.singleton(glossaryTermUrn)), - Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME))) + termInfoResponses.put( + glossaryTermUrn, + new EntityResponse() + .setUrn(glossaryTermUrn) + .setAspects(new EnvelopedAspectMap(termInfoAspects))); + Mockito.when( + mockSearchService.search( + 
List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true))) + .thenReturn( + new SearchResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(glossaryTermUrn))))); + Mockito.when( + mockService.getEntitiesV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(Collections.singleton(glossaryTermUrn)), + Collections.singleton(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME))) .thenReturn(termInfoResponses); } - private void mockGetNodeInfo(Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) throws Exception { + private void mockGetNodeInfo( + Urn glossaryNodeUrn, EntitySearchService mockSearchService, EntityService mockService) + throws Exception { Map nodeInfoAspects = new HashMap<>(); - nodeInfoAspects.put(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(new GlossaryNodeInfo().setName("test").data()))); + nodeInfoAspects.put( + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(new GlossaryNodeInfo().setName("test").data()))); Map nodeInfoResponses = new HashMap<>(); - nodeInfoResponses.put(glossaryNodeUrn, new EntityResponse().setUrn(glossaryNodeUrn).setAspects(new EnvelopedAspectMap(nodeInfoAspects))); - Mockito.when(mockSearchService.search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), "", null, null, 0, 1000, - new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true))) - .thenReturn(new SearchResult().setNumEntities(1).setEntities(new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(glossaryNodeUrn))))); - Mockito.when(mockService.getEntitiesV2( - Constants.GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(Collections.singleton(glossaryNodeUrn)), - Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) - )) + nodeInfoResponses.put( + glossaryNodeUrn, + new EntityResponse() + .setUrn(glossaryNodeUrn) + .setAspects(new EnvelopedAspectMap(nodeInfoAspects))); + Mockito.when( + mockSearchService.search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true))) + .thenReturn( + new SearchResult() + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(glossaryNodeUrn))))); + Mockito.when( + mockService.getEntitiesV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(Collections.singleton(glossaryNodeUrn)), + Collections.singleton(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME))) .thenReturn(nodeInfoResponses); } @@ -73,200 +121,257 @@ private AspectSpec mockGlossaryAspectSpecs(EntityRegistry mockRegistry) { EntitySpec entitySpec = Mockito.mock(EntitySpec.class); AspectSpec aspectSpec = Mockito.mock(AspectSpec.class); // Mock for Terms - Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME)).thenReturn(entitySpec); - Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)) + .thenReturn(aspectSpec); // Mock for Nodes - Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME)).thenReturn(entitySpec); - 
Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)).thenReturn(aspectSpec); + Mockito.when(mockRegistry.getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME)) + .thenReturn(entitySpec); + Mockito.when(entitySpec.getAspectSpec(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)) + .thenReturn(aspectSpec); return aspectSpec; } @Test public void testExecuteFirstTime() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(null); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(null); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService); mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService); AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - 
Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test public void testExecutesWithNewVersion() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_2); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_2); Map upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(response); - Mockito.when(mockService.alwaysProduceMCLAsync( - Mockito.any(Urn.class), Mockito.anyString(), Mockito.anyString(), Mockito.any(AspectSpec.class), - Mockito.eq(null), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), - Mockito.any(ChangeType.class) - )).thenReturn(Pair.of(Mockito.mock(Future.class), false)); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + 
mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(response); + Mockito.when( + mockService.alwaysProduceMCLAsync( + Mockito.any(Urn.class), + Mockito.anyString(), + Mockito.anyString(), + Mockito.any(AspectSpec.class), + Mockito.eq(null), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(ChangeType.class))) + .thenReturn(Pair.of(Mockito.mock(Future.class), false)); mockGetTermInfo(glossaryTermUrn, mockSearchService, mockService); mockGetNodeInfo(glossaryNodeUrn, mockSearchService, mockService); AspectSpec aspectSpec = mockGlossaryAspectSpecs(mockRegistry); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(1)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(1)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.eq(aspectSpec), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(1)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(1)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.eq(aspectSpec), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } @Test public void testDoesNotRunWhenAlreadyExecuted() throws Exception { - final Urn glossaryTermUrn = Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); - final Urn glossaryNodeUrn = Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); + final Urn glossaryTermUrn = + 
Urn.createFromString("urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"); + final Urn glossaryNodeUrn = + Urn.createFromString("urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"); final EntityService mockService = Mockito.mock(EntityService.class); final EntitySearchService mockSearchService = Mockito.mock(EntitySearchService.class); final EntityRegistry mockRegistry = Mockito.mock(EntityRegistry.class); final Urn upgradeEntityUrn = Urn.createFromString(GLOSSARY_UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); Map upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - upgradeEntityUrn, - Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME) - )).thenReturn(response); + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, + upgradeEntityUrn, + Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))) + .thenReturn(response); - RestoreGlossaryIndices restoreIndicesStep = new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); + RestoreGlossaryIndices restoreIndicesStep = + new RestoreGlossaryIndices(mockService, mockSearchService, mockRegistry); restoreIndicesStep.execute(); - Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); - Mockito.verify(mockRegistry, Mockito.times(0)).getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); - Mockito.verify(mockSearchService, Mockito.times(0)).search(List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), - "", null, null, 0, 1000, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - Mockito.verify(mockSearchService, Mockito.times(0)).search(List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), - "", null, null, 0, 1000, new SearchFlags().setFulltext(false) - .setSkipAggregates(true).setSkipHighlighting(true)); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(glossaryTermUrn), - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); - Mockito.verify(mockService, Mockito.times(0)).alwaysProduceMCLAsync( - Mockito.eq(glossaryNodeUrn), - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(ChangeType.RESTATE) - ); + 
Mockito.verify(mockRegistry, Mockito.times(0)) + .getEntitySpec(Constants.GLOSSARY_TERM_ENTITY_NAME); + Mockito.verify(mockRegistry, Mockito.times(0)) + .getEntitySpec(Constants.GLOSSARY_NODE_ENTITY_NAME); + Mockito.verify(mockSearchService, Mockito.times(0)) + .search( + List.of(Constants.GLOSSARY_TERM_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.verify(mockSearchService, Mockito.times(0)) + .search( + List.of(Constants.GLOSSARY_NODE_ENTITY_NAME), + "", + null, + null, + 0, + 1000, + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true)); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryTermUrn), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); + Mockito.verify(mockService, Mockito.times(0)) + .alwaysProduceMCLAsync( + Mockito.eq(glossaryNodeUrn), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(ChangeType.RESTATE)); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java index 5e4ad6e7fe880..17159ba1baf53 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/metadata/boot/steps/UpgradeDefaultBrowsePathsStepTest.java @@ -37,14 +37,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; - public class UpgradeDefaultBrowsePathsStepTest { private static final String VERSION_1 = "1"; - private static final String UPGRADE_URN = String.format( - "urn:li:%s:%s", - Constants.DATA_HUB_UPGRADE_ENTITY_NAME, - "upgrade-default-browse-paths-step"); + private static final String UPGRADE_URN = + String.format( + "urn:li:%s:%s", + Constants.DATA_HUB_UPGRADE_ENTITY_NAME, "upgrade-default-browse-paths-step"); @Test public void testExecuteNoExistingBrowsePaths() throws Exception { @@ -54,180 +53,218 @@ public void testExecuteNoExistingBrowsePaths() throws Exception { Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); final List browsePaths1 = Collections.emptyList(); - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - 
Mockito.eq(0),
- Mockito.eq(5000)
- )).thenReturn(new ListResult<>(
- browsePaths1,
- new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())),
- 0,
- false,
- 0,
- 0,
- 2));
+ Mockito.when(
+ mockService.listLatestAspects(
+ Mockito.eq(Constants.DATASET_ENTITY_NAME),
+ Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+ Mockito.eq(0),
+ Mockito.eq(5000)))
+ .thenReturn(
+ new ListResult<>(
+ browsePaths1,
+ new ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())),
+ 0,
+ false,
+ 0,
+ 0,
+ 2));
initMockServiceOtherEntities(mockService);

- UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService);
+ UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep =
+ new UpgradeDefaultBrowsePathsStep(mockService);
upgradeDefaultBrowsePathsStep.execute();

- Mockito.verify(mockService, Mockito.times(1)).listLatestAspects(
- Mockito.eq(Constants.DATASET_ENTITY_NAME),
- Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
- Mockito.eq(0),
- Mockito.eq(5000)
- );
- // Verify that 4 aspects are ingested, 2 for the upgrade request / result, but none for ingesting
- Mockito.verify(mockService, Mockito.times(2)).ingestProposal(
- Mockito.any(MetadataChangeProposal.class),
- Mockito.any(),
- Mockito.eq(false)
- );
+ Mockito.verify(mockService, Mockito.times(1))
+ .listLatestAspects(
+ Mockito.eq(Constants.DATASET_ENTITY_NAME),
+ Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+ Mockito.eq(0),
+ Mockito.eq(5000));
+ // Verify that 2 aspects are ingested for the upgrade request / result, but none for
+ // browse paths
+ Mockito.verify(mockService, Mockito.times(2))
+ .ingestProposal(
+ Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false));
}

@Test
public void testExecuteFirstTime() throws Exception {
- Urn testUrn1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset1,PROD)");
- Urn testUrn2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)");
+ Urn testUrn1 =
+ UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset1,PROD)");
+ Urn testUrn2 =
+ UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset2,PROD)");

final EntityService mockService = Mockito.mock(EntityService.class);
final EntityRegistry registry = new TestEntityRegistry();
Mockito.when(mockService.getEntityRegistry()).thenReturn(registry);

- Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1))).thenReturn(
- new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
- Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2))).thenReturn(
- new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
+ Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn1)))
+ .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));
+ Mockito.when(mockService.buildDefaultBrowsePath(Mockito.eq(testUrn2)))
+ .thenReturn(new BrowsePaths().setPaths(new StringArray(ImmutableList.of("/prod/kafka"))));

final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN);
- Mockito.when(mockService.getEntityV2(
- Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME),
- Mockito.eq(upgradeEntityUrn),
- Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))
- )).thenReturn(null);
- final List browsePaths1 = ImmutableList.of(
- new BrowsePaths().setPaths(new 
StringArray(ImmutableList.of(BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn1, registry)))),
- new BrowsePaths().setPaths(new StringArray(ImmutableList.of(BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn2, registry))))
- );
-
- final List extraInfos1 = ImmutableList.of(
- new ExtraInfo()
- .setUrn(testUrn1)
- .setVersion(0L)
- .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)),
- new ExtraInfo()
- .setUrn(testUrn2)
- .setVersion(0L)
- .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))
- );
-
- Mockito.when(mockService.listLatestAspects(
- Mockito.eq(Constants.DATASET_ENTITY_NAME),
- Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
- Mockito.eq(0),
- Mockito.eq(5000)
- )).thenReturn(new ListResult<>(
- browsePaths1,
- new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos1)),
- 2,
- false,
- 2,
- 2,
- 2));
+ Mockito.when(
+ mockService.getEntityV2(
+ Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME),
+ Mockito.eq(upgradeEntityUrn),
+ Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME))))
+ .thenReturn(null);
+ final List browsePaths1 =
+ ImmutableList.of(
+ new BrowsePaths()
+ .setPaths(
+ new StringArray(
+ ImmutableList.of(
+ BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn1, registry)))),
+ new BrowsePaths()
+ .setPaths(
+ new StringArray(
+ ImmutableList.of(
+ BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn2, registry))))));
+
+ final List extraInfos1 =
+ ImmutableList.of(
+ new ExtraInfo()
+ .setUrn(testUrn1)
+ .setVersion(0L)
+ .setAudit(
+ new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)),
+ new ExtraInfo()
+ .setUrn(testUrn2)
+ .setVersion(0L)
+ .setAudit(
+ new AuditStamp()
+ .setActor(UrnUtils.getUrn("urn:li:corpuser:test"))
+ .setTime(0L)));
+
+ Mockito.when(
+ mockService.listLatestAspects(
+ Mockito.eq(Constants.DATASET_ENTITY_NAME),
+ Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+ Mockito.eq(0),
+ Mockito.eq(5000)))
+ .thenReturn(
+ new ListResult<>(
+ browsePaths1,
+ new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos1)),
+ 2,
+ false,
+ 2,
+ 2,
+ 2));
initMockServiceOtherEntities(mockService);

- UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService);
+ UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep =
+ new UpgradeDefaultBrowsePathsStep(mockService);
upgradeDefaultBrowsePathsStep.execute();

- Mockito.verify(mockService, Mockito.times(1)).listLatestAspects(
- Mockito.eq(Constants.DATASET_ENTITY_NAME),
- Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
- Mockito.eq(0),
- Mockito.eq(5000)
- );
- // Verify that 4 aspects are ingested, 2 for the upgrade request / result and 2 for the browse pahts
- Mockito.verify(mockService, Mockito.times(4)).ingestProposal(
- Mockito.any(MetadataChangeProposal.class),
- Mockito.any(),
- Mockito.eq(false)
- );
+ Mockito.verify(mockService, Mockito.times(1))
+ .listLatestAspects(
+ Mockito.eq(Constants.DATASET_ENTITY_NAME),
+ Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME),
+ Mockito.eq(0),
+ Mockito.eq(5000));
+ // Verify that 4 aspects are ingested, 2 for the upgrade request / result and 2 for the browse
+ // paths
+ Mockito.verify(mockService, Mockito.times(4))
+ .ingestProposal(
+ Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false));
}

@Test
public void testDoesNotRunWhenBrowsePathIsNotQualified() throws Exception {
// Test for browse paths that are not ingested
- Urn testUrn3 = 
UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset3,PROD)"); // Do not migrate - Urn testUrn4 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not migrate + Urn testUrn3 = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset3,PROD)"); // Do not + // migrate + Urn testUrn4 = + UrnUtils.getUrn( + "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset4,PROD)"); // Do not + // migrate final EntityService mockService = Mockito.mock(EntityService.class); final EntityRegistry registry = new TestEntityRegistry(); Mockito.when(mockService.getEntityRegistry()).thenReturn(registry); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(null); - - final List browsePaths2 = ImmutableList.of( - new BrowsePaths().setPaths(new StringArray(ImmutableList.of( - BrowsePathUtils.getDefaultBrowsePath(testUrn3, registry, '.')))), - new BrowsePaths().setPaths(new StringArray(ImmutableList.of( - BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn4, registry), - BrowsePathUtils.getDefaultBrowsePath(testUrn4, registry, '.')))) - ); - - final List extraInfos2 = ImmutableList.of( - new ExtraInfo() - .setUrn(testUrn3) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)), - new ExtraInfo() - .setUrn(testUrn4) - .setVersion(0L) - .setAudit(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L))); - - - Mockito.when(mockService.listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - browsePaths2, - new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos2)), - 2, - false, - 2, - 2, - 2)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(null); + + final List browsePaths2 = + ImmutableList.of( + new BrowsePaths() + .setPaths( + new StringArray( + ImmutableList.of( + BrowsePathUtils.getDefaultBrowsePath(testUrn3, registry, '.')))), + new BrowsePaths() + .setPaths( + new StringArray( + ImmutableList.of( + BrowsePathUtils.getLegacyDefaultBrowsePath(testUrn4, registry), + BrowsePathUtils.getDefaultBrowsePath(testUrn4, registry, '.'))))); + + final List extraInfos2 = + ImmutableList.of( + new ExtraInfo() + .setUrn(testUrn3) + .setVersion(0L) + .setAudit( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)), + new ExtraInfo() + .setUrn(testUrn4) + .setVersion(0L) + .setAudit( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L))); + + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + browsePaths2, + new ListResultMetadata().setExtraInfos(new ExtraInfoArray(extraInfos2)), + 2, + false, + 2, + 2, + 2)); initMockServiceOtherEntities(mockService); - UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = new UpgradeDefaultBrowsePathsStep(mockService); + 
UpgradeDefaultBrowsePathsStep upgradeDefaultBrowsePathsStep = + new UpgradeDefaultBrowsePathsStep(mockService); upgradeDefaultBrowsePathsStep.execute(); - Mockito.verify(mockService, Mockito.times(1)).listLatestAspects( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - ); + Mockito.verify(mockService, Mockito.times(1)) + .listLatestAspects( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000)); // Verify that 2 aspects are ingested, only those for the upgrade step - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(), Mockito.eq(false)); } @Test @@ -235,48 +272,55 @@ public void testDoesNotRunWhenAlreadyExecuted() throws Exception { final EntityService mockService = Mockito.mock(EntityService.class); final Urn upgradeEntityUrn = Urn.createFromString(UPGRADE_URN); - com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); + com.linkedin.upgrade.DataHubUpgradeRequest upgradeRequest = + new com.linkedin.upgrade.DataHubUpgradeRequest().setVersion(VERSION_1); Map upgradeRequestAspects = new HashMap<>(); - upgradeRequestAspects.put(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, + upgradeRequestAspects.put( + Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(upgradeRequest.data()))); - EntityResponse response = new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); - Mockito.when(mockService.getEntityV2( - Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), - Mockito.eq(upgradeEntityUrn), - Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)) - )).thenReturn(response); + EntityResponse response = + new EntityResponse().setAspects(new EnvelopedAspectMap(upgradeRequestAspects)); + Mockito.when( + mockService.getEntityV2( + Mockito.eq(Constants.DATA_HUB_UPGRADE_ENTITY_NAME), + Mockito.eq(upgradeEntityUrn), + Mockito.eq(Collections.singleton(Constants.DATA_HUB_UPGRADE_REQUEST_ASPECT_NAME)))) + .thenReturn(response); UpgradeDefaultBrowsePathsStep step = new UpgradeDefaultBrowsePathsStep(mockService); step.execute(); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.anyBoolean() - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } private void initMockServiceOtherEntities(EntityService mockService) { - List skippedEntityTypes = ImmutableList.of( - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME - ); + List skippedEntityTypes = + ImmutableList.of( + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME); for (String entityType : skippedEntityTypes) { - Mockito.when(mockService.listLatestAspects( - Mockito.eq(entityType), - Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), - Mockito.eq(0), - Mockito.eq(5000) - )).thenReturn(new ListResult<>( - Collections.emptyList(), - new 
ListResultMetadata().setExtraInfos(new ExtraInfoArray(Collections.emptyList())), - 0, - false, - 0, - 0, - 0)); + Mockito.when( + mockService.listLatestAspects( + Mockito.eq(entityType), + Mockito.eq(Constants.BROWSE_PATHS_ASPECT_NAME), + Mockito.eq(0), + Mockito.eq(5000))) + .thenReturn( + new ListResult<>( + Collections.emptyList(), + new ListResultMetadata() + .setExtraInfos(new ExtraInfoArray(Collections.emptyList())), + 0, + false, + 0, + 0, + 0)); } } @@ -285,10 +329,10 @@ public static class TestEntityRegistry implements EntityRegistry { private final Map entityNameToSpec; public TestEntityRegistry() { - entityNameToSpec = new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) - .buildEntitySpecs(new Snapshot().schema()) - .stream() - .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); + entityNameToSpec = + new EntitySpecBuilder(EntitySpecBuilder.AnnotationExtractionMode.IGNORE_ASPECT_FIELDS) + .buildEntitySpecs(new Snapshot().schema()).stream() + .collect(Collectors.toMap(spec -> spec.getName().toLowerCase(), spec -> spec)); } @Nonnull diff --git a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java index fe0d61986b4a6..9931f044931b6 100644 --- a/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java +++ b/metadata-service/factories/src/test/java/io/datahubproject/telemetry/TelemetryUtilsTest.java @@ -1,5 +1,8 @@ package io.datahubproject.telemetry; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.gms.factory.telemetry.TelemetryUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.telemetry.TelemetryClientId; @@ -7,10 +10,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.AssertJUnit.assertEquals; - - public class TelemetryUtilsTest { EntityService _entityService; @@ -18,7 +17,8 @@ public class TelemetryUtilsTest { @BeforeMethod public void init() { _entityService = Mockito.mock(EntityService.class); - Mockito.when(_entityService.getLatestAspect(any(), anyString())).thenReturn(new TelemetryClientId().setClientId("1234")); + Mockito.when(_entityService.getLatestAspect(any(), anyString())) + .thenReturn(new TelemetryClientId().setClientId("1234")); } @Test diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 043c142da8323..692208c42f90c 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -1,5 +1,7 @@ package com.datahub.graphql; +import static com.linkedin.metadata.Constants.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -31,9 +33,6 @@ import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RestController public class GraphQLController { @@ -43,20 +42,22 @@ public GraphQLController() { 
MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "call")); } - @Inject - GraphQLEngine _engine; + @Inject GraphQLEngine _engine; - @Inject - AuthorizerChain _authorizerChain; + @Inject AuthorizerChain _authorizerChain; @PostMapping(value = "/graphql", produces = "application/json;charset=utf-8") CompletableFuture> postGraphQL(HttpEntity httpEntity) { String jsonStr = httpEntity.getBody(); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode bodyJson = null; try { bodyJson = mapper.readTree(jsonStr); @@ -81,9 +82,11 @@ CompletableFuture> postGraphQL(HttpEntity httpEnt * Extract "variables" map */ JsonNode variablesJson = bodyJson.get("variables"); - final Map variables = (variablesJson != null && !variablesJson.isNull()) - ? new ObjectMapper().convertValue(variablesJson, new TypeReference>() { }) - : Collections.emptyMap(); + final Map variables = + (variablesJson != null && !variablesJson.isNull()) + ? new ObjectMapper() + .convertValue(variablesJson, new TypeReference>() {}) + : Collections.emptyMap(); log.debug(String.format("Executing graphQL query: %s, variables: %s", queryJson, variables)); @@ -91,61 +94,76 @@ CompletableFuture> postGraphQL(HttpEntity httpEnt * Init QueryContext */ Authentication authentication = AuthenticationContext.getAuthentication(); - SpringQueryContext context = new SpringQueryContext( - true, - authentication, - _authorizerChain); - - return CompletableFuture.supplyAsync(() -> { - /* - * Execute GraphQL Query - */ - ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context); - - if (executionResult.getErrors().size() != 0) { - // There were GraphQL errors. Report in error logs. - log.error(String.format("Errors while executing graphQL query: %s, result: %s, errors: %s", - queryJson, - executionResult.toSpecification(), - executionResult.getErrors())); - } else { - log.debug(String.format("Executed graphQL query: %s, result: %s", - queryJson, - executionResult.toSpecification())); - } - - /* - * Format & Return Response - */ - try { - submitMetrics(executionResult); - // Remove tracing from response to reduce bulk, not used by the frontend - executionResult.getExtensions().remove("tracing"); - String responseBodyStr = new ObjectMapper().writeValueAsString(executionResult.toSpecification()); - return new ResponseEntity<>(responseBodyStr, HttpStatus.OK); - } catch (IllegalArgumentException | JsonProcessingException e) { - log.error(String.format("Failed to convert execution result %s into a JsonNode", executionResult.toSpecification())); - return new ResponseEntity<>(HttpStatus.SERVICE_UNAVAILABLE); - } - }); + SpringQueryContext context = new SpringQueryContext(true, authentication, _authorizerChain); + + return CompletableFuture.supplyAsync( + () -> { + /* + * Execute GraphQL Query + */ + ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context); + + if (executionResult.getErrors().size() != 0) { + // There were GraphQL errors. Report in error logs. 
+ log.error( + String.format( + "Errors while executing graphQL query: %s, result: %s, errors: %s", + queryJson, executionResult.toSpecification(), executionResult.getErrors())); + } else { + log.debug( + String.format( + "Executed graphQL query: %s, result: %s", + queryJson, executionResult.toSpecification())); + } + + /* + * Format & Return Response + */ + try { + submitMetrics(executionResult); + // Remove tracing from response to reduce bulk, not used by the frontend + executionResult.getExtensions().remove("tracing"); + String responseBodyStr = + new ObjectMapper().writeValueAsString(executionResult.toSpecification()); + return new ResponseEntity<>(responseBodyStr, HttpStatus.OK); + } catch (IllegalArgumentException | JsonProcessingException e) { + log.error( + String.format( + "Failed to convert execution result %s into a JsonNode", + executionResult.toSpecification())); + return new ResponseEntity<>(HttpStatus.SERVICE_UNAVAILABLE); + } + }); } @GetMapping("/graphql") - void getGraphQL(HttpServletRequest request, HttpServletResponse response) throws HttpRequestMethodNotSupportedException { + void getGraphQL(HttpServletRequest request, HttpServletResponse response) + throws HttpRequestMethodNotSupportedException { log.info("GET on GraphQL API is not supported"); throw new HttpRequestMethodNotSupportedException("GET"); } private void observeErrors(ExecutionResult executionResult) { - executionResult.getErrors().forEach(graphQLError -> { - if (graphQLError instanceof DataHubGraphQLError) { - DataHubGraphQLError dhGraphQLError = (DataHubGraphQLError) graphQLError; - int errorCode = dhGraphQLError.getErrorCode(); - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "errorCode", Integer.toString(errorCode))).inc(); - } else { - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "errorType", graphQLError.getErrorType().toString())).inc(); - } - }); + executionResult + .getErrors() + .forEach( + graphQLError -> { + if (graphQLError instanceof DataHubGraphQLError) { + DataHubGraphQLError dhGraphQLError = (DataHubGraphQLError) graphQLError; + int errorCode = dhGraphQLError.getErrorCode(); + MetricUtils.get() + .counter( + MetricRegistry.name( + this.getClass(), "errorCode", Integer.toString(errorCode))) + .inc(); + } else { + MetricUtils.get() + .counter( + MetricRegistry.name( + this.getClass(), "errorType", graphQLError.getErrorType().toString())) + .inc(); + } + }); if (executionResult.getErrors().size() != 0) { MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "error")).inc(); } @@ -162,14 +180,22 @@ private void submitMetrics(ExecutionResult executionResult) { long totalDuration = TimeUnit.NANOSECONDS.toMillis((long) tracingMap.get("duration")); Map executionData = (Map) tracingMap.get("execution"); // Extract top level resolver, parent is top level query. Assumes single query per call. - List> resolvers = (List>) executionData.get("resolvers"); - Optional> - parentResolver = resolvers.stream().filter(resolver -> resolver.get("parentType").equals("Query")).findFirst(); - String fieldName = parentResolver.isPresent() ? (String) parentResolver.get().get("fieldName") : "UNKNOWN"; - MetricUtils.get().histogram(MetricRegistry.name(this.getClass(), fieldName)).update(totalDuration); + List> resolvers = + (List>) executionData.get("resolvers"); + Optional> parentResolver = + resolvers.stream() + .filter(resolver -> resolver.get("parentType").equals("Query")) + .findFirst(); + String fieldName = + parentResolver.isPresent() ? 
(String) parentResolver.get().get("fieldName") : "UNKNOWN"; + MetricUtils.get() + .histogram(MetricRegistry.name(this.getClass(), fieldName)) + .update(totalDuration); } } catch (Exception e) { - MetricUtils.get().counter(MetricRegistry.name(this.getClass(), "submitMetrics", "exception")).inc(); + MetricUtils.get() + .counter(MetricRegistry.name(this.getClass(), "submitMetrics", "exception")) + .inc(); log.error("Unable to submit metrics for GraphQL call.", e); } } diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java index 6dd71d84d6dc3..35636bf07eb10 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphiQLController.java @@ -1,5 +1,7 @@ package com.datahub.graphql; +import static java.nio.charset.StandardCharsets.*; + import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; @@ -14,9 +16,6 @@ import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.ResponseBody; -import static java.nio.charset.StandardCharsets.*; - - @Slf4j @Controller public class GraphiQLController { @@ -37,4 +36,4 @@ public GraphiQLController() { CompletableFuture graphiQL() { return CompletableFuture.supplyAsync(() -> this.graphiqlHtml); } -} \ No newline at end of file +} diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java index a1ddc5a013f7d..379521eda0c1a 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java @@ -4,14 +4,16 @@ import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.datahub.graphql.QueryContext; - public class SpringQueryContext implements QueryContext { private final boolean isAuthenticated; private final Authentication authentication; private final Authorizer authorizer; - public SpringQueryContext(final boolean isAuthenticated, final Authentication authentication, final Authorizer authorizer) { + public SpringQueryContext( + final boolean isAuthenticated, + final Authentication authentication, + final Authorizer authorizer) { this.isAuthenticated = isAuthenticated; this.authentication = authentication; this.authorizer = authorizer; diff --git a/metadata-service/openapi-analytics-servlet/build.gradle b/metadata-service/openapi-analytics-servlet/build.gradle index 6475d215db5f5..8ecd48a03e09d 100644 --- a/metadata-service/openapi-analytics-servlet/build.gradle +++ b/metadata-service/openapi-analytics-servlet/build.gradle @@ -63,5 +63,3 @@ task openApiGenerate(type: GenerateSwaggerCode) { ] } tasks.getByName("compileJava").dependsOn(openApiGenerate) - -checkstyleMain.exclude '**/generated/**' \ No newline at end of file diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java index 7816e81fe4a6d..4322dc08887a5 100644 --- 
a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/config/OpenapiAnalyticsConfig.java @@ -7,8 +7,8 @@ @Configuration public class OpenapiAnalyticsConfig { - @Bean - public DatahubUsageEventsApiDelegate datahubUsageEventsApiDelegate() { - return new DatahubUsageEventsImpl(); - } + @Bean + public DatahubUsageEventsApiDelegate datahubUsageEventsApiDelegate() { + return new DatahubUsageEventsImpl(); + } } diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java index 99e47f32555df..0cedfc22ded6b 100644 --- a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java @@ -1,48 +1,50 @@ package io.datahubproject.openapi.delegates; -import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; -import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.ResponseEntity; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.AuthorizerChain; -import org.springframework.beans.factory.annotation.Value; import com.google.common.collect.ImmutableList; -import io.datahubproject.openapi.exception.UnauthorizedException; -import com.datahub.authorization.AuthUtil; import com.linkedin.metadata.authorization.PoliciesConfig; - -import java.util.Optional; +import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; +import io.datahubproject.openapi.exception.UnauthorizedException; +import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; import java.util.Objects; +import java.util.Optional; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.ResponseEntity; public class DatahubUsageEventsImpl implements DatahubUsageEventsApiDelegate { - @Autowired - private ElasticSearchService _searchService; - @Autowired - private AuthorizerChain _authorizationChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean _restApiAuthorizationEnabled; + @Autowired private ElasticSearchService _searchService; + @Autowired private AuthorizerChain _authorizationChain; - final public static String DATAHUB_USAGE_INDEX = "datahub_usage_event"; + @Value("${authorization.restApiAuthorization:false}") + private boolean _restApiAuthorizationEnabled; - @Override - public ResponseEntity raw(String body) { - Authentication authentication = AuthenticationContext.getAuthentication(); - checkAnalyticsAuthorized(authentication); - return ResponseEntity.of(_searchService.raw(DATAHUB_USAGE_INDEX, body).map(Objects::toString)); - } + public static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; + + @Override + 
public ResponseEntity raw(String body) { + Authentication authentication = AuthenticationContext.getAuthentication(); + checkAnalyticsAuthorized(authentication); + return ResponseEntity.of(_searchService.raw(DATAHUB_USAGE_INDEX, body).map(Objects::toString)); + } - private void checkAnalyticsAuthorized(Authentication authentication) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); + private void checkAnalyticsAuthorized(Authentication authentication) { + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); - if (_restApiAuthorizationEnabled && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); - } + if (_restApiAuthorizationEnabled + && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); } + } } diff --git a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java index 83b1b3f87c724..eebef4c07f7b2 100644 --- a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java +++ b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIAnalyticsTestConfiguration.java @@ -1,5 +1,10 @@ package io.datahubproject.openapi.config; +import static io.datahubproject.openapi.delegates.DatahubUsageEventsImpl.DATAHUB_USAGE_INDEX; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -7,42 +12,36 @@ import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; -import org.opensearch.action.search.SearchResponse; +import java.io.IOException; +import java.util.Optional; import org.mockito.Mockito; +import org.opensearch.action.search.SearchResponse; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Primary; -import java.io.IOException; -import java.util.Optional; - -import static io.datahubproject.openapi.delegates.DatahubUsageEventsImpl.DATAHUB_USAGE_INDEX; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration public class OpenAPIAnalyticsTestConfiguration { - @Bean - @Primary - public ElasticSearchService datahubUsageEventsApiDelegate() throws IOException { - ElasticSearchService elasticSearchService = mock(ElasticSearchService.class); - SearchResponse mockResp = mock(SearchResponse.class); - 
when(elasticSearchService.raw(eq(DATAHUB_USAGE_INDEX), anyString())) - .thenReturn(Optional.of(mockResp)); - return elasticSearchService; - } - - @Bean - public AuthorizerChain authorizerChain() { - AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - - Authentication authentication = Mockito.mock(Authentication.class); - when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); - AuthenticationContext.setAuthentication(authentication); - - return authorizerChain; - } + @Bean + @Primary + public ElasticSearchService datahubUsageEventsApiDelegate() throws IOException { + ElasticSearchService elasticSearchService = mock(ElasticSearchService.class); + SearchResponse mockResp = mock(SearchResponse.class); + when(elasticSearchService.raw(eq(DATAHUB_USAGE_INDEX), anyString())) + .thenReturn(Optional.of(mockResp)); + return elasticSearchService; + } + + @Bean + public AuthorizerChain authorizerChain() { + AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); + + Authentication authentication = Mockito.mock(Authentication.class); + when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); + when(authorizerChain.authorize(any())) + .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); + AuthenticationContext.setAuthentication(authentication); + + return authorizerChain; + } } diff --git a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java index af2a24391fea8..d445f321132ef 100644 --- a/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java +++ b/metadata-service/openapi-analytics-servlet/src/test/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImplTest.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.delegates; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import io.datahubproject.openapi.config.OpenAPIAnalyticsTestConfiguration; import io.datahubproject.openapi.config.SpringWebConfig; @@ -14,31 +17,27 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertNotNull; - - @SpringBootTest(classes = {SpringWebConfig.class}) @ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"}) @Import({DatahubUsageEventsImpl.class, OpenAPIAnalyticsTestConfiguration.class}) public class DatahubUsageEventsImplTest extends AbstractTestNGSpringContextTests { - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } - @Autowired - private DatahubUsageEventsApiController analyticsController; + @Autowired private DatahubUsageEventsApiController analyticsController; - @Test - public void initTest() { - 
assertNotNull(analyticsController); - } + @Test + public void initTest() { + assertNotNull(analyticsController); + } - @Test - public void analyticsControllerTest() { - ResponseEntity resp = analyticsController.raw(""); - assertEquals(resp.getStatusCode(), HttpStatus.OK); - } + @Test + public void analyticsControllerTest() { + ResponseEntity resp = analyticsController.raw(""); + assertEquals(resp.getStatusCode(), HttpStatus.OK); + } } diff --git a/metadata-service/openapi-entity-servlet/build.gradle b/metadata-service/openapi-entity-servlet/build.gradle index 7f9c472b91fac..dbec469085b07 100644 --- a/metadata-service/openapi-entity-servlet/build.gradle +++ b/metadata-service/openapi-entity-servlet/build.gradle @@ -77,6 +77,4 @@ task openApiGenerate(type: GenerateSwaggerCode, dependsOn: [mergeApiComponents, 'delegatePattern' : "false" ] } -tasks.getByName("compileJava").dependsOn(openApiGenerate) - -checkstyleMain.exclude '**/generated/**' \ No newline at end of file +tasks.getByName("compileJava").dependsOn(openApiGenerate) \ No newline at end of file diff --git a/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java b/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java index ef36d8aa38785..2cd2935496898 100644 --- a/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java +++ b/metadata-service/openapi-entity-servlet/generators/src/main/java/io/datahubproject/CustomSpringCodegen.java @@ -1,43 +1,41 @@ package io.datahubproject; import io.swagger.codegen.v3.generators.java.SpringCodegen; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.Map; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CustomSpringCodegen extends SpringCodegen { - public CustomSpringCodegen() { - super(); - } + public CustomSpringCodegen() { + super(); + } - @Override - public String getName() { - return "custom-spring"; - } + @Override + public String getName() { + return "custom-spring"; + } - @Override - public Map postProcessOperations(Map objs) { - Map result = super.postProcessOperations(objs); - List> imports = (List) objs.get("imports"); + @Override + public Map postProcessOperations(Map objs) { + Map result = super.postProcessOperations(objs); + List> imports = (List) objs.get("imports"); - for (Map importMap : imports) { - for (String type : importMap.values()) { - if (type.contains("EntityRequest") && !type.contains(".Scroll")) { - additionalProperties.put("requestClass", type); - } - if (type.contains("EntityResponse") && !type.contains(".Scroll")) { - additionalProperties.put("responseClass", type); - } - if (type.contains("EntityResponse") && type.contains(".Scroll")) { - additionalProperties.put("scrollResponseClass", type); - } - } + for (Map importMap : imports) { + for (String type : importMap.values()) { + if (type.contains("EntityRequest") && !type.contains(".Scroll")) { + additionalProperties.put("requestClass", type); } - - return result; + if (type.contains("EntityResponse") && !type.contains(".Scroll")) { + additionalProperties.put("responseClass", type); + } + if (type.contains("EntityResponse") && type.contains(".Scroll")) { + additionalProperties.put("scrollResponseClass", type); + } + } } + + return result; + } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java 
b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java index 207c2284e2673..31cd3e6c69e50 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/delegates/EntityApiDelegateImpl.java @@ -1,9 +1,18 @@ package io.datahubproject.openapi.delegates; -import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.entity.EntityService; +import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.AuthorizerChain; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.datahub.authorization.EntitySpec; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; @@ -13,7 +22,6 @@ import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.dto.UrnResponseMap; import io.datahubproject.openapi.entities.EntitiesController; -import com.datahub.authorization.AuthorizerChain; import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.generated.BrowsePathsV2AspectRequestV2; import io.datahubproject.openapi.generated.BrowsePathsV2AspectResponseV2; @@ -43,18 +51,6 @@ import io.datahubproject.openapi.generated.StatusAspectRequestV2; import io.datahubproject.openapi.generated.StatusAspectResponseV2; import io.datahubproject.openapi.util.OpenApiEntitiesUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.linkedin.metadata.authorization.PoliciesConfig; -import com.google.common.collect.ImmutableList; -import com.datahub.authorization.AuthUtil; -import org.springframework.http.HttpEntity; -import org.springframework.http.HttpStatus; -import org.springframework.http.ResponseEntity; - -import javax.validation.Valid; -import javax.validation.constraints.Min; import java.net.URISyntaxException; import java.util.List; import java.util.Map; @@ -62,544 +58,678 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; - -import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; +import javax.validation.Valid; +import javax.validation.constraints.Min; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; public class EntityApiDelegateImpl { - final private EntityRegistry _entityRegistry; - final private EntityService _entityService; - final private SearchService _searchService; - final private EntitiesController _v1Controller; - final private AuthorizerChain _authorizationChain; - - final private boolean _restApiAuthorizationEnabled; - final private Class _reqClazz; - final private Class _respClazz; - final private Class _scrollRespClazz; - - final private StackWalker walker = 
StackWalker.getInstance(); - - public EntityApiDelegateImpl(EntityService entityService, SearchService searchService, EntitiesController entitiesController, - boolean restApiAuthorizationEnabled, AuthorizerChain authorizationChain, - Class reqClazz, Class respClazz, Class scrollRespClazz) { - this._entityService = entityService; - this._searchService = searchService; - this._entityRegistry = entityService.getEntityRegistry(); - this._v1Controller = entitiesController; - this._authorizationChain = authorizationChain; - this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; - this._reqClazz = reqClazz; - this._respClazz = respClazz; - this._scrollRespClazz = scrollRespClazz; - } - - public ResponseEntity get(String urn, Boolean systemMetadata, List aspects) { - String[] requestedAspects = Optional.ofNullable(aspects).map(asp -> asp.stream().distinct().toArray(String[]::new)).orElse(null); - ResponseEntity result = _v1Controller.getEntities(new String[]{urn}, requestedAspects); - return ResponseEntity.of(OpenApiEntitiesUtil.convertEntity(Optional.ofNullable(result) - .map(HttpEntity::getBody).orElse(null), _respClazz, systemMetadata)); - } - - public ResponseEntity> create(List body) { - List aspects = body.stream() - .flatMap(b -> OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry).stream()) - .collect(Collectors.toList()); - _v1Controller.postEntities(aspects); - List responses = body.stream() - .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) - .collect(Collectors.toList()); - return ResponseEntity.ok(responses); - } - - public ResponseEntity delete(String urn) { - _v1Controller.deleteEntities(new String[]{urn}, false); - return new ResponseEntity<>(HttpStatus.OK); - } - - public ResponseEntity head(String urn) { - try { - Urn entityUrn = Urn.createFromString(urn); - if (_entityService.exists(entityUrn)) { - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } else { - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public ResponseEntity getAspect(String urn, Boolean systemMetadata, String aspect, Class entityRespClass, - Class aspectRespClazz) { - String[] requestedAspects = new String[]{aspect}; - ResponseEntity result = _v1Controller.getEntities(new String[]{urn}, requestedAspects); - return ResponseEntity.of(OpenApiEntitiesUtil.convertAspect(result.getBody(), aspect, entityRespClass, aspectRespClazz, - systemMetadata)); - } - - public ResponseEntity createAspect(String urn, String aspectName, AQ body, Class reqClazz, Class respClazz) { - UpsertAspectRequest aspectUpsert = OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); - _v1Controller.postEntities(Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList())); - AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); - return ResponseEntity.ok(response); - } - - public ResponseEntity headAspect(String urn, String aspect) { - try { - Urn entityUrn = Urn.createFromString(urn); - if (_entityService.exists(entityUrn, aspect)) { - return new ResponseEntity<>(HttpStatus.NO_CONTENT); - } else { - return new ResponseEntity<>(HttpStatus.NOT_FOUND); - } - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - - public ResponseEntity deleteAspect(String urn, String aspect) { - _entityService.deleteAspect(urn, aspect, Map.of(), false); - _v1Controller.deleteEntities(new String[]{urn}, false); - return new 
ResponseEntity<>(HttpStatus.OK); - } - - public ResponseEntity createDomains(DomainsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DomainsAspectRequestV2.class, DomainsAspectResponseV2.class); - } - - public ResponseEntity createGlobalTags(GlobalTagsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, GlobalTagsAspectRequestV2.class, GlobalTagsAspectResponseV2.class); - } - - public ResponseEntity createGlossaryTerms(GlossaryTermsAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, GlossaryTermsAspectRequestV2.class, GlossaryTermsAspectResponseV2.class); - } - - public ResponseEntity createOwnership(OwnershipAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, OwnershipAspectRequestV2.class, OwnershipAspectResponseV2.class); - } - - public ResponseEntity createStatus(StatusAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, StatusAspectRequestV2.class, StatusAspectResponseV2.class); - } - - public ResponseEntity deleteDomains(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteGlobalTags(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteGlossaryTerms(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteOwnership(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteStatus(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity getDomains(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DomainsAspectResponseV2.class); - } - - public ResponseEntity getGlobalTags(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, 
methodNameToAspectName(methodName), _respClazz, - GlobalTagsAspectResponseV2.class); - } - - public ResponseEntity getGlossaryTerms(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - GlossaryTermsAspectResponseV2.class); - } - - public ResponseEntity getOwnership(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - OwnershipAspectResponseV2.class); - } - - public ResponseEntity getStatus(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - StatusAspectResponseV2.class); - } - - public ResponseEntity headDomains(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headGlobalTags(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headGlossaryTerms(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headOwnership(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headStatus(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - protected static String methodNameToAspectName(String methodName) { - return toLowerFirst(methodName.replaceFirst("^(get|head|delete|create)", "")); - } - - public ResponseEntity deleteDeprecation(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteBrowsePathsV2(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity getDeprecation(String urn, @Valid Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DeprecationAspectResponseV2.class); - } - - public ResponseEntity headDeprecation(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity createDeprecation(@Valid 
DeprecationAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DeprecationAspectRequestV2.class, - DeprecationAspectResponseV2.class); - } - - public ResponseEntity headBrowsePathsV2(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity getBrowsePathsV2(String urn, @Valid Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - BrowsePathsV2AspectResponseV2.class); - } - - public ResponseEntity createBrowsePathsV2(@Valid BrowsePathsV2AspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, BrowsePathsV2AspectRequestV2.class, - BrowsePathsV2AspectResponseV2.class); - } - - public ResponseEntity scroll(@Valid Boolean systemMetadata, @Valid List aspects, @Min(1) @Valid Integer count, - @Valid String scrollId, @Valid List sort, @Valid SortOrder sortOrder, @Valid String query) { - - Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); - checkScrollAuthorized(authentication, entitySpec); - - // TODO multi-field sort - SortCriterion sortCriterion = new SortCriterion(); - sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); - sortCriterion.setOrder(com.linkedin.metadata.query.filter.SortOrder.valueOf(Optional.ofNullable(sortOrder) - .map(Enum::name).orElse("ASCENDING"))); - - SearchFlags searchFlags = new SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true); - - ScrollResult result = _searchService.scrollAcrossEntities( - List.of(entitySpec.getName()), - query, null, sortCriterion, scrollId, null, count, searchFlags); - - String[] urns = result.getEntities().stream() - .map(SearchEntity::getEntity) - .map(Urn::toString) - .toArray(String[]::new); - String[] requestedAspects = Optional.ofNullable(aspects) - .map(asp -> asp.stream().distinct().toArray(String[]::new)) - .orElse(null); - List entities = Optional.ofNullable(_v1Controller.getEntities(urns, requestedAspects).getBody()) - .map(body -> body.getResponses().entrySet()) - .map(entries -> OpenApiEntitiesUtil.convertEntities(entries, _respClazz, systemMetadata)) - .orElse(List.of()); - - return ResponseEntity.of(OpenApiEntitiesUtil.convertToScrollResponse(_scrollRespClazz, result.getScrollId(), entities)); - } - - private void checkScrollAuthorized(Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); - - List> resourceSpecs = List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); - if (_restApiAuthorizationEnabled && 
!AuthUtil.isAuthorizedForResources(_authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); - } - } - - public ResponseEntity createDatasetProperties(@Valid DatasetPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DatasetPropertiesAspectRequestV2.class, - DatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity createEditableDatasetProperties( - @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, EditableDatasetPropertiesAspectRequestV2.class, - EditableDatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity createInstitutionalMemory( - @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, InstitutionalMemoryAspectRequestV2.class, - InstitutionalMemoryAspectResponseV2.class); - } - - public ResponseEntity createChartInfo(@Valid ChartInfoAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, ChartInfoAspectRequestV2.class, - ChartInfoAspectResponseV2.class); - } - - public ResponseEntity createEditableChartProperties( - @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, EditableChartPropertiesAspectRequestV2.class, - EditableChartPropertiesAspectResponseV2.class); - } - - public ResponseEntity createDataProductProperties( - @Valid DataProductPropertiesAspectRequestV2 body, String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return createAspect(urn, methodNameToAspectName(methodName), body, DataProductPropertiesAspectRequestV2.class, - DataProductPropertiesAspectResponseV2.class); - } - - public ResponseEntity deleteDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteEditableDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteInstitutionalMemory(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteChartInfo(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, 
methodNameToAspectName(methodName)); - } - - public ResponseEntity getDatasetProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity getEditableDatasetProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - EditableDatasetPropertiesAspectResponseV2.class); - } - - public ResponseEntity getInstitutionalMemory(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - InstitutionalMemoryAspectResponseV2.class); - } - - public ResponseEntity getEditableChartProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, EditableChartPropertiesAspectResponseV2.class); - } - - public ResponseEntity getChartInfo(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - ChartInfoAspectResponseV2.class); - } - - public ResponseEntity getDataProductProperties(String urn, Boolean systemMetadata) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return getAspect(urn, systemMetadata, methodNameToAspectName(methodName), _respClazz, - DataProductPropertiesAspectResponseV2.class); - } - - public ResponseEntity headDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headEditableDatasetProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headInstitutionalMemory(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headDataProductProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headEditableChartProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity headChartInfo(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return 
headAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteEditableChartProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } - - public ResponseEntity deleteDataProductProperties(String urn) { - String methodName = walker.walk(frames -> frames - .findFirst() - .map(StackWalker.StackFrame::getMethodName)).get(); - return deleteAspect(urn, methodNameToAspectName(methodName)); - } + private final EntityRegistry _entityRegistry; + private final EntityService _entityService; + private final SearchService _searchService; + private final EntitiesController _v1Controller; + private final AuthorizerChain _authorizationChain; + + private final boolean _restApiAuthorizationEnabled; + private final Class _reqClazz; + private final Class _respClazz; + private final Class _scrollRespClazz; + + private final StackWalker walker = StackWalker.getInstance(); + + public EntityApiDelegateImpl( + EntityService entityService, + SearchService searchService, + EntitiesController entitiesController, + boolean restApiAuthorizationEnabled, + AuthorizerChain authorizationChain, + Class reqClazz, + Class respClazz, + Class scrollRespClazz) { + this._entityService = entityService; + this._searchService = searchService; + this._entityRegistry = entityService.getEntityRegistry(); + this._v1Controller = entitiesController; + this._authorizationChain = authorizationChain; + this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; + this._reqClazz = reqClazz; + this._respClazz = respClazz; + this._scrollRespClazz = scrollRespClazz; + } + + public ResponseEntity get(String urn, Boolean systemMetadata, List aspects) { + String[] requestedAspects = + Optional.ofNullable(aspects) + .map(asp -> asp.stream().distinct().toArray(String[]::new)) + .orElse(null); + ResponseEntity result = + _v1Controller.getEntities(new String[] {urn}, requestedAspects); + return ResponseEntity.of( + OpenApiEntitiesUtil.convertEntity( + Optional.ofNullable(result).map(HttpEntity::getBody).orElse(null), + _respClazz, + systemMetadata)); + } + + public ResponseEntity> create(List body) { + List aspects = + body.stream() + .flatMap( + b -> + OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry) + .stream()) + .collect(Collectors.toList()); + _v1Controller.postEntities(aspects); + List responses = + body.stream() + .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) + .collect(Collectors.toList()); + return ResponseEntity.ok(responses); + } + + public ResponseEntity delete(String urn) { + _v1Controller.deleteEntities(new String[] {urn}, false); + return new ResponseEntity<>(HttpStatus.OK); + } + + public ResponseEntity head(String urn) { + try { + Urn entityUrn = Urn.createFromString(urn); + if (_entityService.exists(entityUrn)) { + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } else { + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + } + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public ResponseEntity getAspect( + String urn, + Boolean systemMetadata, + String aspect, + Class entityRespClass, + Class aspectRespClazz) { + String[] requestedAspects = new String[] {aspect}; + ResponseEntity result = + _v1Controller.getEntities(new String[] {urn}, requestedAspects); + return ResponseEntity.of( + OpenApiEntitiesUtil.convertAspect( + result.getBody(), aspect, 
entityRespClass, aspectRespClazz, systemMetadata)); + } + + public ResponseEntity createAspect( + String urn, String aspectName, AQ body, Class reqClazz, Class respClazz) { + UpsertAspectRequest aspectUpsert = + OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); + _v1Controller.postEntities( + Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList())); + AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); + return ResponseEntity.ok(response); + } + + public ResponseEntity headAspect(String urn, String aspect) { + try { + Urn entityUrn = Urn.createFromString(urn); + if (_entityService.exists(entityUrn, aspect)) { + return new ResponseEntity<>(HttpStatus.NO_CONTENT); + } else { + return new ResponseEntity<>(HttpStatus.NOT_FOUND); + } + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public ResponseEntity deleteAspect(String urn, String aspect) { + _entityService.deleteAspect(urn, aspect, Map.of(), false); + _v1Controller.deleteEntities(new String[] {urn}, false); + return new ResponseEntity<>(HttpStatus.OK); + } + + public ResponseEntity createDomains( + DomainsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DomainsAspectRequestV2.class, + DomainsAspectResponseV2.class); + } + + public ResponseEntity createGlobalTags( + GlobalTagsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + GlobalTagsAspectRequestV2.class, + GlobalTagsAspectResponseV2.class); + } + + public ResponseEntity createGlossaryTerms( + GlossaryTermsAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + GlossaryTermsAspectRequestV2.class, + GlossaryTermsAspectResponseV2.class); + } + + public ResponseEntity createOwnership( + OwnershipAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + OwnershipAspectRequestV2.class, + OwnershipAspectResponseV2.class); + } + + public ResponseEntity createStatus( + StatusAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + StatusAspectRequestV2.class, + StatusAspectResponseV2.class); + } + + public ResponseEntity deleteDomains(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteGlobalTags(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteGlossaryTerms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + 
return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteOwnership(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteStatus(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity getDomains(String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DomainsAspectResponseV2.class); + } + + public ResponseEntity getGlobalTags( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + GlobalTagsAspectResponseV2.class); + } + + public ResponseEntity getGlossaryTerms( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + GlossaryTermsAspectResponseV2.class); + } + + public ResponseEntity getOwnership( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + OwnershipAspectResponseV2.class); + } + + public ResponseEntity getStatus(String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + StatusAspectResponseV2.class); + } + + public ResponseEntity headDomains(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headGlobalTags(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headGlossaryTerms(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headOwnership(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headStatus(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + protected static String methodNameToAspectName(String methodName) { + return toLowerFirst(methodName.replaceFirst("^(get|head|delete|create)", "")); + } + + public 
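// Illustration (editorial sketch, not part of this patch): the delegate methods above all follow the
// same pattern: StackWalker reads the calling method's own name, and methodNameToAspectName strips the
// leading verb and lower-cases the first letter to recover the aspect name. Assuming the generated
// method names shown in this file:
//   methodNameToAspectName("getGlobalTags")           -> "globalTags"
//   methodNameToAspectName("createDatasetProperties") -> "datasetProperties"
//   methodNameToAspectName("headOwnership")           -> "ownership"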
ResponseEntity deleteDeprecation(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteBrowsePathsV2(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity getDeprecation( + String urn, @Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DeprecationAspectResponseV2.class); + } + + public ResponseEntity headDeprecation(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity createDeprecation( + @Valid DeprecationAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DeprecationAspectRequestV2.class, + DeprecationAspectResponseV2.class); + } + + public ResponseEntity headBrowsePathsV2(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity getBrowsePathsV2( + String urn, @Valid Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + BrowsePathsV2AspectResponseV2.class); + } + + public ResponseEntity createBrowsePathsV2( + @Valid BrowsePathsV2AspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + BrowsePathsV2AspectRequestV2.class, + BrowsePathsV2AspectResponseV2.class); + } + + public ResponseEntity scroll( + @Valid Boolean systemMetadata, + @Valid List aspects, + @Min(1) @Valid Integer count, + @Valid String scrollId, + @Valid List sort, + @Valid SortOrder sortOrder, + @Valid String query) { + + Authentication authentication = AuthenticationContext.getAuthentication(); + com.linkedin.metadata.models.EntitySpec entitySpec = + OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); + checkScrollAuthorized(authentication, entitySpec); + + // TODO multi-field sort + SortCriterion sortCriterion = new SortCriterion(); + sortCriterion.setField(Optional.ofNullable(sort).map(s -> s.get(0)).orElse("urn")); + sortCriterion.setOrder( + com.linkedin.metadata.query.filter.SortOrder.valueOf( + Optional.ofNullable(sortOrder).map(Enum::name).orElse("ASCENDING"))); + + SearchFlags searchFlags = + new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + + ScrollResult result = + _searchService.scrollAcrossEntities( + List.of(entitySpec.getName()), + query, + null, + sortCriterion, + scrollId, + null, + count, + searchFlags); + + String[] urns = + result.getEntities().stream() 
+ .map(SearchEntity::getEntity) + .map(Urn::toString) + .toArray(String[]::new); + String[] requestedAspects = + Optional.ofNullable(aspects) + .map(asp -> asp.stream().distinct().toArray(String[]::new)) + .orElse(null); + List entities = + Optional.ofNullable(_v1Controller.getEntities(urns, requestedAspects).getBody()) + .map(body -> body.getResponses().entrySet()) + .map( + entries -> OpenApiEntitiesUtil.convertEntities(entries, _respClazz, systemMetadata)) + .orElse(List.of()); + + return ResponseEntity.of( + OpenApiEntitiesUtil.convertToScrollResponse( + _scrollRespClazz, result.getScrollId(), entities)); + } + + private void checkScrollAuthorized( + Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { + String actorUrnStr = authentication.getActor().toUrnStr(); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); + + List> resourceSpecs = + List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); + if (_restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); + } + } + + public ResponseEntity createDatasetProperties( + @Valid DatasetPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DatasetPropertiesAspectRequestV2.class, + DatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity createEditableDatasetProperties( + @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + EditableDatasetPropertiesAspectRequestV2.class, + EditableDatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity createInstitutionalMemory( + @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + InstitutionalMemoryAspectRequestV2.class, + InstitutionalMemoryAspectResponseV2.class); + } + + public ResponseEntity createChartInfo( + @Valid ChartInfoAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + ChartInfoAspectRequestV2.class, + ChartInfoAspectResponseV2.class); + } + + public ResponseEntity createEditableChartProperties( + @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + EditableChartPropertiesAspectRequestV2.class, + EditableChartPropertiesAspectResponseV2.class); + } + + public ResponseEntity createDataProductProperties( + @Valid DataProductPropertiesAspectRequestV2 body, String urn) { + String methodName = + walker.walk(frames -> 
frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return createAspect( + urn, + methodNameToAspectName(methodName), + body, + DataProductPropertiesAspectRequestV2.class, + DataProductPropertiesAspectResponseV2.class); + } + + public ResponseEntity deleteDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteEditableDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteInstitutionalMemory(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteChartInfo(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity getDatasetProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity getEditableDatasetProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + EditableDatasetPropertiesAspectResponseV2.class); + } + + public ResponseEntity getInstitutionalMemory( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + InstitutionalMemoryAspectResponseV2.class); + } + + public ResponseEntity getEditableChartProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + EditableChartPropertiesAspectResponseV2.class); + } + + public ResponseEntity getChartInfo( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + ChartInfoAspectResponseV2.class); + } + + public ResponseEntity getDataProductProperties( + String urn, Boolean systemMetadata) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return getAspect( + urn, + systemMetadata, + methodNameToAspectName(methodName), + _respClazz, + DataProductPropertiesAspectResponseV2.class); + } + + public ResponseEntity headDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return 
headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headEditableDatasetProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headInstitutionalMemory(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headDataProductProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headEditableChartProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity headChartInfo(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return headAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteEditableChartProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } + + public ResponseEntity deleteDataProductProperties(String urn) { + String methodName = + walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); + return deleteAspect(urn, methodNameToAspectName(methodName)); + } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java index 205d401dd956d..317f9311003e5 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/util/OpenApiEntitiesUtil.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.util; +import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; +import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; + import com.linkedin.common.urn.Urn; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; @@ -9,8 +12,6 @@ import io.datahubproject.openapi.generated.EntityResponse; import io.datahubproject.openapi.generated.OneOfGenericAspectValue; import io.datahubproject.openapi.generated.SystemMetadata; -import lombok.extern.slf4j.Slf4j; - import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; @@ -20,260 +21,338 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; - -import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; -import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class OpenApiEntitiesUtil { - private final static String MODEL_VERSION = "V2"; - private final static String REQUEST_SUFFIX = "Request" + MODEL_VERSION; - private final static String RESPONSE_SUFFIX = "Response" + MODEL_VERSION; - 
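// Illustration (editorial sketch, not part of this patch): the Request/Response V2 suffix constants
// defined here encode the naming convention of the generated OpenAPI models that this class resolves
// reflectively. Assuming an aspect named "globalTags" from the entity registry, the lookups compose
// class names like so:
//   String upper = toUpperFirst("globalTags");         // "GlobalTags"
//   String reqName = upper + ASPECT_REQUEST_SUFFIX;    // "GlobalTagsAspectRequestV2"
//   String respName = upper + ASPECT_RESPONSE_SUFFIX;  // "GlobalTagsAspectResponseV2"
//   Class<?> reqClazz = REFLECT.lookupClass(reqName);  // resolved in io.datahubproject.openapi.generated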
- private final static String ASPECT_REQUEST_SUFFIX = "Aspect" + REQUEST_SUFFIX; - private final static String ASPECT_RESPONSE_SUFFIX = "Aspect" + RESPONSE_SUFFIX; - private final static String ENTITY_REQUEST_SUFFIX = "Entity" + REQUEST_SUFFIX; - private final static String ENTITY_RESPONSE_SUFFIX = "Entity" + RESPONSE_SUFFIX; + private static final String MODEL_VERSION = "V2"; + private static final String REQUEST_SUFFIX = "Request" + MODEL_VERSION; + private static final String RESPONSE_SUFFIX = "Response" + MODEL_VERSION; + + private static final String ASPECT_REQUEST_SUFFIX = "Aspect" + REQUEST_SUFFIX; + private static final String ASPECT_RESPONSE_SUFFIX = "Aspect" + RESPONSE_SUFFIX; + private static final String ENTITY_REQUEST_SUFFIX = "Entity" + REQUEST_SUFFIX; + private static final String ENTITY_RESPONSE_SUFFIX = "Entity" + RESPONSE_SUFFIX; + + private OpenApiEntitiesUtil() {} + + private static final ReflectionCache REFLECT = + ReflectionCache.builder().basePackage("io.datahubproject.openapi.generated").build(); + + public static UpsertAspectRequest convertAspectToUpsert( + String entityUrn, Object aspectRequest, Class aspectRequestClazz) { + try { + UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder(); + builder.entityType(Urn.createFromString(entityUrn).getEntityType()); + builder.entityUrn(entityUrn); + + // i.e. GlobalTagsAspectRequestV2 + if (aspectRequest != null) { + // i.e. GlobalTags + Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); + Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest); + + if (aspect != null) { + builder.aspect((OneOfGenericAspectValue) aspect); + return builder.build(); + } + } - private OpenApiEntitiesUtil() { + return null; + } catch (Exception e) { + log.error("Error reflecting urn: {} aspect: {}", entityUrn, aspectRequestClazz.getName()); + throw new RuntimeException(e); } - - private final static ReflectionCache REFLECT = ReflectionCache.builder() - .basePackage("io.datahubproject.openapi.generated") - .build(); - - - public static UpsertAspectRequest convertAspectToUpsert(String entityUrn, Object aspectRequest, Class aspectRequestClazz) { - try { - UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder(); - builder.entityType(Urn.createFromString(entityUrn).getEntityType()); - builder.entityUrn(entityUrn); - - // i.e. GlobalTagsAspectRequestV2 - if (aspectRequest != null) { - // i.e. GlobalTags - Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); - Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest); - - if (aspect != null) { + } + + public static List convertEntityToUpsert( + Object openapiEntity, Class fromClazz, EntityRegistry entityRegistry) { + final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, fromClazz); + + return entitySpec.getAspectSpecs().stream() + .map( + aspectSpec -> { + try { + UpsertAspectRequest.UpsertAspectRequestBuilder builder = + UpsertAspectRequest.builder(); + builder.entityType(entitySpec.getName()); + builder.entityUrn( + (String) REFLECT.lookupMethod(fromClazz, "getUrn").invoke(openapiEntity)); + + String upperAspectName = toUpperFirst(aspectSpec.getName()); + Method aspectMethod = REFLECT.lookupMethod(fromClazz, "get" + upperAspectName); + + // i.e. GlobalTagsAspectRequestV2 + Object aspectRequest = + aspectMethod == null ? 
null : aspectMethod.invoke(openapiEntity); + if (aspectRequest != null) { + Class aspectRequestClazz = + REFLECT.lookupClass(upperAspectName + ASPECT_REQUEST_SUFFIX); + + // i.e. GlobalTags + Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); + Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest); + + if (aspect != null) { builder.aspect((OneOfGenericAspectValue) aspect); return builder.build(); + } } - } - - return null; - } catch (Exception e) { - log.error("Error reflecting urn: {} aspect: {}", entityUrn, aspectRequestClazz.getName()); - throw new RuntimeException(e); - } - } - public static List convertEntityToUpsert(Object openapiEntity, Class fromClazz, EntityRegistry entityRegistry) { - final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, fromClazz); - - return entitySpec.getAspectSpecs().stream() - .map(aspectSpec -> { - try { - UpsertAspectRequest.UpsertAspectRequestBuilder builder = UpsertAspectRequest.builder(); - builder.entityType(entitySpec.getName()); - builder.entityUrn((String) REFLECT.lookupMethod(fromClazz, "getUrn").invoke(openapiEntity)); - - String upperAspectName = toUpperFirst(aspectSpec.getName()); - Method aspectMethod = REFLECT.lookupMethod(fromClazz, "get" + upperAspectName); - - // i.e. GlobalTagsAspectRequestV2 - Object aspectRequest = aspectMethod == null ? null : aspectMethod.invoke(openapiEntity); - if (aspectRequest != null) { - Class aspectRequestClazz = REFLECT.lookupClass(upperAspectName + ASPECT_REQUEST_SUFFIX); - - // i.e. GlobalTags - Method valueMethod = REFLECT.lookupMethod(aspectRequestClazz, "getValue"); - Object aspect = valueMethod == null ? null : valueMethod.invoke(aspectRequest); - - if (aspect != null) { - builder.aspect((OneOfGenericAspectValue) aspect); - return builder.build(); - } - } - - return null; - } catch (Exception e) { - log.error("Error reflecting entity: {} aspect: {}", entitySpec.getName(), aspectSpec.getName()); - throw new RuntimeException(e); - } - }).filter(Objects::nonNull).collect(Collectors.toList()); - } - public static Optional convertAspect(UrnResponseMap urnResponseMap, String aspectName, Class entityClazz, - Class aspectClazz, boolean withSystemMetadata) { - return convertEntity(urnResponseMap, entityClazz, withSystemMetadata).map(entity -> { - try { - Method aspectMethod = REFLECT.lookupMethod(entityClazz, "get" + toUpperFirst(aspectName)); + return null; + } catch (Exception e) { + log.error( + "Error reflecting entity: {} aspect: {}", + entitySpec.getName(), + aspectSpec.getName()); + throw new RuntimeException(e); + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } + + public static Optional convertAspect( + UrnResponseMap urnResponseMap, + String aspectName, + Class entityClazz, + Class aspectClazz, + boolean withSystemMetadata) { + return convertEntity(urnResponseMap, entityClazz, withSystemMetadata) + .map( + entity -> { + try { + Method aspectMethod = + REFLECT.lookupMethod(entityClazz, "get" + toUpperFirst(aspectName)); return aspectMethod == null ? 
null : aspectClazz.cast(aspectMethod.invoke(entity)); - } catch (IllegalAccessException | InvocationTargetException e) { + } catch (IllegalAccessException | InvocationTargetException e) { throw new RuntimeException(e); - } - }); - - } - - public static Optional convertEntity(UrnResponseMap urnResponseMap, Class toClazz, boolean withSystemMetadata) { - return Optional.ofNullable(urnResponseMap) - .flatMap(respMap -> respMap.getResponses().entrySet().stream().findFirst()) - .flatMap(entry -> convertEntities(Set.of(entry), toClazz, withSystemMetadata).stream().findFirst()); - } - - public static List convertEntities(Set> entityResponseSet, Class toClazz, boolean withSystemMetadata) { - if (entityResponseSet != null) { - return entityResponseSet.stream().map(entry -> { + } + }); + } + + public static Optional convertEntity( + UrnResponseMap urnResponseMap, Class toClazz, boolean withSystemMetadata) { + return Optional.ofNullable(urnResponseMap) + .flatMap(respMap -> respMap.getResponses().entrySet().stream().findFirst()) + .flatMap( + entry -> + convertEntities(Set.of(entry), toClazz, withSystemMetadata).stream().findFirst()); + } + + public static List convertEntities( + Set> entityResponseSet, + Class toClazz, + boolean withSystemMetadata) { + if (entityResponseSet != null) { + return entityResponseSet.stream() + .map( + entry -> { try { - // i.e. DataContractEntityResponseV2.Builder - Pair, Object> builderPair = REFLECT.getBuilder(toClazz); - Set builderMethods = Arrays.stream(builderPair.getFirst().getMethods()) - .map(Method::getName).collect(Collectors.toSet()); - - REFLECT.lookupMethod(builderPair, "urn", String.class).invoke(builderPair.getSecond(), entry.getKey()); - - entry.getValue().getAspects().entrySet().forEach(aspectEntry -> { - try { - if (builderMethods.contains(aspectEntry.getKey())) { + // i.e. 
DataContractEntityResponseV2.Builder + Pair, Object> builderPair = REFLECT.getBuilder(toClazz); + Set builderMethods = + Arrays.stream(builderPair.getFirst().getMethods()) + .map(Method::getName) + .collect(Collectors.toSet()); + + REFLECT + .lookupMethod(builderPair, "urn", String.class) + .invoke(builderPair.getSecond(), entry.getKey()); + + entry + .getValue() + .getAspects() + .entrySet() + .forEach( + aspectEntry -> { + try { + if (builderMethods.contains(aspectEntry.getKey())) { String upperFirstAspect = toUpperFirst(aspectEntry.getKey()); Class aspectClazz = REFLECT.lookupClass(upperFirstAspect); - Class aspectRespClazz = REFLECT.lookupClass(upperFirstAspect + ASPECT_RESPONSE_SUFFIX); - Class aspectRespClazzBuilder = REFLECT.lookupClass(String.join("", - upperFirstAspect, ASPECT_RESPONSE_SUFFIX, - "$", upperFirstAspect, ASPECT_RESPONSE_SUFFIX, "Builder")); - Object aspectBuilder = REFLECT.lookupMethod(aspectRespClazz, "builder").invoke(null); - - REFLECT.lookupMethod(aspectRespClazzBuilder, "value", aspectClazz).invoke(aspectBuilder, aspectEntry.getValue().getValue()); + Class aspectRespClazz = + REFLECT.lookupClass(upperFirstAspect + ASPECT_RESPONSE_SUFFIX); + Class aspectRespClazzBuilder = + REFLECT.lookupClass( + String.join( + "", + upperFirstAspect, + ASPECT_RESPONSE_SUFFIX, + "$", + upperFirstAspect, + ASPECT_RESPONSE_SUFFIX, + "Builder")); + Object aspectBuilder = + REFLECT.lookupMethod(aspectRespClazz, "builder").invoke(null); + + REFLECT + .lookupMethod(aspectRespClazzBuilder, "value", aspectClazz) + .invoke(aspectBuilder, aspectEntry.getValue().getValue()); if (withSystemMetadata) { - REFLECT.lookupMethod(aspectRespClazzBuilder, "systemMetadata", SystemMetadata.class) - .invoke(aspectBuilder, aspectEntry.getValue().getSystemMetadata()); + REFLECT + .lookupMethod( + aspectRespClazzBuilder, + "systemMetadata", + SystemMetadata.class) + .invoke( + aspectBuilder, + aspectEntry.getValue().getSystemMetadata()); } - REFLECT.lookupMethod(builderPair, aspectEntry.getKey(), aspectRespClazz).invoke(builderPair.getSecond(), - REFLECT.lookupMethod(aspectRespClazzBuilder, "build").invoke(aspectBuilder)); + REFLECT + .lookupMethod( + builderPair, aspectEntry.getKey(), aspectRespClazz) + .invoke( + builderPair.getSecond(), + REFLECT + .lookupMethod(aspectRespClazzBuilder, "build") + .invoke(aspectBuilder)); + } + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); } - } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); - } - }); + }); - return toClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); + return toClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); } catch (IllegalAccessException | InvocationTargetException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); } - }).collect(Collectors.toList()); - } - return List.of(); + }) + .collect(Collectors.toList()); } - - public static T convertToResponseAspect(I source, Class targetClazz) { - if (source != null) { - try { - Class sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); - Method valueMethod = REFLECT.lookupMethod(sourceClazz, "getValue"); - Object aspect = valueMethod.invoke(source); - - Pair, Object> builderPair = REFLECT.getBuilder(targetClazz); - REFLECT.lookupMethod(builderPair, "value", valueMethod.getReturnType()).invoke(builderPair.getSecond(), aspect); - - return targetClazz.cast(REFLECT.lookupMethod(builderPair, 
"build").invoke(builderPair.getSecond())); - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - return null; + return List.of(); + } + + public static T convertToResponseAspect(I source, Class targetClazz) { + if (source != null) { + try { + Class sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); + Method valueMethod = REFLECT.lookupMethod(sourceClazz, "getValue"); + Object aspect = valueMethod.invoke(source); + + Pair, Object> builderPair = REFLECT.getBuilder(targetClazz); + REFLECT + .lookupMethod(builderPair, "value", valueMethod.getReturnType()) + .invoke(builderPair.getSecond(), aspect); + + return targetClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - - public static T convertToResponse(I source, Class targetClazz, EntityRegistry entityRegistry) { - if (source != null) { - try { - Class sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); - Pair, Object> builderPair = REFLECT.getBuilder(targetClazz); - copy(Pair.of(sourceClazz, source), builderPair, "urn"); - - final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, sourceClazz); - entitySpec.getAspectSpecs().stream() - .forEach(aspectSpec -> { - try { - copy(Pair.of(sourceClazz, source), builderPair, aspectSpec.getName()); - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - }); - - return targetClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - return null; + return null; + } + + public static T convertToResponse( + I source, Class targetClazz, EntityRegistry entityRegistry) { + if (source != null) { + try { + Class sourceClazz = REFLECT.lookupClass(source.getClass().getSimpleName()); + Pair, Object> builderPair = REFLECT.getBuilder(targetClazz); + copy(Pair.of(sourceClazz, source), builderPair, "urn"); + + final EntitySpec entitySpec = requestClassToEntitySpec(entityRegistry, sourceClazz); + entitySpec.getAspectSpecs().stream() + .forEach( + aspectSpec -> { + try { + copy(Pair.of(sourceClazz, source), builderPair, aspectSpec.getName()); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } + }); + + return targetClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond())); + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - - public static Optional convertToScrollResponse(Class scrollRespClazz, String scrollId, List entityResults) { - if (entityResults != null) { - try { - Pair, Object> builderPair = REFLECT.getBuilder(scrollRespClazz); - REFLECT.lookupMethod(builderPair.getFirst(), "scrollId", String.class).invoke(builderPair.getSecond(), scrollId); - REFLECT.lookupMethod(builderPair.getFirst(), "entities", List.class).invoke(builderPair.getSecond(), entityResults); - - return Optional.of(scrollRespClazz.cast(REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()))); - - } catch (InvocationTargetException | IllegalAccessException e) { - throw new RuntimeException(e); - } - } - return Optional.empty(); + return null; + } + + public static Optional convertToScrollResponse( + Class scrollRespClazz, String scrollId, List 
entityResults) { + if (entityResults != null) { + try { + Pair, Object> builderPair = REFLECT.getBuilder(scrollRespClazz); + REFLECT + .lookupMethod(builderPair.getFirst(), "scrollId", String.class) + .invoke(builderPair.getSecond(), scrollId); + REFLECT + .lookupMethod(builderPair.getFirst(), "entities", List.class) + .invoke(builderPair.getSecond(), entityResults); + + return Optional.of( + scrollRespClazz.cast( + REFLECT.lookupMethod(builderPair, "build").invoke(builderPair.getSecond()))); + + } catch (InvocationTargetException | IllegalAccessException e) { + throw new RuntimeException(e); + } } - - - - private static void copy(Pair, Object> sourcePair, Pair, Object> builderPair, String method) - throws InvocationTargetException, IllegalAccessException { - Method sourceMethod = REFLECT.lookupMethod(sourcePair, String.format("get%s", toUpperFirst(method))); - if (sourceMethod != null) { - Class paramClazz = null; - Object param = null; - if (sourceMethod.getReturnType().getSimpleName().contains("Request")) { - Object sourceParam = sourceMethod.invoke(sourcePair.getSecond()); - if (sourceParam != null) { - paramClazz = REFLECT.lookupClass(sourceMethod.getReturnType().getSimpleName().replace("Request", "Response")); - Pair, Object> aspectBuilder = REFLECT.getBuilder(paramClazz); - - for (Method m : sourceMethod.getReturnType().getMethods()) { - if (m.getName().startsWith("get") && !Objects.equals("getClass", m.getName())) { - String getterMethod = m.getName().replaceFirst("^get", ""); - copy(Pair.of(sourceMethod.getReturnType(), sourceMethod.invoke(sourcePair.getSecond())), - aspectBuilder, getterMethod); - } - } - - param = REFLECT.lookupMethod(aspectBuilder, "build").invoke(aspectBuilder.getSecond()); - } - } else { - paramClazz = sourceMethod.getReturnType(); - param = sourceMethod.invoke(sourcePair.getSecond()); + return Optional.empty(); + } + + private static void copy( + Pair, Object> sourcePair, Pair, Object> builderPair, String method) + throws InvocationTargetException, IllegalAccessException { + Method sourceMethod = + REFLECT.lookupMethod(sourcePair, String.format("get%s", toUpperFirst(method))); + if (sourceMethod != null) { + Class paramClazz = null; + Object param = null; + if (sourceMethod.getReturnType().getSimpleName().contains("Request")) { + Object sourceParam = sourceMethod.invoke(sourcePair.getSecond()); + if (sourceParam != null) { + paramClazz = + REFLECT.lookupClass( + sourceMethod.getReturnType().getSimpleName().replace("Request", "Response")); + Pair, Object> aspectBuilder = REFLECT.getBuilder(paramClazz); + + for (Method m : sourceMethod.getReturnType().getMethods()) { + if (m.getName().startsWith("get") && !Objects.equals("getClass", m.getName())) { + String getterMethod = m.getName().replaceFirst("^get", ""); + copy( + Pair.of( + sourceMethod.getReturnType(), sourceMethod.invoke(sourcePair.getSecond())), + aspectBuilder, + getterMethod); } + } - if (param != null) { - Method targetMethod = REFLECT.lookupMethod(builderPair, toLowerFirst(method), paramClazz); - targetMethod.invoke(builderPair.getSecond(), param); - } - } else { - log.info("Class {} doesn't container method {}", sourcePair.getFirst(), - String.format("get%s", toUpperFirst(method))); + param = REFLECT.lookupMethod(aspectBuilder, "build").invoke(aspectBuilder.getSecond()); } + } else { + paramClazz = sourceMethod.getReturnType(); + param = sourceMethod.invoke(sourcePair.getSecond()); + } + + if (param != null) { + Method targetMethod = REFLECT.lookupMethod(builderPair, toLowerFirst(method), 
paramClazz); + targetMethod.invoke(builderPair.getSecond(), param); + } + } else { + log.info( + "Class {} doesn't contain method {}", + sourcePair.getFirst(), + String.format("get%s", toUpperFirst(method))); + } - - public static EntitySpec requestClassToEntitySpec(EntityRegistry entityRegistry, Class reqClazz) { - final String entityType = toLowerFirst(reqClazz.getSimpleName().replace(ENTITY_REQUEST_SUFFIX, "")); - return entityRegistry.getEntitySpec(entityType); - } - - public static EntitySpec responseClassToEntitySpec(EntityRegistry entityRegistry, Class respClazz) { - String entityType = toLowerFirst(respClazz.getSimpleName().replace(ENTITY_RESPONSE_SUFFIX, "")); - return entityRegistry.getEntitySpec(entityType); - } + } + + public static EntitySpec requestClassToEntitySpec( + EntityRegistry entityRegistry, Class reqClazz) { + final String entityType = + toLowerFirst(reqClazz.getSimpleName().replace(ENTITY_REQUEST_SUFFIX, "")); + return entityRegistry.getEntitySpec(entityType); + } + + public static EntitySpec responseClassToEntitySpec( + EntityRegistry entityRegistry, Class respClazz) { + String entityType = toLowerFirst(respClazz.getSimpleName().replace(ENTITY_RESPONSE_SUFFIX, "")); + return entityRegistry.getEntitySpec(entityType); + } } diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java index cabaa2cbd75e6..920a13d998985 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/config/OpenAPIEntityTestConfiguration.java @@ -1,5 +1,11 @@ package io.datahubproject.openapi.config; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -26,6 +32,9 @@ import io.datahubproject.openapi.generated.EntityResponse; import io.datahubproject.openapi.relationships.RelationshipsController; import io.datahubproject.openapi.timeline.TimelineController; +import java.util.Arrays; +import java.util.Map; +import java.util.stream.Collectors; import org.mockito.Mockito; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -33,102 +42,96 @@ import org.springframework.context.annotation.Primary; import org.springframework.http.ResponseEntity; -import java.util.Arrays; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.anyList; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - - @TestConfiguration public class OpenAPIEntityTestConfiguration { - @Bean - public ObjectMapper objectMapper() { - return new ObjectMapper(new YAMLFactory()); - } - - @Bean - @Primary - public EntityService entityService(final EntityRegistry mockRegistry) { - EntityService entityService = mock(EntityServiceImpl.class); - when(entityService.getEntityRegistry()).thenReturn(mockRegistry); -
return entityService; - } - - @Bean - @Primary - public SearchService searchService() { - SearchService searchService = mock(SearchService.class); - when(searchService.scrollAcrossEntities(anyList(), any(), any(), any(), - any(), any(), anyInt(), any())) - .thenReturn(new ScrollResult().setEntities(new SearchEntityArray())); - - return searchService; - } - - @Bean - public AuthorizerChain authorizerChain() { - AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - - Authentication authentication = Mockito.mock(Authentication.class); - when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); - AuthenticationContext.setAuthentication(authentication); - - return authorizerChain; - } - - @MockBean(name = "elasticSearchSystemMetadataService") - public SystemMetadataService systemMetadataService; - - @MockBean - public TimelineService timelineService; - - @Bean("entityRegistry") - @Primary - public EntityRegistry entityRegistry() throws EntityRegistryException, InterruptedException { - /* - Considered a few different approach to loading a custom model. Chose this method - to as closely match a production configuration rather than direct project to project - dependency. - */ - PluginEntityRegistryLoader custom = new PluginEntityRegistryLoader( - getClass().getResource("/custom-model").getFile()); - - ConfigEntityRegistry standard = new ConfigEntityRegistry( - OpenAPIEntityTestConfiguration.class.getClassLoader().getResourceAsStream("entity-registry.yml")); - MergedEntityRegistry entityRegistry = new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(standard); - custom.withBaseRegistry(entityRegistry).start(true); - - return entityRegistry; - } - - /* Controllers not under this module */ - @Bean - @Primary - public EntitiesController entitiesController() { - EntitiesController entitiesController = mock(EntitiesController.class); - when(entitiesController.getEntities(any(), any())) - .thenAnswer(params -> { - String[] urns = params.getArgument(0); - String[] aspects = params.getArgument(1); - return ResponseEntity.ok(UrnResponseMap.builder() - .responses(Arrays.stream(urns) - .map(urn -> Map.entry(urn, EntityResponse.builder().urn(urn).build())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) - .build()); - }); - - return entitiesController; - } - - @MockBean - public TimelineController timelineController; - - @MockBean - public RelationshipsController relationshipsController; + @Bean + public ObjectMapper objectMapper() { + return new ObjectMapper(new YAMLFactory()); + } + + @Bean + @Primary + public EntityService entityService(final EntityRegistry mockRegistry) { + EntityService entityService = mock(EntityServiceImpl.class); + when(entityService.getEntityRegistry()).thenReturn(mockRegistry); + return entityService; + } + + @Bean + @Primary + public SearchService searchService() { + SearchService searchService = mock(SearchService.class); + when(searchService.scrollAcrossEntities( + anyList(), any(), any(), any(), any(), any(), anyInt(), any())) + .thenReturn(new ScrollResult().setEntities(new SearchEntityArray())); + + return searchService; + } + + @Bean + public AuthorizerChain authorizerChain() { + AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); + + Authentication authentication = Mockito.mock(Authentication.class); + when(authentication.getActor()).thenReturn(new 
Actor(ActorType.USER, "datahub"));
+    when(authorizerChain.authorize(any()))
+        .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, ""));
+    AuthenticationContext.setAuthentication(authentication);
+
+    return authorizerChain;
+  }
+
+  @MockBean(name = "elasticSearchSystemMetadataService")
+  public SystemMetadataService systemMetadataService;
+
+  @MockBean public TimelineService timelineService;
+
+  @Bean("entityRegistry")
+  @Primary
+  public EntityRegistry entityRegistry() throws EntityRegistryException, InterruptedException {
+    /*
+     Considered a few different approaches to loading a custom model. Chose this method
+     to match a production configuration as closely as possible, rather than a direct
+     project-to-project dependency.
+    */
+    PluginEntityRegistryLoader custom =
+        new PluginEntityRegistryLoader(getClass().getResource("/custom-model").getFile());
+
+    ConfigEntityRegistry standard =
+        new ConfigEntityRegistry(
+            OpenAPIEntityTestConfiguration.class
+                .getClassLoader()
+                .getResourceAsStream("entity-registry.yml"));
+    MergedEntityRegistry entityRegistry =
+        new MergedEntityRegistry(SnapshotEntityRegistry.getInstance()).apply(standard);
+    custom.withBaseRegistry(entityRegistry).start(true);
+
+    return entityRegistry;
+  }
+
+  /* Controllers not under this module */
+  @Bean
+  @Primary
+  public EntitiesController entitiesController() {
+    EntitiesController entitiesController = mock(EntitiesController.class);
+    when(entitiesController.getEntities(any(), any()))
+        .thenAnswer(
+            params -> {
+              String[] urns = params.getArgument(0);
+              String[] aspects = params.getArgument(1);
+              return ResponseEntity.ok(
+                  UrnResponseMap.builder()
+                      .responses(
+                          Arrays.stream(urns)
+                              .map(urn -> Map.entry(urn, EntityResponse.builder().urn(urn).build()))
+                              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
+                      .build());
+            });
+
+    return entitiesController;
+  }
+
+  @MockBean public TimelineController timelineController;
+
+  @MockBean public RelationshipsController relationshipsController;
 }
diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
index 57803ac904a93..1f8f0a5023513 100644
--- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
+++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/delegates/EntityApiDelegateImplTest.java
@@ -1,5 +1,8 @@
 package io.datahubproject.openapi.delegates;
 
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+import static org.testng.Assert.*;
+
 import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import io.datahubproject.openapi.config.OpenAPIEntityTestConfiguration;
@@ -31,6 +34,7 @@
 import io.datahubproject.openapi.generated.TagAssociation;
 import io.datahubproject.openapi.generated.controller.ChartApiController;
 import io.datahubproject.openapi.generated.controller.DatasetApiController;
+import java.util.List;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
 import org.springframework.boot.test.context.SpringBootTest;
@@ -46,208 +50,245 @@
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
-import 
java.util.List; - -import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; -import static org.testng.Assert.*; - - @SpringBootTest(classes = {SpringWebConfig.class}) @ComponentScan(basePackages = {"io.datahubproject.openapi.generated.controller"}) @Import({OpenAPIEntityTestConfiguration.class}) @AutoConfigureMockMvc public class EntityApiDelegateImplTest extends AbstractTestNGSpringContextTests { - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } - - @Autowired - private ChartApiController chartApiController; - @Autowired - private DatasetApiController datasetApiController; - @Autowired - private EntityRegistry entityRegistry; - @Autowired - private MockMvc mockMvc; - - @Test - public void initTest() { - assertNotNull(chartApiController); - assertNotNull(datasetApiController); - - assertTrue(entityRegistry.getEntitySpec("dataset").getAspectSpecMap().containsKey("customDataQualityRules"), - "Failed to load custom model from custom registry"); - } - - @Test - public void chartApiControllerTest() { - final String testUrn = "urn:li:chart:(looker,baz1)"; - - ChartEntityRequestV2 req = ChartEntityRequestV2.builder() - .urn(testUrn) - .build(); - ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0); - assertEquals(resp.getUrn(), testUrn); - - resp = chartApiController.get(testUrn, false, List.of()).getBody(); - assertEquals(resp.getUrn(), testUrn); - - ResponseEntity deleteResp = chartApiController.delete(testUrn); - assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); - - ResponseEntity headResp = chartApiController.head(testUrn); - assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); - - ResponseEntity scrollResp = chartApiController.scroll( - false, List.of(), 10, null, null, null, null); - assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); - assertNotNull(scrollResp.getBody().getEntities()); - } - - @Test - public void datasetApiControllerTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder() - .urn(testUrn) - .build(); - DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0); - assertEquals(resp.getUrn(), testUrn); - - resp = datasetApiController.get(testUrn, false, List.of()).getBody(); - assertEquals(resp.getUrn(), testUrn); - - ResponseEntity deleteResp = datasetApiController.delete(testUrn); - assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); - - ResponseEntity headResp = datasetApiController.head(testUrn); - assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); - - ResponseEntity scrollResp = datasetApiController.scroll( - false, List.of(), 10, null, null, null, null); - assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); - assertNotNull(scrollResp.getBody().getEntities()); - } - - @Test - public void browsePathsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - BrowsePathsV2AspectRequestV2 req = BrowsePathsV2AspectRequestV2.builder() - .value(BrowsePathsV2.builder().path(List.of(BrowsePathEntry.builder().urn(testUrn) - .id("path").build())).build()).build(); - assertEquals(datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK); - 
assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void deprecationTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - DeprecationAspectRequestV2 req = DeprecationAspectRequestV2.builder() - .value(Deprecation.builder().deprecated(true).build()).build(); - assertEquals(datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headDeprecation(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void domainsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - DomainsAspectRequestV2 req = DomainsAspectRequestV2.builder() - .value(Domains.builder().domains(List.of("my_domain")).build()).build(); - assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headDomains(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void ownershipTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - OwnershipAspectRequestV2 req = OwnershipAspectRequestV2.builder() - .value(Ownership.builder().owners(List.of(Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build())).build()).build(); - assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headOwnership(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void statusTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - StatusAspectRequestV2 req = StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build(); - assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headStatus(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void globalTagsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - GlobalTagsAspectRequestV2 req = GlobalTagsAspectRequestV2.builder() - .value(GlobalTags.builder().tags(List.of(TagAssociation.builder().tag("tag").build())).build()).build(); - assertEquals(datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), 
HttpStatus.OK); - assertEquals(datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headGlobalTags(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - @Test - public void glossaryTermsTest() { - final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; - - GlossaryTermsAspectRequestV2 req = GlossaryTermsAspectRequestV2.builder() - .value(GlossaryTerms.builder().terms(List.of(GlossaryTermAssociation.builder().urn("term urn").build())).build()).build(); - assertEquals(datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK); - assertEquals(datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); - assertEquals(datasetApiController.headGlossaryTerms(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); - } - - - /** - * The purpose of this test is to ensure no errors when a custom aspect is encountered, - * not that the custom aspect is processed. The missing piece to support custom - * aspects is the openapi generated classes for the custom aspects and related request/responses. - */ - @Test - public void customModelTest() throws Exception { - String expectedUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"; - - //CHECKSTYLE:OFF - String body = "[\n" + - " {\n" + - " \"urn\": \"" + expectedUrn + "\",\n" + - " \"customDataQualityRules\": [\n" + - " {\n" + - " \"field\": \"my_event_data\",\n" + - " \"isFieldLevel\": false,\n" + - " \"type\": \"isNull\",\n" + - " \"checkDefinition\": \"n/a\",\n" + - " \"url\": \"https://github.com/datahub-project/datahub/blob/master/checks/nonNull.sql\"\n" + - " }\n" + - " ]\n" + - " }\n" + - "]"; - //CHECKSTYLE:ON - - mockMvc.perform(MockMvcRequestBuilders - .post("/v2/entity/dataset") - .content(body) - .contentType(MediaType.APPLICATION_JSON) - .accept(MediaType.APPLICATION_JSON)) - .andExpect(status().is2xxSuccessful()) - .andExpect(MockMvcResultMatchers.jsonPath("$.[0].urn").value(expectedUrn)); - } + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } + + @Autowired private ChartApiController chartApiController; + @Autowired private DatasetApiController datasetApiController; + @Autowired private EntityRegistry entityRegistry; + @Autowired private MockMvc mockMvc; + + @Test + public void initTest() { + assertNotNull(chartApiController); + assertNotNull(datasetApiController); + + assertTrue( + entityRegistry + .getEntitySpec("dataset") + .getAspectSpecMap() + .containsKey("customDataQualityRules"), + "Failed to load custom model from custom registry"); + } + + @Test + public void chartApiControllerTest() { + final String testUrn = "urn:li:chart:(looker,baz1)"; + + ChartEntityRequestV2 req = ChartEntityRequestV2.builder().urn(testUrn).build(); + ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0); + assertEquals(resp.getUrn(), testUrn); + + resp = chartApiController.get(testUrn, false, List.of()).getBody(); + assertEquals(resp.getUrn(), testUrn); + + ResponseEntity deleteResp = chartApiController.delete(testUrn); + assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); + + ResponseEntity headResp = chartApiController.head(testUrn); + assertEquals(headResp.getStatusCode(), 
HttpStatus.NOT_FOUND); + + ResponseEntity scrollResp = + chartApiController.scroll(false, List.of(), 10, null, null, null, null); + assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); + assertNotNull(scrollResp.getBody().getEntities()); + } + + @Test + public void datasetApiControllerTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder().urn(testUrn).build(); + DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0); + assertEquals(resp.getUrn(), testUrn); + + resp = datasetApiController.get(testUrn, false, List.of()).getBody(); + assertEquals(resp.getUrn(), testUrn); + + ResponseEntity deleteResp = datasetApiController.delete(testUrn); + assertEquals(deleteResp.getStatusCode(), HttpStatus.OK); + + ResponseEntity headResp = datasetApiController.head(testUrn); + assertEquals(headResp.getStatusCode(), HttpStatus.NOT_FOUND); + + ResponseEntity scrollResp = + datasetApiController.scroll(false, List.of(), 10, null, null, null, null); + assertEquals(scrollResp.getStatusCode(), HttpStatus.OK); + assertNotNull(scrollResp.getBody().getEntities()); + } + + @Test + public void browsePathsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + BrowsePathsV2AspectRequestV2 req = + BrowsePathsV2AspectRequestV2.builder() + .value( + BrowsePathsV2.builder() + .path(List.of(BrowsePathEntry.builder().urn(testUrn).id("path").build())) + .build()) + .build(); + assertEquals( + datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(), + HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void deprecationTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + DeprecationAspectRequestV2 req = + DeprecationAspectRequestV2.builder() + .value(Deprecation.builder().deprecated(true).build()) + .build(); + assertEquals( + datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headDeprecation(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void domainsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + DomainsAspectRequestV2 req = + DomainsAspectRequestV2.builder() + .value(Domains.builder().domains(List.of("my_domain")).build()) + .build(); + assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals(datasetApiController.headDomains(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void ownershipTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + OwnershipAspectRequestV2 req = + 
OwnershipAspectRequestV2.builder() + .value( + Ownership.builder() + .owners( + List.of( + Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build())) + .build()) + .build(); + assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals(datasetApiController.headOwnership(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void statusTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + StatusAspectRequestV2 req = + StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build(); + assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals(datasetApiController.headStatus(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void globalTagsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + GlobalTagsAspectRequestV2 req = + GlobalTagsAspectRequestV2.builder() + .value( + GlobalTags.builder() + .tags(List.of(TagAssociation.builder().tag("tag").build())) + .build()) + .build(); + assertEquals( + datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headGlobalTags(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + @Test + public void glossaryTermsTest() { + final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; + + GlossaryTermsAspectRequestV2 req = + GlossaryTermsAspectRequestV2.builder() + .value( + GlossaryTerms.builder() + .terms(List.of(GlossaryTermAssociation.builder().urn("term urn").build())) + .build()) + .build(); + assertEquals( + datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(), + HttpStatus.NOT_FOUND); + assertEquals( + datasetApiController.headGlossaryTerms(testUrn).getStatusCode(), HttpStatus.NOT_FOUND); + } + + /** + * The purpose of this test is to ensure no errors when a custom aspect is encountered, not that + * the custom aspect is processed. The missing piece to support custom aspects is the openapi + * generated classes for the custom aspects and related request/responses. 
+ */ + @Test + public void customModelTest() throws Exception { + String expectedUrn = "urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"; + + // CHECKSTYLE:OFF + String body = + "[\n" + + " {\n" + + " \"urn\": \"" + + expectedUrn + + "\",\n" + + " \"customDataQualityRules\": [\n" + + " {\n" + + " \"field\": \"my_event_data\",\n" + + " \"isFieldLevel\": false,\n" + + " \"type\": \"isNull\",\n" + + " \"checkDefinition\": \"n/a\",\n" + + " \"url\": \"https://github.com/datahub-project/datahub/blob/master/checks/nonNull.sql\"\n" + + " }\n" + + " ]\n" + + " }\n" + + "]"; + // CHECKSTYLE:ON + + mockMvc + .perform( + MockMvcRequestBuilders.post("/v2/entity/dataset") + .content(body) + .contentType(MediaType.APPLICATION_JSON) + .accept(MediaType.APPLICATION_JSON)) + .andExpect(status().is2xxSuccessful()) + .andExpect(MockMvcResultMatchers.jsonPath("$.[0].urn").value(expectedUrn)); + } } diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java index b4e87eedea542..12596d9410874 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/util/OpenApiEntitiesUtilTest.java @@ -1,13 +1,17 @@ package io.datahubproject.openapi.util; +import static org.testng.AssertJUnit.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; -import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.spring.YamlPropertySourceFactory; import io.datahubproject.openapi.config.OpenAPIEntityTestConfiguration; import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.generated.ContainerEntityRequestV2; import io.datahubproject.openapi.generated.ContainerKey; import io.datahubproject.openapi.generated.ContainerKeyAspectRequestV2; +import java.util.List; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.PropertySource; @@ -15,41 +19,44 @@ import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; -import java.util.List; - -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; - - @Import({OpenAPIEntityTestConfiguration.class}) @PropertySource(value = "classpath:/application.yml", factory = YamlPropertySourceFactory.class) public class OpenApiEntitiesUtilTest extends AbstractTestNGSpringContextTests { - @Autowired - private EntityRegistry entityRegistry; - - @BeforeTest - public void disableAssert() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - } - - @Test - public void testInitialization() { - assertNotNull(entityRegistry); - } - - @Test - public void containerConversionTest() { - ContainerEntityRequestV2 test = ContainerEntityRequestV2.builder() - .urn("urn:li:container:123") - .containerKey(ContainerKeyAspectRequestV2.builder().value(ContainerKey.builder().guid("123").build()).build()) - .build(); - List expected = List.of(UpsertAspectRequest.builder() + @Autowired private 
EntityRegistry entityRegistry; + + @BeforeTest + public void disableAssert() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + } + + @Test + public void testInitialization() { + assertNotNull(entityRegistry); + } + + @Test + public void containerConversionTest() { + ContainerEntityRequestV2 test = + ContainerEntityRequestV2.builder() + .urn("urn:li:container:123") + .containerKey( + ContainerKeyAspectRequestV2.builder() + .value(ContainerKey.builder().guid("123").build()) + .build()) + .build(); + List expected = + List.of( + UpsertAspectRequest.builder() .entityType("container") .entityUrn("urn:li:container:123") .aspect(ContainerKey.builder().guid("123").build()) .build()); - assertEquals(expected, OpenApiEntitiesUtil.convertEntityToUpsert(test, ContainerEntityRequestV2.class, entityRegistry)); - } + assertEquals( + expected, + OpenApiEntitiesUtil.convertEntityToUpsert( + test, ContainerEntityRequestV2.class, entityRegistry)); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java index 47e2cfec3a9c0..cc040d29657b2 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/GlobalControllerExceptionHandler.java @@ -6,7 +6,6 @@ import org.springframework.web.bind.annotation.ControllerAdvice; import org.springframework.web.bind.annotation.ExceptionHandler; - @ControllerAdvice public class GlobalControllerExceptionHandler { @ExceptionHandler(ConversionFailedException.class) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java index e4f49df90c392..ed98cf3ef4ce9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/config/SpringWebConfig.java @@ -5,7 +5,6 @@ import io.swagger.v3.oas.annotations.info.Info; import io.swagger.v3.oas.annotations.servers.Server; import java.util.List; - import org.springdoc.core.GroupedOpenApi; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -18,10 +17,10 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc; import org.springframework.web.servlet.config.annotation.WebMvcConfigurer; - @EnableWebMvc -@OpenAPIDefinition(info = @Info(title = "DataHub OpenAPI", version = "2.0.0"), - servers = {@Server(url = "/openapi/", description = "Default Server URL")}) +@OpenAPIDefinition( + info = @Info(title = "DataHub OpenAPI", version = "2.0.0"), + servers = {@Server(url = "/openapi/", description = "Default Server URL")}) @Configuration public class SpringWebConfig implements WebMvcConfigurer { @@ -41,20 +40,17 @@ public void addFormatters(FormatterRegistry registry) { @Bean public GroupedOpenApi defaultOpenApiGroup() { return GroupedOpenApi.builder() - .group("default") - .packagesToExclude( - "io.datahubproject.openapi.operations", - "io.datahubproject.openapi.health" - ).build(); + .group("default") + .packagesToExclude( + 
"io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .build(); } @Bean public GroupedOpenApi operationsOpenApiGroup() { return GroupedOpenApi.builder() - .group("operations") - .packagesToScan( - "io.datahubproject.openapi.operations", - "io.datahubproject.openapi.health" - ).build(); + .group("operations") + .packagesToScan("io.datahubproject.openapi.operations", "io.datahubproject.openapi.health") + .build(); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java index e88f499208af8..c092a2423fdf5 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/converter/StringToChangeCategoryConverter.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.converter; +import static com.linkedin.metadata.timeline.data.ChangeCategory.*; + import com.linkedin.metadata.timeline.data.ChangeCategory; import java.util.List; import java.util.Optional; @@ -8,28 +10,29 @@ import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.converter.Converter; -import static com.linkedin.metadata.timeline.data.ChangeCategory.*; - - public class StringToChangeCategoryConverter implements Converter { @Override public ChangeCategory convert(String source) { try { String upperCase = source.toUpperCase(); - // For compound enums, want to support different cases i.e. technical_schema, technical schema, technical-schema, etc. - Optional compoundCategory = COMPOUND_CATEGORIES.keySet().stream() - .filter(compoundCategoryKey -> matchCompound(compoundCategoryKey, upperCase)) - .map(COMPOUND_CATEGORIES::get) - .findFirst(); + // For compound enums, want to support different cases i.e. technical_schema, technical + // schema, technical-schema, etc. 
+ Optional compoundCategory = + COMPOUND_CATEGORIES.keySet().stream() + .filter(compoundCategoryKey -> matchCompound(compoundCategoryKey, upperCase)) + .map(COMPOUND_CATEGORIES::get) + .findFirst(); return compoundCategory.orElseGet(() -> ChangeCategory.valueOf(upperCase)); } catch (Exception e) { - throw new ConversionFailedException(TypeDescriptor.valueOf(String.class), - TypeDescriptor.valueOf(ChangeCategory.class), source, e); + throw new ConversionFailedException( + TypeDescriptor.valueOf(String.class), + TypeDescriptor.valueOf(ChangeCategory.class), + source, + e); } } private boolean matchCompound(@Nonnull List compoundCategoryKey, @Nonnull String source) { - return compoundCategoryKey.stream() - .allMatch(source::contains); + return compoundCategoryKey.stream().allMatch(source::contains); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java index 0be69e3264957..07a501885f1aa 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/RollbackRunResultDto.java @@ -6,7 +6,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder @JsonInclude(JsonInclude.Include.NON_NULL) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java index 67858581ba97a..d185e01804c24 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UpsertAspectRequest.java @@ -9,7 +9,6 @@ import lombok.Builder; import lombok.Value; - @JsonInclude(JsonInclude.Include.NON_NULL) @Value @Builder @@ -17,15 +16,21 @@ public class UpsertAspectRequest { @JsonProperty("entityType") - @Schema(required = true, description = "The name of the entity matching with its definition in the entity registry") + @Schema( + required = true, + description = "The name of the entity matching with its definition in the entity registry") String entityType; @JsonProperty("entityUrn") - @Schema(description = "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") + @Schema( + description = + "Urn of the entity to be updated with the corresponding aspect, required if entityKey is null") String entityUrn; @JsonProperty("entityKeyAspect") - @Schema(description = "A key aspect referencing the entity to be updated, required if entityUrn is null") + @Schema( + description = + "A key aspect referencing the entity to be updated, required if entityUrn is null") OneOfGenericAspectValue entityKeyAspect; @JsonProperty("aspect") @@ -33,7 +38,5 @@ public class UpsertAspectRequest { OneOfGenericAspectValue aspect; @JsonPOJOBuilder(withPrefix = "") - public static class UpsertAspectRequestBuilder { - - } + public static class UpsertAspectRequestBuilder {} } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java index 02be0cc93eb1c..60062823a7d82 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java +++ 
b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/dto/UrnResponseMap.java
@@ -7,7 +7,6 @@
 import lombok.Builder;
 import lombok.Value;
 
-
 @Value
 @Builder
 @JsonInclude(JsonInclude.Include.NON_NULL)
diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java
index 898f768cf999a..6e0fc5deb0b3c 100644
--- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java
+++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java
@@ -1,5 +1,7 @@
 package io.datahubproject.openapi.entities;
 
+import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
+
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
 import com.datahub.authentication.Authentication;
@@ -52,14 +54,13 @@
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 
-import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName;
-
-
 @RestController
 @RequiredArgsConstructor
 @RequestMapping("/entities/v1")
 @Slf4j
-@Tag(name = "Entities", description = "APIs for ingesting and accessing entities and their constituent aspects")
+@Tag(
+    name = "Entities",
+    description = "APIs for ingesting and accessing entities and their constituent aspects")
 public class EntitiesController {
 
   private final EntityService _entityService;
@@ -76,27 +77,42 @@ public void initBinder(WebDataBinder binder) {
 
   @GetMapping(value = "/latest", produces = MediaType.APPLICATION_JSON_VALUE)
   public ResponseEntity<UrnResponseMap> getEntities(
-      @Parameter(name = "urns", required = true, description = "A list of raw urn strings, only supports a single entity type per request.")
-      @RequestParam("urns") @Nonnull String[] urns,
+      @Parameter(
+              name = "urns",
+              required = true,
+              description =
+                  "A list of raw urn strings, only supports a single entity type per request.")
+          @RequestParam("urns")
+          @Nonnull
+          String[] urns,
       @Parameter(name = "aspectNames", description = "The list of aspect names to retrieve")
-      @RequestParam(name = "aspectNames", required = false) @Nullable String[] aspectNames) {
+          @RequestParam(name = "aspectNames", required = false)
+          @Nullable
+          String[] aspectNames) {
     Timer.Context context = MetricUtils.timer("getEntities").time();
     final Set<Urn> entityUrns = Arrays.stream(urns)
-        // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access
+        // Have to decode here because of frontend routing; this is a no-op for URNs that are
+        // already unencoded when they come through direct API access
         .map(URLDecoder::decode)
-        .map(UrnUtils::getUrn).collect(Collectors.toSet());
+        .map(UrnUtils::getUrn)
+        .collect(Collectors.toSet());
     log.debug("GET ENTITIES {}", entityUrns);
     Authentication authentication = AuthenticationContext.getAuthentication();
     String actorUrnStr = authentication.getActor().toUrnStr();
-    DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(
-        ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())
-    )));
-
-    List> resourceSpecs = entityUrns.stream()
-        .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-        .collect(Collectors.toList());
-    if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, orGroup)) {
+
DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); + + List> resourceSpecs = + entityUrns.stream() + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); } if (entityUrns.size() <= 0) { @@ -104,19 +120,26 @@ public ResponseEntity getEntities( } // TODO: Only supports one entity type at a time, may cause confusion final String entityName = urnToEntityName(entityUrns.iterator().next()); - final Set projectedAspects = aspectNames == null ? _entityService.getEntityAspectNames(entityName) - : new HashSet<>(Arrays.asList(aspectNames)); + final Set projectedAspects = + aspectNames == null + ? _entityService.getEntityAspectNames(entityName) + : new HashSet<>(Arrays.asList(aspectNames)); Throwable exceptionally = null; try { - return ResponseEntity.ok(UrnResponseMap.builder() - .responses(MappingUtil.mapServiceResponse(_entityService - .getEntitiesV2(entityName, entityUrns, projectedAspects), _objectMapper)) - .build()); + return ResponseEntity.ok( + UrnResponseMap.builder() + .responses( + MappingUtil.mapServiceResponse( + _entityService.getEntitiesV2(entityName, entityUrns, projectedAspects), + _objectMapper)) + .build()); } catch (Exception e) { exceptionally = e; throw new RuntimeException( - String.format("Failed to batch get entities with urns: %s, projectedAspects: %s", entityUrns, - projectedAspects), e); + String.format( + "Failed to batch get entities with urns: %s, projectedAspects: %s", + entityUrns, projectedAspects), + e); } finally { if (exceptionally != null) { MetricUtils.counter(MetricRegistry.name("getEntities", "failed")).inc(); @@ -134,24 +157,34 @@ public ResponseEntity> postEntities( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()) - ))); - List proposals = aspectRequests.stream() - .map(MappingUtil::mapToProposal) - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .collect(Collectors.toList()); - - if (restApiAuthorizationEnabled && !MappingUtil.authorizeProposals(proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); + List proposals = + aspectRequests.stream() + .map(MappingUtil::mapToProposal) + .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .collect(Collectors.toList()); + + if (restApiAuthorizationEnabled + && !MappingUtil.authorizeProposals( + proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - List> responses = proposals.stream() - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .collect(Collectors.toList()); + List> responses = + proposals.stream() + .map(proposal -> 
MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService))
+            .collect(Collectors.toList());
     if (responses.stream().anyMatch(Pair::getSecond)) {
       return ResponseEntity.status(HttpStatus.CREATED)
-          .body(responses.stream().filter(Pair::getSecond).map(Pair::getFirst).collect(Collectors.toList()));
+          .body(
+              responses.stream()
+                  .filter(Pair::getSecond)
+                  .map(Pair::getFirst)
+                  .collect(Collectors.toList()));
     } else {
       return ResponseEntity.ok(Collections.emptyList());
     }
@@ -159,52 +192,83 @@ public ResponseEntity<List<String>> postEntities(
 
   @DeleteMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE)
   public ResponseEntity<List<RollbackRunResultDto>> deleteEntities(
-      @Parameter(name = "urns", required = true, description = "A list of raw urn strings, only supports a single entity type per request.")
-      @RequestParam("urns") @Nonnull String[] urns,
-      @Parameter(name = "soft", description = "Determines whether the delete will be soft or hard, defaults to true for soft delete")
-      @RequestParam(value = "soft", defaultValue = "true") boolean soft) {
+      @Parameter(
+              name = "urns",
+              required = true,
+              description =
+                  "A list of raw urn strings, only supports a single entity type per request.")
+          @RequestParam("urns")
+          @Nonnull
+          String[] urns,
+      @Parameter(
+              name = "soft",
+              description =
+                  "Determines whether the delete will be soft or hard, defaults to true for soft delete")
+          @RequestParam(value = "soft", defaultValue = "true")
+          boolean soft) {
     Throwable exceptionally = null;
     try (Timer.Context context = MetricUtils.timer("deleteEntities").time()) {
-        Authentication authentication = AuthenticationContext.getAuthentication();
-        String actorUrnStr = authentication.getActor().toUrnStr();
-        DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(
-            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())
-        )));
-        final Set<Urn> entityUrns = Arrays.stream(urns)
-            // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access
-            .map(URLDecoder::decode)
-            .map(UrnUtils::getUrn).collect(Collectors.toSet());
-
-        List> resourceSpecs = entityUrns.stream()
-            .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-            .collect(Collectors.toList());
-        if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, orGroup)) {
-          UnauthorizedException unauthorizedException = new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities.");
-          exceptionally = unauthorizedException;
-          throw unauthorizedException;
-        }
+      Authentication authentication = AuthenticationContext.getAuthentication();
+      String actorUrnStr = authentication.getActor().toUrnStr();
+      DisjunctivePrivilegeGroup orGroup =
+          new DisjunctivePrivilegeGroup(
+              ImmutableList.of(
+                  new ConjunctivePrivilegeGroup(
+                      ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType()))));
+      final Set<Urn> entityUrns =
+          Arrays.stream(urns)
+              // Have to decode here because of frontend routing; this is a no-op for URNs that are
+              // already unencoded when they come through direct API access
+              .map(URLDecoder::decode)
+              .map(UrnUtils::getUrn)
+              .collect(Collectors.toSet());
 
-      if (!soft) {
-        return ResponseEntity.ok(entityUrns.stream()
-            .map(_entityService::deleteUrn)
-            .map(rollbackRunResult -> MappingUtil.mapRollbackRunResult(rollbackRunResult, _objectMapper))
-            .collect(Collectors.toList()));
-      } else {
-        List deleteRequests = entityUrns.stream()
-            .map(entityUrn -> MappingUtil.createStatusRemoval(entityUrn, 
_entityService)) - .collect(Collectors.toList()); - - return ResponseEntity.ok(Collections.singletonList(RollbackRunResultDto.builder() - .rowsRolledBack(deleteRequests.stream() - .map(MappingUtil::mapToProposal) - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - .filter(Pair::getSecond) - .map(Pair::getFirst) - .map(urnString -> AspectRowSummary.builder().urn(urnString).build()) - .collect(Collectors.toList())) - .rowsDeletedFromEntityDeletion(deleteRequests.size()) - .build())); + List> resourceSpecs = + entityUrns.stream() + .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + UnauthorizedException unauthorizedException = + new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); + exceptionally = unauthorizedException; + throw unauthorizedException; + } + + if (!soft) { + return ResponseEntity.ok( + entityUrns.stream() + .map(_entityService::deleteUrn) + .map( + rollbackRunResult -> + MappingUtil.mapRollbackRunResult(rollbackRunResult, _objectMapper)) + .collect(Collectors.toList())); + } else { + List deleteRequests = + entityUrns.stream() + .map(entityUrn -> MappingUtil.createStatusRemoval(entityUrn, _entityService)) + .collect(Collectors.toList()); + + return ResponseEntity.ok( + Collections.singletonList( + RollbackRunResultDto.builder() + .rowsRolledBack( + deleteRequests.stream() + .map(MappingUtil::mapToProposal) + .map( + proposal -> + MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .map( + proposal -> + MappingUtil.ingestProposal( + proposal, actorUrnStr, _entityService)) + .filter(Pair::getSecond) + .map(Pair::getFirst) + .map(urnString -> AspectRowSummary.builder().urn(urnString).build()) + .collect(Collectors.toList())) + .rowsDeletedFromEntityDeletion(deleteRequests.size()) + .build())); } } catch (Exception e) { exceptionally = e; diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java index c90603bf88c31..79a219f891fc9 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthCheckController.java @@ -10,7 +10,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.client.RequestOptions; @@ -25,7 +24,6 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController @RequestMapping("/") @Tag(name = "HealthCheck", description = "An API for checking health of GMS and its clients.") @@ -33,26 +31,31 @@ public class HealthCheckController { @Autowired @Qualifier("elasticSearchRestHighLevelClient") private RestHighLevelClient elasticClient; + private final Supplier> memoizedSupplier; public HealthCheckController(ConfigurationProvider config) { - this.memoizedSupplier = Suppliers.memoizeWithExpiration( - this::getElasticHealth, 
config.getHealthCheck().getCacheDurationSeconds(), TimeUnit.SECONDS);
+    this.memoizedSupplier =
+        Suppliers.memoizeWithExpiration(
+            this::getElasticHealth,
+            config.getHealthCheck().getCacheDurationSeconds(),
+            TimeUnit.SECONDS);
   }
 
   @GetMapping(path = "/check/ready", produces = MediaType.APPLICATION_JSON_VALUE)
   public ResponseEntity<Boolean> getCombinedHealthCheck(String... checks) {
     return ResponseEntity.status(getCombinedDebug(checks).getStatusCode())
-            .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful());
+        .body(getCombinedDebug(checks).getStatusCode().is2xxSuccessful());
   }
 
   /**
-   * Combined health check endpoint for checking GMS clients.
-   * For now, just checks the health of the ElasticSearch client
-   * @return A ResponseEntity with a Map of String (component name) to ResponseEntity (the health check status of
-   * that component). The status code will be 200 if all components are okay, and 500 if one or more components are not
-   * healthy.
+   * Combined health check endpoint for checking GMS clients. For now, just checks the health of
+   * the ElasticSearch client.
+   *
+   * @return A ResponseEntity with a Map of String (component name) to ResponseEntity (the health
+   *     check status of that component). The status code will be 200 if all components are okay,
+   *     and 500 if one or more components are not healthy.
    */
   @GetMapping(path = "/debug/ready", produces = MediaType.APPLICATION_JSON_VALUE)
   public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(String... checks) {
@@ -60,19 +63,26 @@ public ResponseEntity<Map<String, ResponseEntity<String>>> getCombinedDebug(Stri
     healthChecks.put("elasticsearch", this::getElasticDebugWithCache);
     // Add new components here
 
-    List<String> componentsToCheck = checks != null && checks.length > 0
-        ? Arrays.asList(checks)
-        : new ArrayList<>(healthChecks.keySet());
+    List<String> componentsToCheck =
+        checks != null && checks.length > 0
+            ? 
Arrays.asList(checks) + : new ArrayList<>(healthChecks.keySet()); Map> componentHealth = new HashMap<>(); for (String check : componentsToCheck) { - componentHealth.put(check, - healthChecks.getOrDefault(check, - () -> ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE).body("Unrecognized component " + check)) + componentHealth.put( + check, + healthChecks + .getOrDefault( + check, + () -> + ResponseEntity.status(HttpStatus.SERVICE_UNAVAILABLE) + .body("Unrecognized component " + check)) .get()); } - boolean isHealthy = componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); + boolean isHealthy = + componentHealth.values().stream().allMatch(resp -> resp.getStatusCode() == HttpStatus.OK); if (isHealthy) { return ResponseEntity.ok(componentHealth); } @@ -82,11 +92,12 @@ public ResponseEntity>> getCombinedDebug(Stri @GetMapping(path = "/check/elastic", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity getElasticHealthWithCache() { return ResponseEntity.status(getElasticDebugWithCache().getStatusCode()) - .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); + .body(getElasticDebugWithCache().getStatusCode().is2xxSuccessful()); } /** * Checks the memoized cache for the latest elastic health check result + * * @return The ResponseEntity containing the health check result */ @GetMapping(path = "/debug/elastic", produces = MediaType.APPLICATION_JSON_VALUE) @@ -96,13 +107,15 @@ public ResponseEntity getElasticDebugWithCache() { /** * Query ElasticSearch health endpoint + * * @return A response including the result from ElasticSearch */ private ResponseEntity getElasticHealth() { String responseString = null; try { ClusterHealthRequest request = new ClusterHealthRequest(); - ClusterHealthResponse response = elasticClient.cluster().health(request, RequestOptions.DEFAULT); + ClusterHealthResponse response = + elasticClient.cluster().health(request, RequestOptions.DEFAULT); boolean isHealthy = !response.isTimedOut() && response.getStatus() != ClusterHealthStatus.RED; responseString = response.toString(); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java index 2e243f4c8df9e..3fa926924aabe 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/health/HealthController.java @@ -9,7 +9,6 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @Slf4j @RestController @RequestMapping("/up") diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java index f29461734ebfc..f7c848f91a64c 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java @@ -6,15 +6,15 @@ import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; -import io.datahubproject.openapi.util.ElasticsearchUtils; 
import com.google.common.collect.ImmutableList;
 import com.linkedin.metadata.authorization.PoliciesConfig;
 import com.linkedin.metadata.systemmetadata.SystemMetadataService;
+import io.datahubproject.openapi.util.ElasticsearchUtils;
 import io.swagger.v3.oas.annotations.tags.Tag;
 import java.util.List;
 import lombok.extern.slf4j.Slf4j;
-import org.opensearch.client.tasks.GetTaskResponse;
 import org.json.JSONObject;
+import org.opensearch.client.tasks.GetTaskResponse;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
@@ -28,11 +28,12 @@
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
-
 @RestController
 @RequestMapping("/operations/elasticSearch")
 @Slf4j
-@Tag(name = "ElasticSearchOperations", description = "An API for managing your elasticsearch instance")
+@Tag(
+    name = "ElasticSearchOperations",
+    description = "An API for managing your elasticsearch instance")
 public class OperationsController {
 
   private final AuthorizerChain _authorizerChain;
@@ -51,26 +52,36 @@ public OperationsController(AuthorizerChain authorizerChain) {
   public void initBinder(WebDataBinder binder) {
     binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null));
   }
+
   @GetMapping(path = "/getTaskStatus", produces = MediaType.APPLICATION_JSON_VALUE)
   public ResponseEntity<String> getTaskStatus(String task) {
     Authentication authentication = AuthenticationContext.getAuthentication();
     String actorUrnStr = authentication.getActor().toUrnStr();
-    DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(
-        ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType())
-    )));
-    if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) {
-      return ResponseEntity.status(HttpStatus.FORBIDDEN).body(
-          String.format(actorUrnStr + " is not authorized to get ElasticSearch task status"));
+    DisjunctivePrivilegeGroup orGroup =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType()))));
+    if (restApiAuthorizationEnabled
+        && !AuthUtil.isAuthorizedForResources(
+            _authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) {
+      return ResponseEntity.status(HttpStatus.FORBIDDEN)
+          .body(actorUrnStr + " is not authorized to get ElasticSearch task status");
     }
     if (!ElasticsearchUtils.isTaskIdValid(task)) {
-      return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(
-          String.format("Task ID should be in the form nodeId:taskId e.g. aB1cdEf2GHI-JKLMnoPQr3:123456 (got %s)", task));
+      return ResponseEntity.status(HttpStatus.BAD_REQUEST)
+          .body(
+              String.format(
+                  "Task ID should be in the form nodeId:taskId e.g. 
aB1cdEf2GHI-JKLMnoPQr3:123456 (got %s)", + task)); } - String nodeIdToQuery = task.split(":")[0]; + String nodeIdToQuery = task.split(":")[0]; long taskIdToQuery = Long.parseLong(task.split(":")[1]); - java.util.Optional res = _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); + java.util.Optional res = + _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery); if (res.isEmpty()) { - return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery)); } GetTaskResponse resp = res.get(); JSONObject j = new JSONObject(); @@ -80,4 +91,4 @@ public ResponseEntity getTaskStatus(String task) { j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos()); return ResponseEntity.ok(j.toString()); } -} \ No newline at end of file +} diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java index cfb516913eb09..370f2019a42dd 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java @@ -32,12 +32,13 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; - @RestController @RequiredArgsConstructor @RequestMapping("/platform/entities/v1") @Slf4j -@Tag(name = "Platform Entities", description = "Platform level APIs intended for lower level access to entities") +@Tag( + name = "Platform Entities", + description = "Platform level APIs intended for lower level access to entities") public class PlatformEntitiesController { private final EntityService _entityService; @@ -60,24 +61,33 @@ public ResponseEntity> postEntities( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - List proposals = metadataChangeProposals.stream() - .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) - .collect(Collectors.toList()); - DisjunctivePrivilegeGroup - orGroup = new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()) - ))); + List proposals = + metadataChangeProposals.stream() + .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) + .collect(Collectors.toList()); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled && !MappingUtil.authorizeProposals(proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { + if (restApiAuthorizationEnabled + && !MappingUtil.authorizeProposals( + proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - List> responses = proposals.stream() - .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) - 
.collect(Collectors.toList()); + List> responses = + proposals.stream() + .map(proposal -> MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService)) + .collect(Collectors.toList()); if (responses.stream().anyMatch(Pair::getSecond)) { return ResponseEntity.status(HttpStatus.CREATED) - .body(responses.stream().filter(Pair::getSecond).map(Pair::getFirst).collect(Collectors.toList())); + .body( + responses.stream() + .filter(Pair::getSecond) + .map(Pair::getFirst) + .collect(Collectors.toList())); } else { return ResponseEntity.ok(Collections.emptyList()); } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java index 4641fed3a8610..4ceed6a11b973 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.relationships; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authentication.Authentication; @@ -45,9 +47,6 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - - @RestController @RequiredArgsConstructor @RequestMapping("/relationships/v1") @@ -59,6 +58,7 @@ public enum RelationshipDirection { INCOMING, OUTGOING } + private static final int MAX_DOWNSTREAM_CNT = 200; private final GraphService _graphService; private final AuthorizerChain _authorizerChain; @@ -71,83 +71,127 @@ public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); } - private RelatedEntitiesResult getRelatedEntities(String rawUrn, List relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count) { + private RelatedEntitiesResult getRelatedEntities( + String rawUrn, + List relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count) { start = start == null ? 0 : start; count = count == null ? 
MAX_DOWNSTREAM_CNT : count; com.linkedin.metadata.query.filter.RelationshipDirection restLiDirection; switch (direction) { - case INCOMING: { - restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING; - break; - } - case OUTGOING: { - restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.OUTGOING; - break; - } - default: { - throw new RuntimeException("Unexpected relationship direction " + direction); - } + case INCOMING: + { + restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING; + break; + } + case OUTGOING: + { + restLiDirection = com.linkedin.metadata.query.filter.RelationshipDirection.OUTGOING; + break; + } + default: + { + throw new RuntimeException("Unexpected relationship direction " + direction); + } } - return _graphService.findRelatedEntities(null, newFilter("urn", rawUrn), null, QueryUtils.EMPTY_FILTER, - relationshipTypes, newRelationshipFilter(QueryUtils.EMPTY_FILTER, restLiDirection), start, count); + return _graphService.findRelatedEntities( + null, + newFilter("urn", rawUrn), + null, + QueryUtils.EMPTY_FILTER, + relationshipTypes, + newRelationshipFilter(QueryUtils.EMPTY_FILTER, restLiDirection), + start, + count); } @GetMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE) - @Operation(responses = { @ApiResponse(responseCode = "0", description = "", - content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class)))}) + @Operation( + responses = { + @ApiResponse( + responseCode = "0", + description = "", + content = @Content(schema = @Schema(implementation = RelatedEntitiesResult.class))) + }) public ResponseEntity getRelationships( - @Parameter(name = "urn", required = true, - description = "The urn for the entity whose relationships are being queried") - @RequestParam("urn") - @Nonnull String urn, - @Parameter(name = "relationshipTypes", required = true, - description = "The list of relationship types to traverse") - @RequestParam(name = "relationshipTypes") - @Nonnull String[] relationshipTypes, - @Parameter(name = "direction", required = true, - description = "The directionality of the relationship") - @RequestParam(name = "direction") - @Nonnull RelationshipsController.RelationshipDirection direction, - @Parameter(name = "start", description = "An offset for the relationships to return from. " - + "Useful for pagination.") - @RequestParam(name = "start", defaultValue = "0") - @Nullable Integer start, - @Parameter(name = "count", description = "A count of relationships that will be returned " - + "starting from the offset. Useful for pagination.") - @RequestParam(name = "count", defaultValue = "200") - @Nullable Integer count) { + @Parameter( + name = "urn", + required = true, + description = "The urn for the entity whose relationships are being queried") + @RequestParam("urn") + @Nonnull + String urn, + @Parameter( + name = "relationshipTypes", + required = true, + description = "The list of relationship types to traverse") + @RequestParam(name = "relationshipTypes") + @Nonnull + String[] relationshipTypes, + @Parameter( + name = "direction", + required = true, + description = "The directionality of the relationship") + @RequestParam(name = "direction") + @Nonnull + RelationshipsController.RelationshipDirection direction, + @Parameter( + name = "start", + description = + "An offset for the relationships to return from. 
" + "Useful for pagination.") + @RequestParam(name = "start", defaultValue = "0") + @Nullable + Integer start, + @Parameter( + name = "count", + description = + "A count of relationships that will be returned " + + "starting from the offset. Useful for pagination.") + @RequestParam(name = "count", defaultValue = "200") + @Nullable + Integer count) { Timer.Context context = MetricUtils.timer("getRelationships").time(); - // Have to decode here because of frontend routing, does No-op for already unencoded through direct API access + // Have to decode here because of frontend routing, does No-op for already unencoded through + // direct API access final Urn entityUrn = UrnUtils.getUrn(URLDecoder.decode(urn, Charset.forName("UTF-8"))); log.debug("GET Relationships {}", entityUrn); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) - // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the privileges between these APIs. - ))); + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) + // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the + // privileges between these APIs. + ))); List> resourceSpecs = - Collections.singletonList(Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorizedForResources(_authorizerChain, actorUrnStr, resourceSpecs, - orGroup)) { + Collections.singletonList( + Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorizedForResources( + _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get relationships."); } Throwable exceptionally = null; try { return ResponseEntity.ok( - getRelatedEntities(entityUrn.toString(), Arrays.asList(relationshipTypes), direction, start, - count)); + getRelatedEntities( + entityUrn.toString(), Arrays.asList(relationshipTypes), direction, start, count)); } catch (Exception e) { exceptionally = e; throw new RuntimeException( - String.format("Failed to batch get relationships with urn: %s, relationshipTypes: %s", urn, - Arrays.toString(relationshipTypes)), e); + String.format( + "Failed to batch get relationships with urn: %s, relationshipTypes: %s", + urn, Arrays.toString(relationshipTypes)), + e); } finally { if (exceptionally != null) { MetricUtils.counter(MetricRegistry.name("getRelationships", "failed")).inc(); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java index fbde9e8072002..a84c50e74baf2 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java @@ -30,11 +30,13 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; - @RestController @AllArgsConstructor 
@RequestMapping("/timeline/v1") -@Tag(name = "Timeline", description = "An API for retrieving historical updates to entities and their related documentation.") +@Tag( + name = "Timeline", + description = + "An API for retrieving historical updates to entities and their related documentation.") public class TimelineController { private final TimelineService _timelineService; @@ -44,7 +46,6 @@ public class TimelineController { private Boolean restApiAuthorizationEnabled; /** - * * @param rawUrn * @param startTime * @param endTime @@ -60,7 +61,8 @@ public ResponseEntity> getTimeline( @RequestParam(defaultValue = "-1") long startTime, @RequestParam(defaultValue = "0") long endTime, @RequestParam(defaultValue = "false") boolean raw, - @RequestParam Set categories) throws URISyntaxException, JsonProcessingException { + @RequestParam Set categories) + throws URISyntaxException, JsonProcessingException { // Make request params when implemented String startVersionStamp = null; String endVersionStamp = null; @@ -68,11 +70,18 @@ public ResponseEntity> getTimeline( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); EntitySpec resourceSpec = new EntitySpec(urn.getEntityType(), rawUrn); - DisjunctivePrivilegeGroup orGroup = new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled && !AuthUtil.isAuthorized(_authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { + DisjunctivePrivilegeGroup orGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); + if (restApiAuthorizationEnabled + && !AuthUtil.isAuthorized( + _authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } - return ResponseEntity.ok(_timelineService.getTimeline(urn, categories, startTime, endTime, startVersionStamp, endVersionStamp, raw)); + return ResponseEntity.ok( + _timelineService.getTimeline( + urn, categories, startTime, endTime, startVersionStamp, endVersionStamp, raw)); } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java index 9ef14eefc429b..7b13191bc1b38 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ElasticsearchUtils.java @@ -1,7 +1,8 @@ package io.datahubproject.openapi.util; public class ElasticsearchUtils { - private ElasticsearchUtils() { } + private ElasticsearchUtils() {} + public static boolean isTaskIdValid(String task) { if (task.matches("^[a-zA-Z0-9-_]+:[0-9]+$")) { try { diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index 21dc5a4c8a0d6..0eb3e2d6b8c6e 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -1,11 +1,15 @@ package io.datahubproject.openapi.util; +import 
static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; +import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; +import static java.nio.charset.StandardCharsets.UTF_8; + import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -21,13 +25,13 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.Aspect; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.RollbackRunResult; import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.entity.validation.ValidationException; -import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.GenericAspect; @@ -35,7 +39,15 @@ import com.linkedin.util.Pair; import io.datahubproject.openapi.dto.RollbackRunResultDto; import io.datahubproject.openapi.dto.UpsertAspectRequest; - +import io.datahubproject.openapi.generated.AspectRowSummary; +import io.datahubproject.openapi.generated.AspectType; +import io.datahubproject.openapi.generated.AuditStamp; +import io.datahubproject.openapi.generated.EntityResponse; +import io.datahubproject.openapi.generated.EnvelopedAspect; +import io.datahubproject.openapi.generated.MetadataChangeProposal; +import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue; +import io.datahubproject.openapi.generated.OneOfGenericAspectValue; +import io.datahubproject.openapi.generated.Status; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.HashMap; @@ -51,16 +63,6 @@ import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; - -import io.datahubproject.openapi.generated.AspectRowSummary; -import io.datahubproject.openapi.generated.AspectType; -import io.datahubproject.openapi.generated.AuditStamp; -import io.datahubproject.openapi.generated.EntityResponse; -import io.datahubproject.openapi.generated.EnvelopedAspect; -import io.datahubproject.openapi.generated.MetadataChangeProposal; -import io.datahubproject.openapi.generated.OneOfEnvelopedAspectValue; -import io.datahubproject.openapi.generated.OneOfGenericAspectValue; -import io.datahubproject.openapi.generated.Status; import lombok.extern.slf4j.Slf4j; import org.apache.avro.Schema; import org.reflections.Reflections; @@ -72,36 +74,33 @@ import org.springframework.http.MediaType; import org.springframework.web.client.HttpClientErrorException; -import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; -import static io.datahubproject.openapi.util.ReflectionCache.toUpperFirst; -import static java.nio.charset.StandardCharsets.UTF_8; - @Slf4j public class MappingUtil { - private MappingUtil() { - - } + private MappingUtil() {} private static final JsonNodeFactory NODE_FACTORY = 
JsonNodeFactory.instance; - private static final Map> ENVELOPED_ASPECT_TYPE_MAP = - new HashMap<>(); + private static final Map> + ENVELOPED_ASPECT_TYPE_MAP = new HashMap<>(); private static final Map, String> ASPECT_NAME_MAP = new HashMap<>(); - private static final Map> PEGASUS_TYPE_MAP = new HashMap<>(); + private static final Map> PEGASUS_TYPE_MAP = + new HashMap<>(); private static final String DISCRIMINATOR = "__type"; private static final String PEGASUS_PACKAGE = "com.linkedin"; private static final String OPENAPI_PACKAGE = "io.datahubproject.openapi.generated"; - private static final ReflectionCache REFLECT_AVRO = ReflectionCache.builder() - .basePackage("com.linkedin.pegasus2avro").build(); - private static final ReflectionCache REFLECT_OPENAPI = ReflectionCache.builder() - .basePackage(OPENAPI_PACKAGE).build(); + private static final ReflectionCache REFLECT_AVRO = + ReflectionCache.builder().basePackage("com.linkedin.pegasus2avro").build(); + private static final ReflectionCache REFLECT_OPENAPI = + ReflectionCache.builder().basePackage(OPENAPI_PACKAGE).build(); static { // Build a map from __type name to generated class - ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(false); + ClassPathScanningCandidateComponentProvider provider = + new ClassPathScanningCandidateComponentProvider(false); provider.addIncludeFilter(new AssignableTypeFilter(OneOfEnvelopedAspectValue.class)); - Set components = provider.findCandidateComponents("io/datahubproject/openapi/generated"); + Set components = + provider.findCandidateComponents("io/datahubproject/openapi/generated"); components.forEach(MappingUtil::putEnvelopedAspectEntry); provider = new ClassPathScanningCandidateComponentProvider(false); @@ -111,36 +110,43 @@ private MappingUtil() { // Build a map from fully qualified Pegasus generated class name to class new Reflections(PEGASUS_PACKAGE, new SubTypesScanner(false)) - .getSubTypesOf(RecordTemplate.class) - .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass)); + .getSubTypesOf(RecordTemplate.class) + .forEach(aClass -> PEGASUS_TYPE_MAP.put(aClass.getSimpleName(), aClass)); } - public static Map mapServiceResponse(Map serviceResponse, - ObjectMapper objectMapper) { - return serviceResponse.entrySet() - .stream() - .collect(Collectors.toMap(entry -> entry.getKey().toString(), entry -> mapEntityResponse(entry.getValue(), objectMapper))); + public static Map mapServiceResponse( + Map serviceResponse, ObjectMapper objectMapper) { + return serviceResponse.entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), + entry -> mapEntityResponse(entry.getValue(), objectMapper))); } - public static EntityResponse mapEntityResponse(com.linkedin.entity.EntityResponse entityResponse, ObjectMapper objectMapper) { + public static EntityResponse mapEntityResponse( + com.linkedin.entity.EntityResponse entityResponse, ObjectMapper objectMapper) { return EntityResponse.builder() - .entityName(entityResponse.getEntityName()) - .urn(entityResponse.getUrn().toString()) - .aspects(entityResponse.getAspects() - .entrySet() - .stream() - .collect(Collectors.toMap(Map.Entry::getKey, entry -> mapEnvelopedAspect(entry.getValue(), objectMapper)))).build(); + .entityName(entityResponse.getEntityName()) + .urn(entityResponse.getUrn().toString()) + .aspects( + entityResponse.getAspects().entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> mapEnvelopedAspect(entry.getValue(), 
objectMapper)))) + .build(); } - public static EnvelopedAspect mapEnvelopedAspect(com.linkedin.entity.EnvelopedAspect envelopedAspect, - ObjectMapper objectMapper) { + public static EnvelopedAspect mapEnvelopedAspect( + com.linkedin.entity.EnvelopedAspect envelopedAspect, ObjectMapper objectMapper) { return EnvelopedAspect.builder() - .name(envelopedAspect.getName()) - .timestamp(envelopedAspect.getTimestamp()) - .version(envelopedAspect.getVersion()) - .type(AspectType.fromValue(envelopedAspect.getType().name().toUpperCase(Locale.ROOT))) - .created(objectMapper.convertValue(envelopedAspect.getCreated().data(), AuditStamp.class)) - .value(mapAspectValue(envelopedAspect.getName(), envelopedAspect.getValue(), objectMapper)).build(); + .name(envelopedAspect.getName()) + .timestamp(envelopedAspect.getTimestamp()) + .version(envelopedAspect.getVersion()) + .type(AspectType.fromValue(envelopedAspect.getType().name().toUpperCase(Locale.ROOT))) + .created(objectMapper.convertValue(envelopedAspect.getCreated().data(), AuditStamp.class)) + .value(mapAspectValue(envelopedAspect.getName(), envelopedAspect.getValue(), objectMapper)) + .build(); } private static DataMap insertDiscriminator(@Nullable Class parentClazz, DataMap dataMap) { @@ -148,20 +154,23 @@ private static DataMap insertDiscriminator(@Nullable Class parentClazz, DataM dataMap.put(DISCRIMINATOR, parentClazz.getSimpleName()); } - Set> requiresDiscriminator = dataMap.entrySet().stream() + Set> requiresDiscriminator = + dataMap.entrySet().stream() .filter(e -> e.getValue() instanceof DataMap) .filter(e -> shouldCollapseClassToDiscriminator(e.getKey())) .map(e -> Map.entry(e.getKey(), (DataMap) e.getValue())) .collect(Collectors.toSet()); // DataMap doesn't support concurrent access - requiresDiscriminator.forEach(e -> { - dataMap.remove(e.getKey()); - dataMap.put(DISCRIMINATOR, e.getKey().substring(e.getKey().lastIndexOf(".") + 1)); - dataMap.putAll(e.getValue()); - }); + requiresDiscriminator.forEach( + e -> { + dataMap.remove(e.getKey()); + dataMap.put(DISCRIMINATOR, e.getKey().substring(e.getKey().lastIndexOf(".") + 1)); + dataMap.putAll(e.getValue()); + }); // Look through all the nested classes for possible discriminator requirements - Set, DataMap>> nestedDataMaps = getDataMapPaths(new LinkedList<>(), dataMap).collect(Collectors.toSet()); + Set, DataMap>> nestedDataMaps = + getDataMapPaths(new LinkedList<>(), dataMap).collect(Collectors.toSet()); // DataMap doesn't support concurrent access for (Pair, DataMap> nestedDataMapPath : nestedDataMaps) { List nestedPath = nestedDataMapPath.getFirst(); @@ -178,7 +187,10 @@ private static DataMap insertDiscriminator(@Nullable Class parentClazz, DataM nextClazz = getMethod != null ? 
getMethod.getReturnType() : null; if (nextClazz != null && "List".equals(nextClazz.getSimpleName())) { - String listElemClassName = getMethod.getGenericReturnType().getTypeName() + String listElemClassName = + getMethod + .getGenericReturnType() + .getTypeName() .replace("java.util.List<", "") .replace(">", ""); try { @@ -192,7 +204,7 @@ private static DataMap insertDiscriminator(@Nullable Class parentClazz, DataM } if ((nextClazz != parentClazz && shouldCheckTypeMethod(nextClazz)) - || nested.keySet().stream().anyMatch(MappingUtil::shouldCollapseClassToDiscriminator)) { + || nested.keySet().stream().anyMatch(MappingUtil::shouldCollapseClassToDiscriminator)) { insertDiscriminator(nextClazz, nested); } } @@ -201,42 +213,49 @@ private static DataMap insertDiscriminator(@Nullable Class parentClazz, DataM return dataMap; } - /** * Stream paths to DataMaps + * * @param paths current path * @param data current DataMap or DataList * @return path to all nested DataMaps */ - private static Stream, DataMap>> getDataMapPaths(List paths, Object data) { + private static Stream, DataMap>> getDataMapPaths( + List paths, Object data) { if (data instanceof DataMap) { - return ((DataMap) data).entrySet().stream() + return ((DataMap) data) + .entrySet().stream() .filter(e -> e.getValue() instanceof DataMap || e.getValue() instanceof DataList) - .flatMap(entry -> { - List thisPath = new LinkedList<>(paths); - thisPath.add(entry.getKey()); - if (entry.getValue() instanceof DataMap) { - return Stream.concat( + .flatMap( + entry -> { + List thisPath = new LinkedList<>(paths); + thisPath.add(entry.getKey()); + if (entry.getValue() instanceof DataMap) { + return Stream.concat( Stream.of(Pair.of(thisPath, (DataMap) entry.getValue())), - getDataMapPaths(thisPath, entry.getValue()) - ); - } else { - // DataList - return getDataMapPaths(thisPath, entry.getValue()); - } - }); + getDataMapPaths(thisPath, entry.getValue())); + } else { + // DataList + return getDataMapPaths(thisPath, entry.getValue()); + } + }); } else if (data instanceof DataList) { DataList dataList = (DataList) data; return IntStream.range(0, dataList.size()) - .mapToObj(idx -> Pair.of(idx, dataList.get(idx))) - .filter(idxObject -> idxObject.getValue() instanceof DataMap || idxObject.getValue() instanceof DataList) - .flatMap(idxObject -> { + .mapToObj(idx -> Pair.of(idx, dataList.get(idx))) + .filter( + idxObject -> + idxObject.getValue() instanceof DataMap + || idxObject.getValue() instanceof DataList) + .flatMap( + idxObject -> { Object item = idxObject.getValue(); List thisPath = new LinkedList<>(paths); thisPath.add("[" + idxObject.getKey() + "]"); if (item instanceof DataMap) { - return Stream.concat(Stream.of(Pair.of(thisPath, (DataMap) item)), - getDataMapPaths(thisPath, item)); + return Stream.concat( + Stream.of(Pair.of(thisPath, (DataMap) item)), + getDataMapPaths(thisPath, item)); } else { // DataList return getDataMapPaths(thisPath, item); @@ -246,8 +265,10 @@ private static Stream, DataMap>> getDataMapPaths(List return Stream.empty(); } - public static OneOfEnvelopedAspectValue mapAspectValue(String aspectName, Aspect aspect, ObjectMapper objectMapper) { - Class aspectClass = ENVELOPED_ASPECT_TYPE_MAP.get(aspectName); + public static OneOfEnvelopedAspectValue mapAspectValue( + String aspectName, Aspect aspect, ObjectMapper objectMapper) { + Class aspectClass = + ENVELOPED_ASPECT_TYPE_MAP.get(aspectName); DataMap wrapper = insertDiscriminator(aspectClass, aspect.data()); try { String dataMapAsJson = 
objectMapper.writeValueAsString(wrapper); @@ -261,7 +282,8 @@ public static OneOfEnvelopedAspectValue mapAspectValue(String aspectName, Aspect private static void putEnvelopedAspectEntry(BeanDefinition beanDefinition) { try { Class cls = - (Class) Class.forName(beanDefinition.getBeanClassName()); + (Class) + Class.forName(beanDefinition.getBeanClassName()); String aspectName = getAspectName(cls); ENVELOPED_ASPECT_TYPE_MAP.put(aspectName, cls); } catch (ClassNotFoundException e) { @@ -273,7 +295,8 @@ private static void putEnvelopedAspectEntry(BeanDefinition beanDefinition) { private static void putGenericAspectEntry(BeanDefinition beanDefinition) { try { Class cls = - (Class) Class.forName(beanDefinition.getBeanClassName()); + (Class) + Class.forName(beanDefinition.getBeanClassName()); String aspectName = getAspectName(cls); ASPECT_NAME_MAP.put(cls, aspectName); } catch (ClassNotFoundException e) { @@ -288,14 +311,17 @@ private static String getAspectName(Class cls) { } private static boolean shouldCheckTypeMethod(@Nullable Class parentClazz) { - return Optional.ofNullable(parentClazz).map(cls -> cls.getName().startsWith(OPENAPI_PACKAGE + ".")).orElse(false); + return Optional.ofNullable(parentClazz) + .map(cls -> cls.getName().startsWith(OPENAPI_PACKAGE + ".")) + .orElse(false); } private static boolean shouldCollapseClassToDiscriminator(String className) { return className.startsWith(PEGASUS_PACKAGE + "."); } - private static Optional shouldDiscriminate(String parentShortClass, String fieldName, ObjectNode node) { + private static Optional shouldDiscriminate( + String parentShortClass, String fieldName, ObjectNode node) { try { if (parentShortClass != null) { Class pegasus2AvroClazz = REFLECT_AVRO.lookupClass(parentShortClass, true); @@ -304,7 +330,8 @@ private static Optional shouldDiscriminate(String parentShortClass, Stri Schema.Field avroField = avroSchema.getField(fieldName); if (avroField.schema().isUnion()) { - Class discriminatedClazz = REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); + Class discriminatedClazz = + REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); return Optional.of(discriminatedClazz.getName().replace(".pegasus2avro", "")); } } @@ -313,7 +340,8 @@ private static Optional shouldDiscriminate(String parentShortClass, Stri Iterator itr = node.fieldNames(); itr.next(); if (!itr.hasNext()) { // only contains discriminator - Class discriminatedClazz = REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); + Class discriminatedClazz = + REFLECT_AVRO.lookupClass(node.get(DISCRIMINATOR).asText(), true); return Optional.of(discriminatedClazz.getName().replace(".pegasus2avro", "")); } @@ -326,16 +354,22 @@ private static Optional shouldDiscriminate(String parentShortClass, Stri private static void replaceDiscriminator(ObjectNode node) { replaceDiscriminator(null, null, null, node); } - private static void replaceDiscriminator(@Nullable ObjectNode parentNode, @Nullable String parentDiscriminator, - @Nullable String propertyName, @Nonnull ObjectNode node) { + + private static void replaceDiscriminator( + @Nullable ObjectNode parentNode, + @Nullable String parentDiscriminator, + @Nullable String propertyName, + @Nonnull ObjectNode node) { final String discriminator; if (node.isObject() && node.has(DISCRIMINATOR)) { - Optional discriminatorClassName = shouldDiscriminate(parentDiscriminator, propertyName, node); + Optional discriminatorClassName = + shouldDiscriminate(parentDiscriminator, propertyName, node); if (parentNode != null && 
discriminatorClassName.isPresent()) { discriminator = node.remove(DISCRIMINATOR).asText(); parentNode.remove(propertyName); - parentNode.set(propertyName, NODE_FACTORY.objectNode().set(discriminatorClassName.get(), node)); + parentNode.set( + propertyName, NODE_FACTORY.objectNode().set(discriminatorClassName.get(), node)); } else { discriminator = node.remove(DISCRIMINATOR).asText(); } @@ -344,55 +378,75 @@ private static void replaceDiscriminator(@Nullable ObjectNode parentNode, @Nulla } List> objectChildren = new LinkedList<>(); - node.fields().forEachRemaining(entry -> { - if (entry.getValue().isObject()) { - objectChildren.add(entry); - } else if (entry.getValue().isArray()) { - entry.getValue().forEach(i -> { - if (i.isObject()) { - objectChildren.add(Map.entry(entry.getKey(), i)); - } - }); - } - }); + node.fields() + .forEachRemaining( + entry -> { + if (entry.getValue().isObject()) { + objectChildren.add(entry); + } else if (entry.getValue().isArray()) { + entry + .getValue() + .forEach( + i -> { + if (i.isObject()) { + objectChildren.add(Map.entry(entry.getKey(), i)); + } + }); + } + }); - objectChildren.forEach(entry -> - replaceDiscriminator(node, discriminator, entry.getKey(), (ObjectNode) entry.getValue()) - ); + objectChildren.forEach( + entry -> + replaceDiscriminator( + node, discriminator, entry.getKey(), (ObjectNode) entry.getValue())); } @Nonnull - public static GenericAspect convertGenericAspect(@Nonnull io.datahubproject.openapi.generated.GenericAspect genericAspect, + public static GenericAspect convertGenericAspect( + @Nonnull io.datahubproject.openapi.generated.GenericAspect genericAspect, ObjectMapper objectMapper) { try { ObjectNode jsonTree = (ObjectNode) objectMapper.valueToTree(genericAspect).get("value"); replaceDiscriminator(jsonTree); String pretty = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonTree); - return new GenericAspect().setContentType(genericAspect.getContentType()) + return new GenericAspect() + .setContentType(genericAspect.getContentType()) .setValue(ByteString.copyString(pretty, UTF_8)); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } - public static boolean authorizeProposals(List proposals, EntityService entityService, - Authorizer authorizer, String actorUrnStr, DisjunctivePrivilegeGroup orGroup) { - List> resourceSpecs = proposals.stream() - .map(proposal -> { - com.linkedin.metadata.models.EntitySpec entitySpec = entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); - Urn entityUrn = EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); - return Optional.of(new EntitySpec(proposal.getEntityType(), entityUrn.toString())); - }) - .collect(Collectors.toList()); + public static boolean authorizeProposals( + List proposals, + EntityService entityService, + Authorizer authorizer, + String actorUrnStr, + DisjunctivePrivilegeGroup orGroup) { + List> resourceSpecs = + proposals.stream() + .map( + proposal -> { + com.linkedin.metadata.models.EntitySpec entitySpec = + entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); + Urn entityUrn = + EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); + return Optional.of( + new EntitySpec(proposal.getEntityType(), entityUrn.toString())); + }) + .collect(Collectors.toList()); return AuthUtil.isAuthorizedForResources(authorizer, actorUrnStr, resourceSpecs, orGroup); } - public static Pair ingestProposal(com.linkedin.mxe.MetadataChangeProposal serviceProposal, String actorUrn, 
+ public static Pair ingestProposal( + com.linkedin.mxe.MetadataChangeProposal serviceProposal, + String actorUrn, EntityService entityService) { // TODO: Use the actor present in the IC. Timer.Context context = MetricUtils.timer("postEntity").time(); final com.linkedin.common.AuditStamp auditStamp = - new com.linkedin.common.AuditStamp().setTime(System.currentTimeMillis()) + new com.linkedin.common.AuditStamp() + .setTime(System.currentTimeMillis()) .setActor(UrnUtils.getUrn(actorUrn)); final List additionalChanges = @@ -401,20 +455,25 @@ public static Pair ingestProposal(com.linkedin.mxe.MetadataChan log.info("Proposal: {}", serviceProposal); Throwable exceptionally = null; try { - Stream proposalStream = Stream.concat(Stream.of(serviceProposal), + Stream proposalStream = + Stream.concat( + Stream.of(serviceProposal), AspectUtils.getAdditionalChanges(serviceProposal, entityService).stream()); - AspectsBatch batch = AspectsBatchImpl.builder().mcps(proposalStream.collect(Collectors.toList()), - entityService.getEntityRegistry()).build(); + AspectsBatch batch = + AspectsBatchImpl.builder() + .mcps(proposalStream.collect(Collectors.toList()), entityService.getEntityRegistry()) + .build(); - Set proposalResult = - entityService.ingestProposal(batch, auditStamp, false); + Set proposalResult = entityService.ingestProposal(batch, auditStamp, false); Urn urn = proposalResult.stream().findFirst().get().getUrn(); - return new Pair<>(urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); + return new Pair<>( + urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); } catch (ValidationException ve) { exceptionally = ve; - throw HttpClientErrorException.create(HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null); + throw HttpClientErrorException.create( + HttpStatus.UNPROCESSABLE_ENTITY, ve.getMessage(), null, null, null); } catch (Exception e) { exceptionally = e; throw e; @@ -429,18 +488,23 @@ public static Pair ingestProposal(com.linkedin.mxe.MetadataChan } public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectRequest) { - MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = MetadataChangeProposal.builder(); - io.datahubproject.openapi.generated.GenericAspect - genericAspect = io.datahubproject.openapi.generated.GenericAspect.builder() - .value(aspectRequest.getAspect()) - .contentType(MediaType.APPLICATION_JSON_VALUE).build(); + MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = + MetadataChangeProposal.builder(); + io.datahubproject.openapi.generated.GenericAspect genericAspect = + io.datahubproject.openapi.generated.GenericAspect.builder() + .value(aspectRequest.getAspect()) + .contentType(MediaType.APPLICATION_JSON_VALUE) + .build(); io.datahubproject.openapi.generated.GenericAspect keyAspect = null; if (aspectRequest.getEntityKeyAspect() != null) { - keyAspect = io.datahubproject.openapi.generated.GenericAspect.builder() - .contentType(MediaType.APPLICATION_JSON_VALUE) - .value(aspectRequest.getEntityKeyAspect()).build(); + keyAspect = + io.datahubproject.openapi.generated.GenericAspect.builder() + .contentType(MediaType.APPLICATION_JSON_VALUE) + .value(aspectRequest.getEntityKeyAspect()) + .build(); } - metadataChangeProposal.aspect(genericAspect) + metadataChangeProposal + .aspect(genericAspect) .changeType(io.datahubproject.openapi.generated.ChangeType.UPSERT) .aspectName(ASPECT_NAME_MAP.get(aspectRequest.getAspect().getClass())) 
.entityKeyAspect(keyAspect) @@ -450,9 +514,10 @@ public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectReq return metadataChangeProposal.build(); } - public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(MetadataChangeProposal metadataChangeProposal, - ObjectMapper objectMapper) { - io.datahubproject.openapi.generated.KafkaAuditHeader auditHeader = metadataChangeProposal.getAuditHeader(); + public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal( + MetadataChangeProposal metadataChangeProposal, ObjectMapper objectMapper) { + io.datahubproject.openapi.generated.KafkaAuditHeader auditHeader = + metadataChangeProposal.getAuditHeader(); com.linkedin.mxe.MetadataChangeProposal serviceProposal = new com.linkedin.mxe.MetadataChangeProposal() @@ -463,7 +528,8 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad } if (metadataChangeProposal.getSystemMetadata() != null) { serviceProposal.setSystemMetadata( - objectMapper.convertValue(metadataChangeProposal.getSystemMetadata(), SystemMetadata.class)); + objectMapper.convertValue( + metadataChangeProposal.getSystemMetadata(), SystemMetadata.class)); } if (metadataChangeProposal.getAspectName() != null) { serviceProposal.setAspectName(metadataChangeProposal.getAspectName()); @@ -471,7 +537,8 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad if (auditHeader != null) { KafkaAuditHeader kafkaAuditHeader = new KafkaAuditHeader(); - kafkaAuditHeader.setAuditVersion(auditHeader.getAuditVersion()) + kafkaAuditHeader + .setAuditVersion(auditHeader.getAuditVersion()) .setTime(auditHeader.getTime()) .setAppName(auditHeader.getAppName()) .setMessageId(new UUID(ByteString.copyString(auditHeader.getMessageId(), UTF_8))) @@ -491,30 +558,40 @@ public static com.linkedin.mxe.MetadataChangeProposal mapToServiceProposal(Metad serviceProposal.setAuditHeader(kafkaAuditHeader); } - serviceProposal = metadataChangeProposal.getEntityKeyAspect() != null - ? serviceProposal.setEntityKeyAspect( - MappingUtil.convertGenericAspect(metadataChangeProposal.getEntityKeyAspect(), objectMapper)) - : serviceProposal; - serviceProposal = metadataChangeProposal.getAspect() != null - ? serviceProposal.setAspect( - MappingUtil.convertGenericAspect(metadataChangeProposal.getAspect(), objectMapper)) - : serviceProposal; + serviceProposal = + metadataChangeProposal.getEntityKeyAspect() != null + ? serviceProposal.setEntityKeyAspect( + MappingUtil.convertGenericAspect( + metadataChangeProposal.getEntityKeyAspect(), objectMapper)) + : serviceProposal; + serviceProposal = + metadataChangeProposal.getAspect() != null + ? 
serviceProposal.setAspect( + MappingUtil.convertGenericAspect(metadataChangeProposal.getAspect(), objectMapper)) + : serviceProposal; return serviceProposal; } - public static RollbackRunResultDto mapRollbackRunResult(RollbackRunResult rollbackRunResult, ObjectMapper objectMapper) { - List aspectRowSummaries = rollbackRunResult.getRowsRolledBack().stream() - .map(aspectRowSummary -> objectMapper.convertValue(aspectRowSummary.data(), AspectRowSummary.class)) - .collect(Collectors.toList()); + public static RollbackRunResultDto mapRollbackRunResult( + RollbackRunResult rollbackRunResult, ObjectMapper objectMapper) { + List aspectRowSummaries = + rollbackRunResult.getRowsRolledBack().stream() + .map( + aspectRowSummary -> + objectMapper.convertValue(aspectRowSummary.data(), AspectRowSummary.class)) + .collect(Collectors.toList()); return RollbackRunResultDto.builder() .rowsRolledBack(aspectRowSummaries) - .rowsDeletedFromEntityDeletion(rollbackRunResult.getRowsDeletedFromEntityDeletion()).build(); + .rowsDeletedFromEntityDeletion(rollbackRunResult.getRowsDeletedFromEntityDeletion()) + .build(); } public static UpsertAspectRequest createStatusRemoval(Urn urn, EntityService entityService) { - com.linkedin.metadata.models.EntitySpec entitySpec = entityService.getEntityRegistry().getEntitySpec(urn.getEntityType()); + com.linkedin.metadata.models.EntitySpec entitySpec = + entityService.getEntityRegistry().getEntitySpec(urn.getEntityType()); if (entitySpec == null || !entitySpec.getAspectSpecMap().containsKey(STATUS_ASPECT_NAME)) { - throw new IllegalArgumentException("Entity type is not valid for soft deletes: " + urn.getEntityType()); + throw new IllegalArgumentException( + "Entity type is not valid for soft deletes: " + urn.getEntityType()); } return UpsertAspectRequest.builder() .aspect(Status.builder().removed(true).build()) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java index 12f7652aff587..31577429df72d 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/ReflectionCache.java @@ -2,9 +2,6 @@ import com.google.common.reflect.ClassPath; import com.linkedin.util.Pair; -import lombok.Builder; -import lombok.extern.slf4j.Slf4j; - import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -16,123 +13,132 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.stream.Collectors; +import lombok.Builder; +import lombok.extern.slf4j.Slf4j; @Slf4j @Builder public class ReflectionCache { - private static final ConcurrentHashMap METHOD_CACHE = new ConcurrentHashMap<>(); - private static final ConcurrentHashMap> CLASS_CACHE = new ConcurrentHashMap<>(); - - private final String basePackage; - private final Set subPackages; - @Builder.Default // appropriate for lombok - private final Function, String> getBuilderName = clazz -> - String.join("", clazz.getSimpleName(), "$", clazz.getSimpleName(), "Builder"); - - public static class ReflectionCacheBuilder { - public ReflectionCacheBuilder basePackage(String basePackage) { - return basePackage(basePackage, Set.of()); - } - - public ReflectionCacheBuilder basePackage(String basePackage, Set packageExclusions) { - this.basePackage = basePackage; - return 
subPackages(findSubPackages(basePackage, Optional.ofNullable(packageExclusions).orElse(Set.of()))); - } - - private ReflectionCacheBuilder subPackages(Set subPackages) { - this.subPackages = subPackages; - return this; - } - - private Set findSubPackages(String packageName, Set exclusions) { - try { - return ClassPath.from(getClass().getClassLoader()) - .getAllClasses() - .stream() - .filter(clazz -> exclusions.stream().noneMatch(excl -> clazz.getPackageName().startsWith(excl)) - && !clazz.getName().contains("$") && clazz.getName().startsWith(packageName)) - .map(ClassPath.ClassInfo::getPackageName) - .collect(Collectors.toSet()); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - - public Method lookupMethod(Class clazz, String method, Class... parameters) { - if (clazz == null) { - return null; - } else { - return METHOD_CACHE.computeIfAbsent( - String.join("_", clazz.getName(), method), - key -> { - try { - log.debug("Lookup: " + clazz.getName() + " Method: " + method + " Parameters: " + Arrays.toString(parameters)); - return clazz.getDeclaredMethod(method, parameters); - } catch (NoSuchMethodException e) { - return null; - } - } - ); - } - } - - public Class lookupClass(String className, boolean searchSubclass) { - if (!searchSubclass) { - return lookupClass(className); - } else { - List subclasses = new LinkedList<>(); - subclasses.add(basePackage); - if (subPackages != null) { - subclasses.addAll(subPackages); - } - - for (String packageName : subclasses) { - try { - return cachedClassLookup(packageName, className); - } catch (Exception e) { - log.debug("Class not found {}.{} ... continuing search", packageName, className); - } - } - } - throw new ClassCastException(String.format("Could not locate %s in package %s", className, basePackage)); + private static final ConcurrentHashMap METHOD_CACHE = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap> CLASS_CACHE = new ConcurrentHashMap<>(); + + private final String basePackage; + private final Set subPackages; + @Builder.Default // appropriate for lombok + private final Function, String> getBuilderName = + clazz -> String.join("", clazz.getSimpleName(), "$", clazz.getSimpleName(), "Builder"); + + public static class ReflectionCacheBuilder { + public ReflectionCacheBuilder basePackage(String basePackage) { + return basePackage(basePackage, Set.of()); } - public Class lookupClass(String className) { - return cachedClassLookup(basePackage, className); + public ReflectionCacheBuilder basePackage(String basePackage, Set packageExclusions) { + this.basePackage = basePackage; + return subPackages( + findSubPackages(basePackage, Optional.ofNullable(packageExclusions).orElse(Set.of()))); } - private Class cachedClassLookup(String packageName, String className) { - return CLASS_CACHE.computeIfAbsent( - String.format("%s.%s", packageName, className), - key -> { - try { - log.debug("Lookup: " + key); - return Class.forName(key); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - ); + private ReflectionCacheBuilder subPackages(Set subPackages) { + this.subPackages = subPackages; + return this; } - /** - * Get builder instance and class - */ - public Pair, Object> getBuilder(Class toClazz) throws InvocationTargetException, IllegalAccessException { - Class toClazzBuilder = lookupClass(getBuilderName.apply(toClazz)); - return Pair.of(toClazzBuilder, lookupMethod(toClazz, "builder").invoke(null)); + private Set findSubPackages(String packageName, Set exclusions) { + try { + return 
ClassPath.from(getClass().getClassLoader()).getAllClasses().stream() + .filter( + clazz -> + exclusions.stream().noneMatch(excl -> clazz.getPackageName().startsWith(excl)) + && !clazz.getName().contains("$") + && clazz.getName().startsWith(packageName)) + .map(ClassPath.ClassInfo::getPackageName) + .collect(Collectors.toSet()); + } catch (IOException e) { + throw new RuntimeException(e); + } } - - public Method lookupMethod(Pair, Object> builderPair, String method, Class... parameters) { - return lookupMethod(builderPair.getFirst(), method, parameters); - } - - public static String toLowerFirst(String s) { - return s.substring(0, 1).toLowerCase() + s.substring(1); + } + + public Method lookupMethod(Class clazz, String method, Class... parameters) { + if (clazz == null) { + return null; + } else { + return METHOD_CACHE.computeIfAbsent( + String.join("_", clazz.getName(), method), + key -> { + try { + log.debug( + "Lookup: " + + clazz.getName() + + " Method: " + + method + + " Parameters: " + + Arrays.toString(parameters)); + return clazz.getDeclaredMethod(method, parameters); + } catch (NoSuchMethodException e) { + return null; + } + }); } - - public static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + } + + public Class lookupClass(String className, boolean searchSubclass) { + if (!searchSubclass) { + return lookupClass(className); + } else { + List subclasses = new LinkedList<>(); + subclasses.add(basePackage); + if (subPackages != null) { + subclasses.addAll(subPackages); + } + + for (String packageName : subclasses) { + try { + return cachedClassLookup(packageName, className); + } catch (Exception e) { + log.debug("Class not found {}.{} ... continuing search", packageName, className); + } + } } + throw new ClassCastException( + String.format("Could not locate %s in package %s", className, basePackage)); + } + + public Class lookupClass(String className) { + return cachedClassLookup(basePackage, className); + } + + private Class cachedClassLookup(String packageName, String className) { + return CLASS_CACHE.computeIfAbsent( + String.format("%s.%s", packageName, className), + key -> { + try { + log.debug("Lookup: " + key); + return Class.forName(key); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + }); + } + + /** Get builder instance and class */ + public Pair, Object> getBuilder(Class toClazz) + throws InvocationTargetException, IllegalAccessException { + Class toClazzBuilder = lookupClass(getBuilderName.apply(toClazz)); + return Pair.of(toClazzBuilder, lookupMethod(toClazz, "builder").invoke(null)); + } + + public Method lookupMethod( + Pair, Object> builderPair, String method, Class... 
parameters) { + return lookupMethod(builderPair.getFirst(), method, parameters); + } + + public static String toLowerFirst(String s) { + return s.substring(0, 1).toLowerCase() + s.substring(1); + } + + public static String toUpperFirst(String s) { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } diff --git a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java index 6c2ec108fe493..06640ba13fb8b 100644 --- a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java +++ b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java @@ -1,13 +1,17 @@ package entities; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.AuthorizerChain; -import com.linkedin.metadata.config.PreProcessHooks; import com.fasterxml.jackson.databind.ObjectMapper; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.UpdateAspectResult; import com.linkedin.metadata.event.EventProducer; @@ -32,13 +36,12 @@ import io.datahubproject.openapi.generated.SubTypes; import io.datahubproject.openapi.generated.TagAssociation; import io.datahubproject.openapi.generated.ViewProperties; +import io.ebean.Transaction; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.function.Function; - -import io.ebean.Transaction; import mock.MockEntityRegistry; import mock.MockEntityService; import org.mockito.ArgumentMatchers; @@ -46,15 +49,11 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.*; - - public class EntitiesControllerTest { public static final String S = "somerandomstring"; - public static final String DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; + public static final String DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:platform,name,PROD)"; public static final String CORPUSER_URN = "urn:li:corpuser:datahub"; public static final String GLOSSARY_TERM_URN = "urn:li:glossaryTerm:SavingAccount"; public static final String DATA_PLATFORM_URN = "urn:li:dataPlatform:platform"; @@ -62,25 +61,38 @@ public class EntitiesControllerTest { @BeforeMethod public void setup() - throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException { + throws NoSuchMethodException, + InvocationTargetException, + InstantiationException, + IllegalAccessException { EntityRegistry mockEntityRegistry = new MockEntityRegistry(); AspectDao aspectDao = Mockito.mock(AspectDao.class); - Mockito.when(aspectDao.runInTransactionWithRetry( - ArgumentMatchers.>any(), any(), anyInt())).thenAnswer(i -> - ((Function) i.getArgument(0)).apply(Mockito.mock(Transaction.class)) - ); + Mockito.when( + aspectDao.runInTransactionWithRetry( + ArgumentMatchers.>any(), any(), anyInt())) + .thenAnswer( + i -> + ((Function) 
i.getArgument(0)) + .apply(Mockito.mock(Transaction.class))); EventProducer mockEntityEventProducer = Mockito.mock(EventProducer.class); UpdateIndicesService mockUpdateIndicesService = mock(UpdateIndicesService.class); PreProcessHooks preProcessHooks = new PreProcessHooks(); preProcessHooks.setUiEnabled(true); - MockEntityService mockEntityService = new MockEntityService(aspectDao, mockEntityEventProducer, mockEntityRegistry, - mockUpdateIndicesService, preProcessHooks); + MockEntityService mockEntityService = + new MockEntityService( + aspectDao, + mockEntityEventProducer, + mockEntityRegistry, + mockUpdateIndicesService, + preProcessHooks); AuthorizerChain authorizerChain = Mockito.mock(AuthorizerChain.class); - _entitiesController = new EntitiesController(mockEntityService, new ObjectMapper(), authorizerChain); + _entitiesController = + new EntitiesController(mockEntityService, new ObjectMapper(), authorizerChain); Authentication authentication = Mockito.mock(Authentication.class); when(authentication.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - when(authorizerChain.authorize(any())).thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); + when(authorizerChain.authorize(any())) + .thenReturn(new AuthorizationResult(null, AuthorizationResult.Type.ALLOW, "")); AuthenticationContext.setAuthentication(authentication); } @@ -89,98 +101,130 @@ public void setup() @Test public void testIngestDataset() { List datasetAspects = new ArrayList<>(); - UpsertAspectRequest viewProperties = UpsertAspectRequest.builder() - .aspect(ViewProperties.builder() - .viewLogic(S) - .viewLanguage(S) - .materialized(true).build()) - .entityType(DATASET_ENTITY_NAME) - .entityUrn(DATASET_URN) - .build(); + UpsertAspectRequest viewProperties = + UpsertAspectRequest.builder() + .aspect( + ViewProperties.builder().viewLogic(S).viewLanguage(S).materialized(true).build()) + .entityType(DATASET_ENTITY_NAME) + .entityUrn(DATASET_URN) + .build(); datasetAspects.add(viewProperties); - UpsertAspectRequest subTypes = UpsertAspectRequest.builder() - .aspect(SubTypes.builder() - .typeNames(Collections.singletonList(S)).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest subTypes = + UpsertAspectRequest.builder() + .aspect(SubTypes.builder().typeNames(Collections.singletonList(S)).build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(subTypes); - UpsertAspectRequest datasetProfile = UpsertAspectRequest.builder() - .aspect(DatasetProfile.builder().build().timestampMillis(0L).addFieldProfilesItem( - DatasetFieldProfile.builder() - .fieldPath(S) - .histogram(Histogram.builder() - .boundaries(Collections.singletonList(S)).build()).build() - ) - ) + UpsertAspectRequest datasetProfile = + UpsertAspectRequest.builder() + .aspect( + DatasetProfile.builder() + .build() + .timestampMillis(0L) + .addFieldProfilesItem( + DatasetFieldProfile.builder() + .fieldPath(S) + .histogram( + Histogram.builder() + .boundaries(Collections.singletonList(S)) + .build()) + .build())) .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() + .entityKeyAspect( + DatasetKey.builder() .name("name") .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) + .origin(FabricType.PROD) + .build()) 
.build(); datasetAspects.add(datasetProfile); - UpsertAspectRequest schemaMetadata = UpsertAspectRequest.builder() - .aspect(SchemaMetadata.builder() - .schemaName(S) - .dataset(DATASET_URN) - .platform(DATA_PLATFORM_URN) - .hash(S) - .version(0L) - .platformSchema(MySqlDDL.builder().tableSchema(S).build()) - .fields(Collections.singletonList(SchemaField.builder() - .fieldPath(S) - .nativeDataType(S) - .type(SchemaFieldDataType.builder().type(StringType.builder().build()).build()) - .description(S) - .globalTags(GlobalTags.builder() - .tags(Collections.singletonList(TagAssociation.builder() - .tag(TAG_URN).build())).build()) - .glossaryTerms(GlossaryTerms.builder() - .terms(Collections.singletonList(GlossaryTermAssociation.builder() - .urn(GLOSSARY_TERM_URN).build())) - .auditStamp(AuditStamp.builder() - .time(0L) - .actor(CORPUSER_URN).build()).build()).build() - ) - ).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest schemaMetadata = + UpsertAspectRequest.builder() + .aspect( + SchemaMetadata.builder() + .schemaName(S) + .dataset(DATASET_URN) + .platform(DATA_PLATFORM_URN) + .hash(S) + .version(0L) + .platformSchema(MySqlDDL.builder().tableSchema(S).build()) + .fields( + Collections.singletonList( + SchemaField.builder() + .fieldPath(S) + .nativeDataType(S) + .type( + SchemaFieldDataType.builder() + .type(StringType.builder().build()) + .build()) + .description(S) + .globalTags( + GlobalTags.builder() + .tags( + Collections.singletonList( + TagAssociation.builder().tag(TAG_URN).build())) + .build()) + .glossaryTerms( + GlossaryTerms.builder() + .terms( + Collections.singletonList( + GlossaryTermAssociation.builder() + .urn(GLOSSARY_TERM_URN) + .build())) + .auditStamp( + AuditStamp.builder() + .time(0L) + .actor(CORPUSER_URN) + .build()) + .build()) + .build())) + .build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(schemaMetadata); - UpsertAspectRequest glossaryTerms = UpsertAspectRequest.builder() - .aspect(GlossaryTerms.builder() - .terms(Collections.singletonList(GlossaryTermAssociation.builder() - .urn(GLOSSARY_TERM_URN).build())) - .auditStamp(AuditStamp.builder() - .time(0L) - .actor(CORPUSER_URN).build()).build()) - .entityType(DATASET_ENTITY_NAME) - .entityKeyAspect(DatasetKey.builder() - .name("name") - .platform(DATA_PLATFORM_URN) - .origin(FabricType.PROD).build()) - .build(); + UpsertAspectRequest glossaryTerms = + UpsertAspectRequest.builder() + .aspect( + GlossaryTerms.builder() + .terms( + Collections.singletonList( + GlossaryTermAssociation.builder().urn(GLOSSARY_TERM_URN).build())) + .auditStamp(AuditStamp.builder().time(0L).actor(CORPUSER_URN).build()) + .build()) + .entityType(DATASET_ENTITY_NAME) + .entityKeyAspect( + DatasetKey.builder() + .name("name") + .platform(DATA_PLATFORM_URN) + .origin(FabricType.PROD) + .build()) + .build(); datasetAspects.add(glossaryTerms); _entitiesController.postEntities(datasetAspects); } -// @Test -// public void testGetDataset() { -// _entitiesController.getEntities(new String[] {DATASET_URN}, -// new String[] { -// SCHEMA_METADATA_ASPECT_NAME -// }); -// } + // @Test + // public void testGetDataset() { + // _entitiesController.getEntities(new String[] {DATASET_URN}, + // new String[] { + // SCHEMA_METADATA_ASPECT_NAME + // }); + // } } 
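A minimal, self-contained sketch of the stubbing pattern used in setup() above, assuming only that Mockito is on the classpath; Repository and Session here are hypothetical stand-ins for AspectDao and io.ebean.Transaction, not DataHub types. The thenAnswer unwraps the Function passed to the transactional method and applies it to a mocked handle, so the code under test runs synchronously without a real database transaction:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.function.Function;

public class TransactionStubSketch {

  interface Session {} // hypothetical stand-in for io.ebean.Transaction

  interface Repository { // hypothetical stand-in for AspectDao
    <T> T runInTransaction(Function<Session, T> work);
  }

  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    Repository repository = mock(Repository.class);

    // Instead of opening a transaction, apply the caller's Function to a mock Session.
    when(repository.runInTransaction(any()))
        .thenAnswer(
            invocation ->
                ((Function<Session, Object>) invocation.getArgument(0))
                    .apply(mock(Session.class)));

    // The callback executes immediately against the mocked Session.
    String result = repository.runInTransaction(session -> "committed");
    System.out.println(result); // prints "committed"
  }
}

The same shape generalizes to any template method that takes a callback: stub the template once, and every test exercises the callback against mocks.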
diff --git a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java index 852b6cfcb4b22..91e9e4fd4671e 100644 --- a/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java +++ b/metadata-service/openapi-servlet/src/test/java/mock/MockEntityService.java @@ -1,6 +1,7 @@ package mock; -import com.linkedin.metadata.config.PreProcessHooks; +import static entities.EntitiesControllerTest.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -20,6 +21,7 @@ import com.linkedin.entity.AspectType; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.config.PreProcessHooks; import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.entity.EntityServiceImpl; import com.linkedin.metadata.entity.ListResult; @@ -49,22 +51,25 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; -import static entities.EntitiesControllerTest.*; - - public class MockEntityService extends EntityServiceImpl { - public MockEntityService(@Nonnull AspectDao aspectDao, @Nonnull EventProducer producer, @Nonnull EntityRegistry entityRegistry, @Nonnull - UpdateIndicesService updateIndicesService, PreProcessHooks preProcessHooks) { + public MockEntityService( + @Nonnull AspectDao aspectDao, + @Nonnull EventProducer producer, + @Nonnull EntityRegistry entityRegistry, + @Nonnull UpdateIndicesService updateIndicesService, + PreProcessHooks preProcessHooks) { super(aspectDao, producer, entityRegistry, true, updateIndicesService, preProcessHooks); } @Override - public Map> getLatestAspects(@Nonnull Set urns, @Nonnull Set aspectNames) { + public Map> getLatestAspects( + @Nonnull Set urns, @Nonnull Set aspectNames) { return null; } @Override - public Map getLatestAspectsForUrn(@Nonnull Urn urn, @Nonnull Set aspectNames) { + public Map getLatestAspectsForUrn( + @Nonnull Urn urn, @Nonnull Set aspectNames) { return Collections.emptyMap(); } @@ -74,42 +79,58 @@ public RecordTemplate getAspect(@Nonnull Urn urn, @Nonnull String aspectName, lo } @Override - public Map> getLatestEnvelopedAspects(@Nonnull String entityName, @Nonnull Set urns, - @Nonnull Set aspectNames) throws URISyntaxException { + public Map> getLatestEnvelopedAspects( + @Nonnull String entityName, @Nonnull Set urns, @Nonnull Set aspectNames) + throws URISyntaxException { Urn urn = UrnUtils.getUrn(DATASET_URN); Map> envelopedAspectMap = new HashMap<>(); List aspects = new ArrayList<>(); EnvelopedAspect schemaMetadata = new EnvelopedAspect(); SchemaMetadata pegasusSchemaMetadata = new SchemaMetadata(); - pegasusSchemaMetadata.setDataset(DatasetUrn.createFromUrn(UrnUtils.getUrn(DATASET_URN))) + pegasusSchemaMetadata + .setDataset(DatasetUrn.createFromUrn(UrnUtils.getUrn(DATASET_URN))) .setVersion(0L) - .setCreated(new AuditStamp().setActor(UrnUtils.getUrn(CORPUSER_URN)).setTime(System.currentTimeMillis())) + .setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(CORPUSER_URN)) + .setTime(System.currentTimeMillis())) .setHash(S) .setCluster(S) .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL().setTableSchema(S))) - .setForeignKeys(new ForeignKeyConstraintArray(Collections.singletonList( - new ForeignKeyConstraint() - .setForeignDataset(urn) - .setName(S) - .setForeignFields(new UrnArray(Collections.singletonList(urn)))))) - .setFields(new 
SchemaFieldArray(Collections.singletonList( - new SchemaField() - .setDescription(S) - .setFieldPath(S) - .setType(new SchemaFieldDataType().setType(SchemaFieldDataType.Type.create(new StringType()))) - .setGlobalTags( - new GlobalTags() - .setTags(new TagAssociationArray(Collections.singletonList( - new TagAssociation().setTag(TagUrn.createFromUrn(UrnUtils.getUrn(TAG_URN))) - )))) - .setGlossaryTerms(new GlossaryTerms().setTerms( - new GlossaryTermAssociationArray(Collections.singletonList( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(UrnUtils.getUrn(GLOSSARY_TERM_URN))) - ))) - ) - )) - ); + .setForeignKeys( + new ForeignKeyConstraintArray( + Collections.singletonList( + new ForeignKeyConstraint() + .setForeignDataset(urn) + .setName(S) + .setForeignFields(new UrnArray(Collections.singletonList(urn)))))) + .setFields( + new SchemaFieldArray( + Collections.singletonList( + new SchemaField() + .setDescription(S) + .setFieldPath(S) + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + Collections.singletonList( + new TagAssociation() + .setTag( + TagUrn.createFromUrn( + UrnUtils.getUrn(TAG_URN))))))) + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + Collections.singletonList( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + UrnUtils.getUrn(GLOSSARY_TERM_URN)))))))))); schemaMetadata .setType(AspectType.VERSIONED) .setName("schemaMetadata") @@ -120,29 +141,31 @@ public Map> getLatestEnvelopedAspects(@Nonnull String } @Override - public Map> getVersionedEnvelopedAspects(@Nonnull Set versionedUrns, - @Nonnull Set aspectNames) throws URISyntaxException { + public Map> getVersionedEnvelopedAspects( + @Nonnull Set versionedUrns, @Nonnull Set aspectNames) + throws URISyntaxException { return null; } @Override - public EnvelopedAspect getLatestEnvelopedAspect(@Nonnull String entityName, @Nonnull Urn urn, - @Nonnull String aspectName) throws Exception { + public EnvelopedAspect getLatestEnvelopedAspect( + @Nonnull String entityName, @Nonnull Urn urn, @Nonnull String aspectName) throws Exception { return null; } @Override - public VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version) { + public VersionedAspect getVersionedAspect( + @Nonnull Urn urn, @Nonnull String aspectName, long version) { return null; } @Override - public ListResult listLatestAspects(@Nonnull String entityName, @Nonnull String aspectName, int start, - int count) { + public ListResult listLatestAspects( + @Nonnull String entityName, @Nonnull String aspectName, int start, int count) { return null; } -/* @Nonnull + /* @Nonnull @Override protected UpdateAspectResult ingestAspectToLocalDB(@Nonnull Urn urn, @Nonnull String aspectName, @Nonnull Function, RecordTemplate> updateLambda, @Nonnull AuditStamp auditStamp, @@ -161,8 +184,12 @@ protected List> ingestAspectsToLocalDB(@Nonnull @Nullable @Override - public RecordTemplate ingestAspectIfNotPresent(@NotNull Urn urn, @NotNull String aspectName, - @NotNull RecordTemplate newValue, @NotNull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata) { + public RecordTemplate ingestAspectIfNotPresent( + @NotNull Urn urn, + @NotNull String aspectName, + @NotNull RecordTemplate newValue, + @NotNull AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata) { return null; } @@ -172,13 +199,11 @@ public ListUrnsResult 
listUrns(@Nonnull String entityName, int start, int count) } @Override - public void setWritable(boolean canWrite) { - - } + public void setWritable(boolean canWrite) {} @Override - public RollbackRunResult rollbackWithConditions(List aspectRows, Map conditions, - boolean hardDelete) { + public RollbackRunResult rollbackWithConditions( + List aspectRows, Map conditions, boolean hardDelete) { return null; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java index e632aa7eadff0..17163b937f417 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthParam.java @@ -4,32 +4,29 @@ import java.util.Optional; import lombok.Data; - /** * POJO for YAML section presents in config.yml at location plugins[].params. * - * These parameters are same for Authenticator and Authorizer plugins. + *
<p>
These parameters are same for Authenticator and Authorizer plugins. * - * {@link com.datahub.plugins.auth.provider.AuthPluginConfigProvider} uses this AuthParam to create instance of - * either {@link AuthenticatorPluginConfig} or {@link AuthorizerPluginConfig} + *
<p>
{@link com.datahub.plugins.auth.provider.AuthPluginConfigProvider} uses this AuthParam to + * create instance of either {@link AuthenticatorPluginConfig} or {@link AuthorizerPluginConfig} */ @Data public class AuthParam { - /** - * Fully-qualified class-name of plugin - */ + /** Fully-qualified class-name of plugin */ private String className; /** - * Default jarFileName is ".jar". If plugin's jar file name is different from default value then set - * this property. + * Default jarFileName is ".jar". If plugin's jar file name is different from default + * value then set this property. */ private Optional jarFileName = Optional.empty(); /** - * These configs are specific to plugin. GMS pass this map as is to plugin - * {@link com.datahub.plugins.auth.authentication.Authenticator} or - * {@link com.datahub.plugins.auth.authorization.Authorizer} init method + * These configs are specific to plugin. GMS pass this map as is to plugin {@link + * com.datahub.plugins.auth.authentication.Authenticator} or {@link + * com.datahub.plugins.auth.authorization.Authorizer} init method */ private Optional> configs = Optional.empty(); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java index b4546d9f5af16..8bc06c73a9439 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthPluginConfig.java @@ -9,16 +9,19 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - -/** - * Superclass for {@link AuthenticatorPluginConfig} and {@link AuthorizerPluginConfig} - */ +/** Superclass for {@link AuthenticatorPluginConfig} and {@link AuthorizerPluginConfig} */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = false) public class AuthPluginConfig extends PluginConfig { - public AuthPluginConfig(PluginType type, String name, Boolean enabled, String className, Path pluginHomeDirectory, - Path pluginJarPath, Optional> configs) { + public AuthPluginConfig( + PluginType type, + String name, + Boolean enabled, + String className, + Path pluginHomeDirectory, + Path pluginJarPath, + Optional> configs) { super(type, name, enabled, className, pluginHomeDirectory, pluginJarPath, configs); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java index 276faed56f7ab..b10a178caa9fa 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthenticatorPluginConfig.java @@ -8,16 +8,20 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - /** - * Authenticator plugin configuration provided by user. - * {@link com.datahub.plugins.auth.provider.AuthenticatorPluginConfigProvider} instantiate this class + * Authenticator plugin configuration provided by user. 
{@link + * com.datahub.plugins.auth.provider.AuthenticatorPluginConfigProvider} instantiate this class */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = false) public class AuthenticatorPluginConfig extends AuthPluginConfig { - public AuthenticatorPluginConfig(String name, Boolean enabled, String className, Path pluginDirectory, Path pluginJar, + public AuthenticatorPluginConfig( + String name, + Boolean enabled, + String className, + Path pluginDirectory, + Path pluginJar, Optional> configs) { super(PluginType.AUTHENTICATOR, name, enabled, className, pluginDirectory, pluginJar, configs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java index 1a4bd1ea07906..de8c3d7ecaaa4 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/configuration/AuthorizerPluginConfig.java @@ -8,16 +8,20 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - /** - * Authorizer plugin configuration provided by user. - * {@link com.datahub.plugins.auth.provider.AuthorizerPluginConfigProvider} instantiate this class + * Authorizer plugin configuration provided by user. {@link + * com.datahub.plugins.auth.provider.AuthorizerPluginConfigProvider} instantiate this class */ @Data @NoArgsConstructor @EqualsAndHashCode(callSuper = false) public class AuthorizerPluginConfig extends AuthPluginConfig { - public AuthorizerPluginConfig(String name, Boolean enabled, String className, Path pluginDirectory, Path pluginJar, + public AuthorizerPluginConfig( + String name, + Boolean enabled, + String className, + Path pluginDirectory, + Path pluginJar, Optional> configs) { super(PluginType.AUTHORIZER, name, enabled, className, pluginDirectory, pluginJar, configs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java index b970258aa3ea0..4e62d03620f7f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthPluginConfigProvider.java @@ -11,9 +11,9 @@ import java.util.stream.Collectors; import java.util.stream.Stream; - /** - * Base class for {@link AuthenticatorPluginConfigProvider} and {@link AuthorizerPluginConfigProvider}. + * Base class for {@link AuthenticatorPluginConfigProvider} and {@link + * AuthorizerPluginConfigProvider}. 
*/ public abstract class AuthPluginConfigProvider implements PluginConfigProvider { public abstract PluginType getType(); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java index 546cee04d05a0..71563e79ef787 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthenticatorPluginConfigProvider.java @@ -9,10 +9,9 @@ import java.nio.file.Path; import javax.annotation.Nonnull; - /** - * Responsible for creating {@link AuthenticatorPluginConfig} instance. - * This provider is register in {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authenticator + * Responsible for creating {@link AuthenticatorPluginConfig} instance. This provider is register in + * {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authenticator * configuration */ public class AuthenticatorPluginConfigProvider extends AuthPluginConfigProvider { @@ -24,13 +23,19 @@ public PluginType getType() { @Override public AuthPluginConfig createAuthPluginConfig(@Nonnull PluginConfig pluginConfig) { // Map Yaml section present in config.yml at plugins[].params to AuthParam - AuthParam authParam = (new YamlMapper()).fromMap(pluginConfig.getParams(), AuthParam.class); - // Make the pluginJar file path either from name of plugin or explicitly from plugins[].params.jarFileName + AuthParam authParam = + (new YamlMapper()).fromMap(pluginConfig.getParams(), AuthParam.class); + // Make the pluginJar file path either from name of plugin or explicitly from + // plugins[].params.jarFileName // This logic is common for authenticator and authorizer plugin and hence define in superclass Path pluginJar = formPluginJar(pluginConfig, authParam); - return new AuthenticatorPluginConfig(pluginConfig.getName(), pluginConfig.getEnabled(), authParam.getClassName(), - pluginConfig.getPluginHomeDirectory(), pluginJar, authParam.getConfigs()); + return new AuthenticatorPluginConfig( + pluginConfig.getName(), + pluginConfig.getEnabled(), + authParam.getClassName(), + pluginConfig.getPluginHomeDirectory(), + pluginJar, + authParam.getConfigs()); } } - diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java index 397dc3fd93b36..7899f55523595 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/auth/provider/AuthorizerPluginConfigProvider.java @@ -9,11 +9,9 @@ import java.nio.file.Path; import javax.annotation.Nonnull; - /** - * Responsible for creating {@link AuthorizerPluginConfig} instance. - * This provider is register in {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authorizer - * configuration + * Responsible for creating {@link AuthorizerPluginConfig} instance. 
This provider is register in + * {@link com.datahub.plugins.factory.PluginConfigFactory} as provider of Authorizer configuration */ public class AuthorizerPluginConfigProvider extends AuthPluginConfigProvider { @Override @@ -24,13 +22,20 @@ public PluginType getType() { @Override public AuthPluginConfig createAuthPluginConfig(@Nonnull PluginConfig pluginConfig) { // Map Yaml section present in config.yml at plugins[].params to AuthParam - AuthParam authParam = (new YamlMapper()).fromMap(pluginConfig.getParams(), AuthParam.class); + AuthParam authParam = + (new YamlMapper()).fromMap(pluginConfig.getParams(), AuthParam.class); - // Make the pluginJar file path either from name of plugin or explicitly from plugins[].params.jarFileName + // Make the pluginJar file path either from name of plugin or explicitly from + // plugins[].params.jarFileName // This logic is common for authenticator and authorizer plugin and hence define in superclass Path pluginJar = formPluginJar(pluginConfig, authParam); - return new AuthorizerPluginConfig(pluginConfig.getName(), pluginConfig.getEnabled(), authParam.getClassName(), - pluginConfig.getPluginHomeDirectory(), pluginJar, authParam.getConfigs()); + return new AuthorizerPluginConfig( + pluginConfig.getName(), + pluginConfig.getEnabled(), + authParam.getClassName(), + pluginConfig.getPluginHomeDirectory(), + pluginJar, + authParam.getConfigs()); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java index c4dc94b7c73d5..ba15fea2ccd50 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/ConfigValidationUtils.java @@ -7,15 +7,10 @@ import javax.annotation.Nonnull; import org.apache.commons.lang3.StringUtils; - -/** - * Common validations. - * Used in {@link com.datahub.plugins.configuration.PluginConfig} - */ +/** Common validations. Used in {@link com.datahub.plugins.configuration.PluginConfig} */ public class ConfigValidationUtils { - private ConfigValidationUtils() { - } + private ConfigValidationUtils() {} public static void whiteSpacesValidation(@Nonnull String fieldName, @Nonnull String value) throws IllegalArgumentException { @@ -25,7 +20,8 @@ public static void whiteSpacesValidation(@Nonnull String fieldName, @Nonnull Str } } - public static void mapShouldNotBeEmpty(@Nonnull String fieldName, @Nonnull Map attributeMap) + public static void mapShouldNotBeEmpty( + @Nonnull String fieldName, @Nonnull Map attributeMap) throws IllegalArgumentException { if (attributeMap.isEmpty()) { throw new IllegalArgumentException(String.format("%s should not be empty", fieldName)); @@ -39,15 +35,18 @@ public static void listShouldNotBeEmpty(@Nonnull String fieldName, @Nonnull List } } - public static void listShouldNotHaveDuplicate(@Nonnull String fieldName, @Nonnull List list) { + public static void listShouldNotHaveDuplicate( + @Nonnull String fieldName, @Nonnull List list) { Set set = new HashSet<>(); - list.forEach((input) -> { - if (set.contains(input)) { - throw new IllegalArgumentException( - String.format("Duplicate entry of %s is found in %s. %s should not contain duplicate", input, fieldName, - fieldName)); - } - set.add(input); - }); + list.forEach( + (input) -> { + if (set.contains(input)) { + throw new IllegalArgumentException( + String.format( + "Duplicate entry of %s is found in %s. 
%s should not contain duplicate", + input, fieldName, fieldName)); + } + set.add(input); + }); } } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java index 02b3b4566d705..dfc26041ee627 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfig.java @@ -7,52 +7,43 @@ import lombok.Data; import lombok.NoArgsConstructor; - -/** - * Flat form of plugin configuration configured in config.yaml at plugins[] and plugins[].params - */ +/** Flat form of plugin configuration configured in config.yaml at plugins[] and plugins[].params */ @Data @NoArgsConstructor @AllArgsConstructor public class PluginConfig { - /** - * Type of plugin. Supported types are {@link PluginType} - */ + /** Type of plugin. Supported types are {@link PluginType} */ private PluginType type; - /** - * name of the plugin. It should be unique in plugins[] list - */ + /** name of the plugin. It should be unique in plugins[] list */ private String name; /** - * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take authentication/authorization - * decisions. + * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take + * authentication/authorization decisions. */ private Boolean enabled; - /** - * Fully-qualified class-name of plugin - */ + /** Fully-qualified class-name of plugin */ private String className; /** - * It is always set to /. - * For example if plugin-name is ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then - * pluginDirectory would be /etc/datahub/plugins/auth/ranger-authorizer + * It is always set to /. For example if plugin-name is + * ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then pluginDirectory + * would be /etc/datahub/plugins/auth/ranger-authorizer */ private Path pluginHomeDirectory; /** - * Default jarFileName is ".jar". If plugin's jar file name is different from default value then set - * this property. + * Default jarFileName is ".jar". If plugin's jar file name is different from default + * value then set this property. */ private Path pluginJarPath; /** - * These configs are specific to plugin. GMS pass this map as is to plugin - * {@link com.datahub.plugins.auth.authentication.Authenticator} or - * {@link com.datahub.plugins.auth.authorization.Authorizer} init method + * These configs are specific to plugin. 
GMS pass this map as is to plugin {@link + * com.datahub.plugins.auth.authentication.Authenticator} or {@link + * com.datahub.plugins.auth.authorization.Authorizer} init method */ private Optional> configs; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java index b1b0844f428b7..b068a009528d3 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginConfigProvider.java @@ -2,7 +2,6 @@ import java.util.List; - public interface PluginConfigProvider { List processConfig(List pluginConfigConfigs); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java index 0a46be21155b6..713f5683a82a1 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginPermissionManager.java @@ -3,13 +3,12 @@ import java.nio.file.Path; import java.security.ProtectionDomain; - -/** - * Implement this interface to create Java SecurityManager's ProtectionDomain for the plugin. - */ +/** Implement this interface to create Java SecurityManager's ProtectionDomain for the plugin. */ public interface PluginPermissionManager { /** - * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to the plugin code + * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to + * the plugin code + * * @param pluginHome * @return ProtectionDomain */ diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java index ed3bf0a4f4473..7db9b7d40276e 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/PluginType.java @@ -1,17 +1,11 @@ package com.datahub.plugins.common; -/** - * Supported plugin types - */ +/** Supported plugin types */ public enum PluginType { - /** - * PluginType for Authenticator plugin - */ + /** PluginType for Authenticator plugin */ AUTHENTICATOR, - /** - * PluginType for Authorizer plugin - */ + /** PluginType for Authorizer plugin */ AUTHORIZER; @Override diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java index 7ab0032b86497..3eb01659eb99f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/SecurityMode.java @@ -7,28 +7,22 @@ import java.security.Permissions; import java.util.function.Function; - -/** - * Supported security modes - */ +/** Supported security modes */ public enum SecurityMode { /** * In this mode plugins has limited access. * - * Plugins are allowed to connect on below ports only - * 1) port greater than 1024 - * 2) port 80 - * 3) port 443 - * All other ports connection are disallowed. + *
<p>
Plugins are allowed to connect on below ports only 1) port greater than 1024 2) port 80 3) + * port 443 All other ports connection are disallowed. * - * Plugins are allowed to read and write files on PLUGIN_HOME directory only and all other read/write access are - * denied. + *
<p>
Plugins are allowed to read and write files on PLUGIN_HOME directory only and all other + * read/write access are denied. */ RESTRICTED(SecurityMode::restrictModePermissionSupplier), /** - * Plugins has full access. - * In this mode plugin can read/write to any directory, can connect to any port and can read environment variables. + * Plugins has full access. In this mode plugin can read/write to any directory, can connect to + * any port and can read environment variables. */ LENIENT(SecurityMode::lenientModePermissionSupplier); @@ -43,9 +37,12 @@ private static Permissions restrictModePermissionSupplier(Path sourceCodeDirecto permissions.add(new FilePermission(sourceCodeDirectory.toString() + "/*", "read,write,delete")); permissions.add( - new SocketPermission("*:1024-", "connect,resolve")); // Allow to connect access to all socket above 1024 - permissions.add(new SocketPermission("*:80", "connect,resolve")); // Allow to connect access to HTTP port - permissions.add(new SocketPermission("*:443", "connect,resolve")); // Allow to connect access to HTTPS port + new SocketPermission( + "*:1024-", "connect,resolve")); // Allow to connect access to all socket above 1024 + permissions.add( + new SocketPermission("*:80", "connect,resolve")); // Allow to connect access to HTTP port + permissions.add( + new SocketPermission("*:443", "connect,resolve")); // Allow to connect access to HTTPS port return permissions; } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java index c4a79e9434923..309bbfb1b6485 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/common/YamlMapper.java @@ -9,16 +9,13 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * - * A mapper to map plugin configuration to java Pojo classes - */ +/** A mapper to map plugin configuration to java Pojo classes */ public class YamlMapper { private final ObjectMapper objectMapper; public YamlMapper() { - this.objectMapper = YAMLMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); + this.objectMapper = + YAMLMapper.builder().enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS).build(); objectMapper.registerModule(new Jdk8Module()); } diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java index 6cf1966787875..ff87176ebbd7f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/Config.java @@ -10,17 +10,13 @@ import lombok.Builder; import lombok.Getter; - -/** - * {@link Config} is getting loaded from /etc/datahub/plugins/auth/config.yaml - */ +/** {@link Config} is getting loaded from /etc/datahub/plugins/auth/config.yaml */ @Getter @Builder @JsonDeserialize(builder = Config.CustomBuilder.class) public class Config { - public static final String FIELD_PLUGINS = "plugins"; // for validation error messages - @Nonnull - private List plugins; + public static final String FIELD_PLUGINS = "plugins"; // for validation error messages + @Nonnull private List plugins; public static CustomBuilder builder() { return new CustomBuilder(); @@ -29,12 +25,14 @@ public static CustomBuilder builder() { @JsonPOJOBuilder(withPrefix = "") public static 
class CustomBuilder extends ConfigBuilder { public Config build() { - ConfigValidationUtils.listShouldNotBeEmpty(FIELD_PLUGINS, Collections.singletonList(super.plugins)); + ConfigValidationUtils.listShouldNotBeEmpty( + FIELD_PLUGINS, Collections.singletonList(super.plugins)); List list = new ArrayList<>(super.plugins.size()); - super.plugins.forEach((pluginConfig) -> { - list.add(pluginConfig.getName()); - }); + super.plugins.forEach( + (pluginConfig) -> { + list.add(pluginConfig.getName()); + }); ConfigValidationUtils.listShouldNotHaveDuplicate(FIELD_PLUGINS, list); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java index ac2590209f4db..0c371263eea5f 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/ConfigProvider.java @@ -7,19 +7,19 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class ConfigProvider { public static final String CONFIG_FILE_NAME = "config.yml"; /** - * Yaml file path of plugin configuration file. Content of this file should match with {@link Config} + * Yaml file path of plugin configuration file. Content of this file should match with {@link + * Config} */ private final Path configFilePath; /** - * Directory where all plugins are mounted in DataHub GMS. - * Default pluginBaseDir is /etc/datahub/plugins/auth. + * Directory where all plugins are mounted in DataHub GMS. Default pluginBaseDir is + * /etc/datahub/plugins/auth. */ private final Path pluginBaseDir; @@ -36,7 +36,8 @@ private void setPluginDir(@Nonnull PluginConfig pluginConfig) { public Optional load() { // Check config file should exist if (!this.configFilePath.toFile().exists()) { - log.warn("Configuration {} file not found at location {}", CONFIG_FILE_NAME, this.pluginBaseDir); + log.warn( + "Configuration {} file not found at location {}", CONFIG_FILE_NAME, this.pluginBaseDir); return Optional.empty(); } @@ -45,4 +46,4 @@ public Optional load() { config.getPlugins().forEach(this::setPluginDir); return Optional.of(config); } -} \ No newline at end of file +} diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java index faeeabbf955eb..5280f520109fd 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/configuration/PluginConfig.java @@ -1,7 +1,7 @@ package com.datahub.plugins.configuration; -import com.datahub.plugins.common.PluginType; import com.datahub.plugins.common.ConfigValidationUtils; +import com.datahub.plugins.common.PluginType; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder; @@ -13,10 +13,7 @@ import lombok.EqualsAndHashCode; import lombok.NoArgsConstructor; - -/** - * POJO to map YAML section present in config.yml at plugins[] - */ +/** POJO to map YAML section present in config.yml at plugins[] */ @Data @NoArgsConstructor @AllArgsConstructor @@ -24,35 +21,30 @@ @JsonDeserialize(builder = PluginConfig.CustomBuilder.class) @EqualsAndHashCode(onlyExplicitlyIncluded = true) public class PluginConfig { - /** 
- * name of the plugin. It should be unique in plugins[] list - */ - @EqualsAndHashCode.Include - private String name; // In list of plugin, the name should be unique + /** name of the plugin. It should be unique in plugins[] list */ + @EqualsAndHashCode.Include private String name; // In list of plugin, the name should be unique - /** - * Type of plugin. Supported types are {@link PluginType} - */ + /** Type of plugin. Supported types are {@link PluginType} */ private PluginType type; /** - * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take authentication/authorization - * decisions. + * Whether to load the plugin in GMS. If set to true plugin will be loaded in GMS take + * authentication/authorization decisions. */ private Boolean enabled; /** - * Attributes in params should be as per POJO {@link com.datahub.plugins.auth.configuration.AuthParam} + * Attributes in params should be as per POJO {@link + * com.datahub.plugins.auth.configuration.AuthParam} */ private Map params; /** - * It is always set to /. - * For example if plugin-name is ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then - * pluginDirectory would be /etc/datahub/plugins/auth/ranger-authorizer + * It is always set to /. For example if plugin-name is + * ranger-authorizer and plugin-base-directory is /etc/datahub/plugins/auth then pluginDirectory + * would be /etc/datahub/plugins/auth/ranger-authorizer */ - @JsonIgnore - private Path pluginHomeDirectory; + @JsonIgnore private Path pluginHomeDirectory; public static CustomBuilder builder() { return new CustomBuilder(); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java index b0a59a1656c8d..80837b966ba58 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/factory/PluginConfigFactory.java @@ -11,12 +11,9 @@ import java.util.Map; import javax.annotation.Nonnull; - -/** - * Create instance of config provider as per type mentioned in {@link Config} - */ +/** Create instance of config provider as per type mentioned in {@link Config} */ public class PluginConfigFactory { - private final static Map CONFIG_PROVIDER_REGISTRY; + private static final Map CONFIG_PROVIDER_REGISTRY; static { CONFIG_PROVIDER_REGISTRY = new HashMap<>(2); diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java index 92a7cae0647c5..1529df3ede676 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/IsolatedClassLoader.java @@ -30,10 +30,9 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; - /** - * IsolatedClassLoader to load custom implementation of DataHub Plugins. - * Override methods behave as per Java ClassLoader documentation. + * IsolatedClassLoader to load custom implementation of DataHub Plugins. Override methods behave as + * per Java ClassLoader documentation. 
*/ @Slf4j public class IsolatedClassLoader extends ClassLoader { @@ -50,22 +49,30 @@ public class IsolatedClassLoader extends ClassLoader { private final Path _executionDirectory; - public IsolatedClassLoader(@Nonnull PluginPermissionManager pluginPermissionManager, - @Nonnull PluginConfig pluginToLoad, @Nonnull ClassLoader... applicationClassLoaders) { + public IsolatedClassLoader( + @Nonnull PluginPermissionManager pluginPermissionManager, + @Nonnull PluginConfig pluginToLoad, + @Nonnull ClassLoader... applicationClassLoaders) { this._pluginPermissionManager = pluginPermissionManager; this._pluginConfig = pluginToLoad; this._classLoaders.add(this.getClass().getClassLoader()); // then application class-loader this._classLoaders.addAll(Arrays.asList(applicationClassLoaders)); // if any extra class loaders this._executionDirectory = - Paths.get("/tmp", pluginToLoad.getPluginHomeDirectory().toString(), EXECUTION_DIR); // to store .so files i.e. libraries + Paths.get( + "/tmp", + pluginToLoad.getPluginHomeDirectory().toString(), + EXECUTION_DIR); // to store .so files i.e. libraries try { this.createJarEntryMap(); } catch (IOException e) { - // This would occur if we don't have permission on directory and chances of this is close to zero, hence catching + // This would occur if we don't have permission on directory and chances of this is close to + // zero, hence catching // this checked exception and throwing runtime exception // to make caller code more readable - String message = String.format("Unable to load jar file %s for plugin %s", pluginToLoad.getPluginJarPath(), - pluginToLoad.getName()); + String message = + String.format( + "Unable to load jar file %s for plugin %s", + pluginToLoad.getPluginJarPath(), pluginToLoad.getName()); throw new RuntimeException(message, e); } } @@ -85,15 +92,18 @@ private void createJarEntryMap() throws IOException { } /** - * Load plugin class from jar given in pluginToLoad parameter and return instance of class which implements Plugin - * interface. - * This method verifies whether loaded plugin is assignable to expectedInstanceOf class + * Load plugin class from jar given in pluginToLoad parameter and return instance of class which + * implements Plugin interface. 
This method verifies whether loaded plugin is assignable to + * expectedInstanceOf class + * * @param expectedInstanceOf class instance of interface caller is expecting * @return Instance of Plugin - * @throws ClassNotFoundException className parameter available in Plugin configuration is not found + * @throws ClassNotFoundException className parameter available in Plugin configuration is not + * found */ @Nonnull - public Plugin instantiatePlugin(@Nonnull Class expectedInstanceOf) throws ClassNotFoundException { + public Plugin instantiatePlugin(@Nonnull Class expectedInstanceOf) + throws ClassNotFoundException { Class clazz = this.loadClass(this._pluginConfig.getClassName(), true); try { @@ -102,14 +112,17 @@ public Plugin instantiatePlugin(@Nonnull Class expectedInstanc // Check loaded plugin has implemented the proper implementation of child interface if (!expectedInstanceOf.isAssignableFrom(clazz)) { throw new InstantiationException( - String.format("In plugin %s, the class %s has not implemented the interface %s", - this._pluginConfig.getName(), plugin.getClass().getCanonicalName(), + String.format( + "In plugin %s, the class %s has not implemented the interface %s", + this._pluginConfig.getName(), + plugin.getClass().getCanonicalName(), expectedInstanceOf.getCanonicalName())); } log.debug("Successfully created instance of plugin {}", this._pluginConfig.getClassName()); return plugin; } catch (InstantiationException | IllegalAccessException e) { - throw new RuntimeException(String.format("Failed to instantiate the plugin %s", this._pluginConfig.getName()), e); + throw new RuntimeException( + String.format("Failed to instantiate the plugin %s", this._pluginConfig.getName()), e); } } @@ -157,7 +170,8 @@ protected Class loadClass(String s, boolean b) throws ClassNotFoundException byte[] classBytes = getClassData(this._classPathVsZipEntry.get(path)); ProtectionDomain protectionDomain = - this._pluginPermissionManager.createProtectionDomain(this._pluginConfig.getPluginHomeDirectory()); + this._pluginPermissionManager.createProtectionDomain( + this._pluginConfig.getPluginHomeDirectory()); return defineClass(s, classBytes, 0, classBytes.length, protectionDomain); } @@ -210,8 +224,11 @@ private Optional findResourceInPluginJar(String resource) { private Optional findResourceInPluginHome(String resource) { try { - try (Stream stream = Files.find(this._pluginConfig.getPluginHomeDirectory(), 1, - ((path, basicFileAttributes) -> path.toFile().getName().equals(resource)))) { + try (Stream stream = + Files.find( + this._pluginConfig.getPluginHomeDirectory(), + 1, + ((path, basicFileAttributes) -> path.toFile().getName().equals(resource)))) { List resources = stream.collect(Collectors.toList()); if (resources.size() > 0) { log.debug("Number of resources found {}", resources.size()); @@ -227,9 +244,9 @@ private Optional findResourceInPluginHome(String resource) { } /** - * Look for resource in below order - * - First search in plugin jar if not found - * - then search in plugin directory if not found then return null + * Look for resource in below order - First search in plugin jar if not found - then search in + * plugin directory if not found then return null + * * @param resource Resource to find * @return URL of the resource */ diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java index a20e9d0760968..f27a2e2551d58 100644 --- 
a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/JarExtractor.java @@ -8,15 +8,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j class JarExtractor { - private JarExtractor() { - } + private JarExtractor() {} /** * Write url content to destinationFilePath + * * @param url * @param destinationFilePath * @throws IOException @@ -30,4 +29,4 @@ public static void write(@Nonnull URL url, @Nonnull Path destinationFilePath) th } } } -} \ No newline at end of file +} diff --git a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java index 0596f8abcea74..7107787fdec3b 100644 --- a/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java +++ b/metadata-service/plugin/src/main/java/com/datahub/plugins/loader/PluginPermissionManagerImpl.java @@ -11,7 +11,6 @@ import java.security.cert.Certificate; import javax.annotation.Nonnull; - public class PluginPermissionManagerImpl implements PluginPermissionManager { private final SecurityMode _securityMode; @@ -21,7 +20,9 @@ public PluginPermissionManagerImpl(@Nonnull SecurityMode securityMode) { } /** - * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to the plugin code + * Create codeSource instance for the location of pluginHome to apply SecurityMode restriction to + * the plugin code + * * @param pluginHome * @return ProtectionDomain */ diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java index 64c53f1cb6db3..ccc95e4941ad0 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfig.java @@ -8,7 +8,6 @@ import java.util.Map; import org.testng.annotations.Test; - @Test public class TestConfig { @Test @@ -16,8 +15,12 @@ public void testConfig() { PluginConfig authorizerConfig = new PluginConfig(); authorizerConfig.setName("apache-ranger-authorizer"); authorizerConfig.setType(PluginType.AUTHORIZER); - authorizerConfig.setParams(Map.of("className", "com.datahub.authorization.ranger.RangerAuthorizer", "configs", - Map.of("username", "foo", "password", "root123"))); + authorizerConfig.setParams( + Map.of( + "className", + "com.datahub.authorization.ranger.RangerAuthorizer", + "configs", + Map.of("username", "foo", "password", "root123"))); PluginConfig authenticatorConfig = new PluginConfig(); authorizerConfig.setName("sample-authenticator"); diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java index e311aae258109..bfb83f0ddfb24 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigProvider.java @@ -11,7 +11,6 @@ import java.util.function.Consumer; import org.testng.annotations.Test; - @Test public class TestConfigProvider { @Test @@ -26,45 +25,57 @@ public void testConfigurationLoading() throws Exception { List authenticators = authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - List 
authorizers = authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); + List authorizers = + authenticatorPluginPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); assert authenticators.size() != 0; assert authorizers.size() != 0; - Consumer validateAuthenticationPlugin = (plugin) -> { - assert plugin.getName().equals("apache-ranger-authenticator"); + Consumer validateAuthenticationPlugin = + (plugin) -> { + assert plugin.getName().equals("apache-ranger-authenticator"); - assert "com.datahub.ranger.Authenticator".equals(plugin.getClassName()); + assert "com.datahub.ranger.Authenticator".equals(plugin.getClassName()); - assert plugin.getEnabled(); + assert plugin.getEnabled(); - String pluginJarPath = - Paths.get(pluginBaseDirectory.toString(), "apache-ranger-authenticator", "apache-ranger-authenticator.jar") - .toAbsolutePath() - .toString(); - assert pluginJarPath.equals(plugin.getPluginJarPath().toString()); + String pluginJarPath = + Paths.get( + pluginBaseDirectory.toString(), + "apache-ranger-authenticator", + "apache-ranger-authenticator.jar") + .toAbsolutePath() + .toString(); + assert pluginJarPath.equals(plugin.getPluginJarPath().toString()); - String pluginDirectory = Paths.get(pluginBaseDirectory.toString(), plugin.getName()).toAbsolutePath().toString(); - assert pluginDirectory.equals(plugin.getPluginHomeDirectory().toString()); - }; + String pluginDirectory = + Paths.get(pluginBaseDirectory.toString(), plugin.getName()) + .toAbsolutePath() + .toString(); + assert pluginDirectory.equals(plugin.getPluginHomeDirectory().toString()); + }; - Consumer validateAuthorizationPlugin = (plugin) -> { - assert plugin.getName().equals("apache-ranger-authorizer"); + Consumer validateAuthorizationPlugin = + (plugin) -> { + assert plugin.getName().equals("apache-ranger-authorizer"); - assert "com.datahub.ranger.Authorizer".equals(plugin.getClassName()); + assert "com.datahub.ranger.Authorizer".equals(plugin.getClassName()); - assert plugin.getEnabled(); + assert plugin.getEnabled(); - assert Paths.get(pluginBaseDirectory.toString(), "apache-ranger-authorizer", "apache-ranger-authorizer.jar") - .toAbsolutePath() - .toString() - .equals(plugin.getPluginJarPath().toString()); + assert Paths.get( + pluginBaseDirectory.toString(), + "apache-ranger-authorizer", + "apache-ranger-authorizer.jar") + .toAbsolutePath() + .toString() + .equals(plugin.getPluginJarPath().toString()); - assert Paths.get(pluginBaseDirectory.toString(), plugin.getName()) - .toAbsolutePath() - .toString() - .equals(plugin.getPluginHomeDirectory().toString()); - }; + assert Paths.get(pluginBaseDirectory.toString(), plugin.getName()) + .toAbsolutePath() + .toString() + .equals(plugin.getPluginHomeDirectory().toString()); + }; authenticators.forEach(validateAuthenticationPlugin); authorizers.forEach(validateAuthorizationPlugin); diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java index d85bfc0379d17..6596ca0c83f33 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestConfigValidationUtils.java @@ -6,7 +6,6 @@ import java.util.List; import org.testng.annotations.Test; - @Test public class TestConfigValidationUtils { diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java 
b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java index 314849e8ebea5..5e447caa292e2 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestIsolatedClassLoader.java @@ -30,35 +30,32 @@ import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; - /** - * This test case covers below scenarios - * 1. Loading plugin configuration and validating the loaded configuration against the expected configuration. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testConfigurationLoading()} - * test + * This test case covers below scenarios 1. Loading plugin configuration and validating the loaded + * configuration against the expected configuration. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testConfigurationLoading()} test * - * 2. Plugin name should be unique in config.yaml. The plugin framework should raise error if more than one plugin - * has the same name. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testDuplicatePluginName()} - * test + *
<p>
2. Plugin name should be unique in config.yaml. The plugin framework should raise error if + * more than one plugin has the same name. This scenario is covered in @{link + * com.datahub.plugins.auth.TestIsolatedClassLoader#testDuplicatePluginName()} test * - * 3. Developer can provide plugin jar file name in config.yaml. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testJarFileName()} test + *
<p>
3. Developer can provide plugin jar file name in config.yaml. This scenario is covered + * in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testJarFileName()} test * - * 4. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthenticatorPlugin()} covers the valid - * authenticator plugin execution. - * Plugin used in this test-case is metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub - * /plugins/test/TestAuthenticator.java + *
<p>
4. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthenticatorPlugin()} + * covers the valid authenticator plugin execution. Plugin used in this test-case is + * metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub + * /plugins/test/TestAuthenticator.java * - * 5. Test @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthorizerPlugin()} covers the valid - * authorizer plugin execution - * Plugin used in this test-case is metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub - * /plugins/test/TestAuthorizer.java + *

5. Test {@link com.datahub.plugins.auth.TestIsolatedClassLoader#testAuthorizerPlugin()} covers + the valid authorizer plugin execution. The plugin used in this test case is + metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub + /plugins/test/TestAuthorizer.java * - * 6. The plugin framework should raise error if authenticator plugin is configured as authorizer plugin or vice-versa. - * This scenario is covered in @{link com.datahub.plugins.auth.TestIsolatedClassLoader#testIncorrectImplementation - * ()}. - * The test case tries to load authorizer plugin as authenticator plugin + *

6. The plugin framework should raise an error if an authenticator plugin is configured as an authorizer + plugin or vice-versa. This scenario is covered in {@link + com.datahub.plugins.auth.TestIsolatedClassLoader#testIncorrectImplementation()}. The test case + tries to load an authorizer plugin as an authenticator plugin */ class TestIsolatedClassLoader { @@ -84,22 +81,34 @@ public void testDuplicatePluginName() { public void testJarFileName() throws Exception { Path configPath = Paths.get("src", "test", "resources", "plugin-jar-from-jarFileName"); - Path authenticatorPluginJarPath = Paths.get(configPath.toAbsolutePath().toString(), "apache-ranger-authenticator", - "apache-ranger-authenticator-v1.0.1.jar"); - Config config = (new ConfigProvider(configPath)).load().orElseThrow(() -> new Exception("Should not be empty")); - List pluginConfig = (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHENTICATOR); - pluginConfig.forEach((pluginConfigWithJar) -> { - assert pluginConfigWithJar.getPluginJarPath().equals(authenticatorPluginJarPath); - }); - - Path authorizerPluginJarPath = Paths.get(configPath.toAbsolutePath().toString(), "apache-ranger-authorizer", - "apache-ranger-authorizer-v2.0.1.jar"); + Path authenticatorPluginJarPath = + Paths.get( + configPath.toAbsolutePath().toString(), + "apache-ranger-authenticator", + "apache-ranger-authenticator-v1.0.1.jar"); + Config config = + (new ConfigProvider(configPath)) + .load() + .orElseThrow(() -> new Exception("Should not be empty")); + List pluginConfig = + (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHENTICATOR); + pluginConfig.forEach( + (pluginConfigWithJar) -> { + assert pluginConfigWithJar.getPluginJarPath().equals(authenticatorPluginJarPath); + }); + + Path authorizerPluginJarPath = + Paths.get( + configPath.toAbsolutePath().toString(), + "apache-ranger-authorizer", + "apache-ranger-authorizer-v2.0.1.jar"); List authorizerPluginConfigs = (new PluginConfigFactory(config)).loadPluginConfigs(PluginType.AUTHORIZER); - authorizerPluginConfigs.forEach((pluginConfigWithJar) -> { - assert pluginConfigWithJar.getPluginJarPath().equals(authorizerPluginJarPath); - }); + authorizerPluginConfigs.forEach( + (pluginConfigWithJar) -> { + assert pluginConfigWithJar.getPluginJarPath().equals(authorizerPluginJarPath); + }); } public static Path getSamplePluginDirectory() { @@ -145,14 +154,21 @@ public void testAuthenticatorPlugin() throws ClassNotFoundException, Authenticat // authenticator plugin config instance AuthenticatorPluginConfig authenticatorPluginConfig = getAuthenticatorPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); // initiate and invoke the init and authenticate methods - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); - AuthenticatorContext authenticatorContext = new AuthenticatorContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, authenticatorPluginConfig.getPluginHomeDirectory().toString())); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); +
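The scenarios above all funnel through one loading path. A minimal sketch of that path, assuming the ConfigProvider/PluginConfigFactory API these tests exercise (the directory name, the error message, and the List<PluginConfig> element type are illustrative assumptions, not shipped code):

    // Mirrors the loading path described in the class Javadoc above; assumes the
    // com.datahub.plugins config/factory types used elsewhere in this file.
    static List<PluginConfig> loadAuthenticators(Path pluginBaseDirectory) throws Exception {
      Config config =
          new ConfigProvider(pluginBaseDirectory)
              .load()
              .orElseThrow(() -> new Exception("config.yaml not found"));
      // Scenario 2: duplicate plugin names in config.yaml fail during loading.
      return new PluginConfigFactory(config).loadPluginConfigs(PluginType.AUTHENTICATOR);
    }

Scenario 6 then fails one step later: instantiating a loaded authorizer entry through instantiatePlugin(Authenticator.class) surfaces an InstantiationException, which is exactly what testIncorrectImplementation() asserts.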
AuthenticatorContext authenticatorContext = + new AuthenticatorContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + authenticatorPluginConfig.getPluginHomeDirectory().toString())); AuthenticationRequest request = new AuthenticationRequest(ImmutableMap.of("foo", "bar")); - authenticator.init(authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), authenticatorContext); + authenticator.init( + authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), authenticatorContext); Authentication authentication = authenticator.authenticate(request); assert authentication.getActor().getId().equals("fake"); @@ -163,13 +179,20 @@ public void testAuthorizerPlugin() throws ClassNotFoundException, Authentication // authenticator plugin config instance AuthorizerPluginConfig authorizerPluginConfig = getAuthorizerPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authorizerPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authorizerPluginConfig); // initiate and invoke the init and authenticate methods Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authorizer.class); - AuthorizerContext authorizerContext = new AuthorizerContext( - ImmutableMap.of(PluginConstant.PLUGIN_HOME, authorizerPluginConfig.getPluginHomeDirectory().toString()), null); - AuthorizationRequest authorizationRequest = new AuthorizationRequest("urn:li:user:fake", "test", Optional.empty()); + AuthorizerContext authorizerContext = + new AuthorizerContext( + ImmutableMap.of( + PluginConstant.PLUGIN_HOME, + authorizerPluginConfig.getPluginHomeDirectory().toString()), + null); + AuthorizationRequest authorizationRequest = + new AuthorizationRequest("urn:li:user:fake", "test", Optional.empty()); authorizer.init(authorizerPluginConfig.getConfigs().orElse(new HashMap<>()), authorizerContext); assert authorizer.authorize(authorizationRequest).getMessage().equals("fake message"); } @@ -178,13 +201,17 @@ public void testAuthorizerPlugin() throws ClassNotFoundException, Authentication public void testIncorrectImplementation() { AuthorizerPluginConfig authorizerPluginConfig = getAuthorizerPluginConfig(); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authorizerPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authorizerPluginConfig); // initiate and invoke the init and authenticate methods try { - // Authorizer configuration is provided, however here we were expecting that plugin should be of type + // Authorizer configuration is provided, however here we were expecting that plugin should be + // of type // Authenticator.class - Authorizer authorizer = (Authorizer) isolatedClassLoader.instantiatePlugin(Authenticator.class); + Authorizer authorizer = + (Authorizer) isolatedClassLoader.instantiatePlugin(Authenticator.class); assert authorizer != null; } catch (RuntimeException | ClassNotFoundException e) { assert e.getCause() instanceof 
java.lang.InstantiationException; @@ -197,10 +224,13 @@ public void testLenientMode() throws ClassNotFoundException, AuthenticationExcep AuthenticatorPluginConfig authenticatorPluginConfig = getAuthenticatorPluginConfig(); authenticatorPluginConfig.setClassName("com.datahub.plugins.test.TestLenientModeAuthenticator"); // create IsolatedClassLoader - PluginPermissionManager permissionManager = new PluginPermissionManagerImpl(SecurityMode.LENIENT); - IsolatedClassLoader isolatedClassLoader = new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); + PluginPermissionManager permissionManager = + new PluginPermissionManagerImpl(SecurityMode.LENIENT); + IsolatedClassLoader isolatedClassLoader = + new IsolatedClassLoader(permissionManager, authenticatorPluginConfig); // initiate and invoke the init and authenticate methods - Authenticator authenticator = (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); + Authenticator authenticator = + (Authenticator) isolatedClassLoader.instantiatePlugin(Authenticator.class); authenticator.init(authenticatorPluginConfig.getConfigs().orElse(new HashMap<>()), null); AuthenticationRequest request = new AuthenticationRequest(ImmutableMap.of("foo", "bar")); assert authenticator.authenticate(request) != null; diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java index 1d182f5fa8ea7..f620a1687064c 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginConfigFactory.java @@ -10,7 +10,6 @@ import java.util.List; import org.testng.annotations.Test; - public class TestPluginConfigFactory { @Test @@ -26,14 +25,20 @@ public void authConfig() throws Exception { // Load authenticator plugin configuration List authenticatorConfigs = authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHENTICATOR); - authenticatorConfigs.forEach(c -> { - assert c.getClassName().equals("com.datahub.ranger.Authenticator"); // className should match to Authenticator - }); + authenticatorConfigs.forEach( + c -> { + assert c.getClassName() + .equals( + "com.datahub.ranger.Authenticator"); // className should match to Authenticator + }); // Load authorizer plugin configuration - List authorizerConfigs = authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); - authorizerConfigs.forEach(c -> { - assert c.getClassName().equals("com.datahub.ranger.Authorizer"); // className should match to Authorizer - }); + List authorizerConfigs = + authenticatorPluginConfigFactory.loadPluginConfigs(PluginType.AUTHORIZER); + authorizerConfigs.forEach( + c -> { + assert c.getClassName() + .equals("com.datahub.ranger.Authorizer"); // className should match to Authorizer + }); } } diff --git a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java index e6882e7de3120..56e4c150b100c 100644 --- a/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java +++ b/metadata-service/plugin/src/test/java/com/datahub/plugins/auth/TestPluginPermissionManager.java @@ -14,19 +14,23 @@ import java.util.Map; import org.testng.annotations.Test; - @Test public class TestPluginPermissionManager { @Test public void 
testRestrictedMode() throws MalformedURLException { - PluginPermissionManagerImpl pluginPermissionManager = new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); + PluginPermissionManagerImpl pluginPermissionManager = + new PluginPermissionManagerImpl(SecurityMode.RESTRICTED); - Path pluginHome = Paths.get("src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); + Path pluginHome = + Paths.get( + "src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); - ProtectionDomain protectionDomain = pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); + ProtectionDomain protectionDomain = + pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); // provided pluginHome and codeSource in protection domain should be equal - assert pluginHome.toUri() + assert pluginHome + .toUri() .toURL() .toExternalForm() .equals(protectionDomain.getCodeSource().getLocation().toExternalForm()); @@ -43,21 +47,27 @@ public void testRestrictedMode() throws MalformedURLException { map.put(pluginHome.toAbsolutePath() + "/*", "read,write,delete"); // Compare actual with expected - permissions.forEach(permission -> { - assert map.keySet().contains(permission.getName()); - assert map.values().contains(permission.getActions()); - }); + permissions.forEach( + permission -> { + assert map.keySet().contains(permission.getName()); + assert map.values().contains(permission.getActions()); + }); } public void testLenientMode() throws MalformedURLException { - PluginPermissionManagerImpl pluginPermissionManager = new PluginPermissionManagerImpl(SecurityMode.LENIENT); + PluginPermissionManagerImpl pluginPermissionManager = + new PluginPermissionManagerImpl(SecurityMode.LENIENT); - Path pluginHome = Paths.get("src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); + Path pluginHome = + Paths.get( + "src", "test", "resources", "valid-base-plugin-dir1", "apache-ranger-authenticator"); - ProtectionDomain protectionDomain = pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); + ProtectionDomain protectionDomain = + pluginPermissionManager.createProtectionDomain(pluginHome.toAbsolutePath()); // provided pluginHome and codeSource in protection domain should be equal - assert pluginHome.toUri() + assert pluginHome + .toUri() .toURL() .toExternalForm() .equals(protectionDomain.getCodeSource().getLocation().toExternalForm()); @@ -68,8 +78,9 @@ public void testLenientMode() throws MalformedURLException { // It should have 1 permission assert permissions.size() == 1; - permissions.forEach(permission -> { - assert permission.getName().equals(""); - }); + permissions.forEach( + permission -> { + assert permission.getName().equals(""); + }); } } diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java index 4fb958de2edd6..e234a150ccd73 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthenticator.java @@ -25,13 +25,13 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TestAuthenticator implements Authenticator { private AuthenticatorContext _authenticatorContext; @Override - public void 
init(@Nonnull Map authenticatorConfig, @Nullable AuthenticatorContext context) { + public void init( + @Nonnull Map authenticatorConfig, @Nullable AuthenticatorContext context) { /* * authenticatorConfig contains key, value pairs set in plugins[].params.configs of config.yml */ @@ -48,7 +48,8 @@ public void init(@Nonnull Map authenticatorConfig, @Nullable Aut private void readInputStream() { // Test resource as stream is working - try (InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { + try (InputStream inputStream = + this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { assert inputStream != null; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); assert reader.readLine() != null; @@ -59,9 +60,12 @@ private void readInputStream() { } private void accessFile() { - // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write on plugin directory + // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write + // on plugin directory Path pluginDirectory = - Paths.get((String) this._authenticatorContext.data().get(PluginConstant.PLUGIN_HOME), "tmp_file1.txt"); + Paths.get( + (String) this._authenticatorContext.data().get(PluginConstant.PLUGIN_HOME), + "tmp_file1.txt"); try { try (BufferedWriter writer = new BufferedWriter(new FileWriter(pluginDirectory.toString()))) { @@ -79,7 +83,8 @@ private void accessFile() { public void accessSystemProperty() { try { System.getProperty("user.home"); - throw new RuntimeException("Plugin is able to access system properties"); // we should not reach here + throw new RuntimeException( + "Plugin is able to access system properties"); // we should not reach here } catch (AccessControlException accessControlException) { log.info("Expected: Don't have permission to read system properties"); } diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java index e5f3e223ff505..4dcace841205a 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestAuthorizer.java @@ -21,7 +21,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TestAuthorizer implements Authorizer { private AuthorizerContext _authorizerContext; @@ -45,9 +44,12 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { URL url = this.getClass().getClassLoader().getResource("foo_bar.json"); assert url != null; - // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write on plugin directory + // Try to create a file on PLUGIN_DIRECTORY to test plugin should have permission to read/write + // on plugin directory Path pluginDirectory = - Paths.get((String) this._authorizerContext.data().get(PluginConstant.PLUGIN_HOME), "tmp_file1.txt"); + Paths.get( + (String) this._authorizerContext.data().get(PluginConstant.PLUGIN_HOME), + "tmp_file1.txt"); try { try (BufferedWriter writer = new BufferedWriter(new FileWriter(pluginDirectory.toString()))) { @@ -62,7 +64,8 @@ public AuthorizationResult authorize(@Nonnull AuthorizationRequest request) { } // Test resource as stream is working - try (InputStream 
inputStream = this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { + try (InputStream inputStream = + this.getClass().getClassLoader().getResourceAsStream("foo_bar.json")) { assert inputStream != null; BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream)); assert reader.readLine() != null; @@ -78,4 +81,3 @@ public AuthorizedActors authorizedActors(String privilege, Optional return new AuthorizedActors("ALL", null, null, null, true, true); } } - diff --git a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java index 2cc27f11a6254..d143b3803ca34 100644 --- a/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java +++ b/metadata-service/plugin/src/test/sample-test-plugins/src/main/java/com/datahub/plugins/test/TestLenientModeAuthenticator.java @@ -11,18 +11,17 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class TestLenientModeAuthenticator implements Authenticator { @Override - public void init(@Nonnull Map authenticatorConfig, @Nullable AuthenticatorContext context) { - - } + public void init( + @Nonnull Map authenticatorConfig, @Nullable AuthenticatorContext context) {} @Nullable @Override public Authentication authenticate(@Nonnull AuthenticationRequest authenticationRequest) throws AuthenticationException { - // We should be able to access user directory as we are going to be loaded with Lenient mode IsolatedClassLoader + // We should be able to access user directory as we are going to be loaded with Lenient mode + // IsolatedClassLoader String userHome = System.getProperty("user.home"); assert userHome != null; return new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json index 3e1b975311b11..27581334814ce 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.analytics.analytics.restspec.json @@ -4,10 +4,12 @@ "path" : "/analytics", "schema" : "com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse", "doc" : "Rest.li entry point: /analytics\n\ngenerated from: com.linkedin.metadata.resources.analytics.Analytics", + "resourceClass" : "com.linkedin.metadata.resources.analytics.Analytics", "simple" : { "supports" : [ ], "actions" : [ { "name" : "getTimeseriesStats", + "javaMethodName" : "getTimeseriesStats", "parameters" : [ { "name" : "entityName", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json index 3a0df137a0469..917540aca8728 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.aspects.restspec.json @@ -4,6 +4,7 @@ "path" : "/aspects", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.AspectResource", + "resourceClass" : 
"com.linkedin.metadata.resources.entity.AspectResource", "collection" : { "identifier" : { "name" : "aspectsId", @@ -12,6 +13,7 @@ "supports" : [ "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.\n TODO: Get rid of this and migrate to getAspect.", "parameters" : [ { "name" : "aspect", @@ -25,6 +27,7 @@ } ], "actions" : [ { "name" : "getCount", + "javaMethodName" : "getCount", "parameters" : [ { "name" : "aspect", "type" : "string" @@ -36,6 +39,7 @@ "returns" : "int" }, { "name" : "getTimeseriesAspectValues", + "javaMethodName" : "getTimeseriesAspectValues", "parameters" : [ { "name" : "urn", "type" : "string" @@ -73,6 +77,7 @@ "returns" : "com.linkedin.aspect.GetTimeseriesAspectValuesResponse" }, { "name" : "ingestProposal", + "javaMethodName" : "ingestProposal", "parameters" : [ { "name" : "proposal", "type" : "com.linkedin.mxe.MetadataChangeProposal" @@ -84,6 +89,7 @@ "returns" : "string" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json index a9de21d08aedc..eac1cc690a60d 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entities.restspec.json @@ -4,6 +4,7 @@ "path" : "/entities", "schema" : "com.linkedin.entity.Entity", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityResource", "collection" : { "identifier" : { "name" : "entitiesId", @@ -12,6 +13,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -20,6 +22,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -28,6 +31,7 @@ } ], "actions" : [ { "name" : "applyRetention", + "javaMethodName" : "applyRetention", "parameters" : [ { "name" : "start", "type" : "int", @@ -52,6 +56,7 @@ "returns" : "string" }, { "name" : "autocomplete", + "javaMethodName" : "autocomplete", "parameters" : [ { "name" : "entity", "type" : "string" @@ -73,6 +78,7 @@ "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { "name" : "batchGetTotalEntityCount", + "javaMethodName" : "batchGetTotalEntityCount", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }" @@ -80,6 +86,7 @@ "returns" : "{ \"type\" : \"map\", \"values\" : \"long\" }" }, { "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.entity.Entity\" }" @@ -90,6 +97,7 @@ } ] }, { "name" : "browse", + "javaMethodName" : "browse", "parameters" : [ { "name" : "entity", "type" : "string" @@ -110,6 +118,7 @@ "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { "name" : "delete", + "javaMethodName" : "deleteEntity", "doc" : "Deletes all data related to an 
individual urn(entity).\nService Returns: - a DeleteEntityResponse object.", "parameters" : [ { "name" : "urn", @@ -119,7 +128,7 @@ "name" : "aspectName", "type" : "string", "optional" : true, - "doc" : "- the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects)." + "doc" : "- the optional aspect name if only want to delete the aspect (applicable only\n for timeseries aspects)." }, { "name" : "startTimeMillis", "type" : "long", @@ -134,6 +143,7 @@ "returns" : "com.linkedin.metadata.run.DeleteEntityResponse" }, { "name" : "deleteAll", + "javaMethodName" : "deleteEntities", "parameters" : [ { "name" : "registryId", "type" : "string", @@ -146,6 +156,7 @@ "returns" : "com.linkedin.metadata.run.RollbackResponse" }, { "name" : "deleteReferences", + "javaMethodName" : "deleteReferencesTo", "parameters" : [ { "name" : "urn", "type" : "string" @@ -157,6 +168,7 @@ "returns" : "com.linkedin.metadata.run.DeleteReferencesResponse" }, { "name" : "exists", + "javaMethodName" : "exists", "parameters" : [ { "name" : "urn", "type" : "string" @@ -164,6 +176,7 @@ "returns" : "boolean" }, { "name" : "filter", + "javaMethodName" : "filter", "parameters" : [ { "name" : "entity", "type" : "string" @@ -184,6 +197,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "getBrowsePaths", + "javaMethodName" : "getBrowsePaths", "parameters" : [ { "name" : "urn", "type" : "com.linkedin.common.Urn" @@ -191,6 +205,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"string\" }" }, { "name" : "getTotalEntityCount", + "javaMethodName" : "getTotalEntityCount", "parameters" : [ { "name" : "entity", "type" : "string" @@ -198,6 +213,7 @@ "returns" : "long" }, { "name" : "ingest", + "javaMethodName" : "ingest", "parameters" : [ { "name" : "entity", "type" : "com.linkedin.entity.Entity" @@ -208,6 +224,7 @@ } ] }, { "name" : "list", + "javaMethodName" : "list", "parameters" : [ { "name" : "entity", "type" : "string" @@ -229,6 +246,7 @@ "returns" : "com.linkedin.metadata.query.ListResult" }, { "name" : "listUrns", + "javaMethodName" : "listUrns", "parameters" : [ { "name" : "entity", "type" : "string" @@ -242,6 +260,7 @@ "returns" : "com.linkedin.metadata.query.ListUrnsResult" }, { "name" : "scrollAcrossEntities", + "javaMethodName" : "scrollAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -274,6 +293,7 @@ "returns" : "com.linkedin.metadata.search.ScrollResult" }, { "name" : "scrollAcrossLineage", + "javaMethodName" : "scrollAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -325,6 +345,7 @@ "returns" : "com.linkedin.metadata.search.LineageScrollResult" }, { "name" : "search", + "javaMethodName" : "search", "parameters" : [ { "name" : "entity", "type" : "string" @@ -360,6 +381,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossEntities", + "javaMethodName" : "searchAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -389,6 +411,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossLineage", + "javaMethodName" : "searchAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -437,6 +460,7 @@ "returns" : "com.linkedin.metadata.search.LineageSearchResult" }, { "name" : "setWritable", + "javaMethodName" : "setWriteable", "parameters" : [ { "name" : "value", "type" : "boolean", diff --git 
a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json index 0c92a981c7356..33cfba0f27802 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesV2.restspec.json @@ -4,6 +4,7 @@ "path" : "/entitiesV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityV2Resource", "collection" : { "identifier" : { "name" : "entitiesV2Id", @@ -12,6 +13,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -20,6 +22,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json index 579f1d7c7dddc..f3eb9d38dc6ae 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.entitiesVersionedV2.restspec.json @@ -4,6 +4,7 @@ "path" : "/entitiesVersionedV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", "collection" : { "identifier" : { "name" : "entitiesVersionedV2Id", @@ -12,6 +13,7 @@ "supports" : [ "batch_get" ], "methods" : [ { "method" : "batch_get", + "javaMethodName" : "batchGetVersioned", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json index 5eaa34bc7a2e9..7284cd2bac48f 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json @@ -4,6 +4,7 @@ "path" : "/runs", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "resource for showing information and rolling back runs\n\ngenerated from: com.linkedin.metadata.resources.entity.BatchIngestionRunResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.BatchIngestionRunResource", "collection" : { "identifier" : { "name" : "runsId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "describe", + "javaMethodName" : "describe", "parameters" : [ { "name" : "runId", "type" : "string" @@ -33,6 +35,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.AspectRowSummary\" }" }, { "name" : "list", + "javaMethodName" : "list", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { 
"name" : "pageOffset", @@ -50,6 +53,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.IngestionRunSummary\" }" }, { "name" : "rollback", + "javaMethodName" : "rollback", "doc" : "Rolls back an ingestion run", "parameters" : [ { "name" : "runId", diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json index 68f9fe8ae152e..7056368d82c7d 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.lineage.relationships.restspec.json @@ -4,10 +4,12 @@ "path" : "/relationships", "schema" : "com.linkedin.common.EntityRelationships", "doc" : "Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types}\n\ngenerated from: com.linkedin.metadata.resources.lineage.Relationships", + "resourceClass" : "com.linkedin.metadata.resources.lineage.Relationships", "simple" : { "supports" : [ "delete", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "parameters" : [ { "name" : "urn", "type" : "string" @@ -28,6 +30,7 @@ } ] }, { "method" : "delete", + "javaMethodName" : "delete", "parameters" : [ { "name" : "urn", "type" : "string" @@ -35,6 +38,7 @@ } ], "actions" : [ { "name" : "getLineage", + "javaMethodName" : "getLineage", "parameters" : [ { "name" : "urn", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json index 958ec13b37fca..0fb6a18a7974b 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.operations.operations.restspec.json @@ -4,6 +4,7 @@ "path" : "/operations", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Endpoints for performing maintenance operations\n\ngenerated from: com.linkedin.metadata.resources.operations.OperationsResource", + "resourceClass" : "com.linkedin.metadata.resources.operations.OperationsResource", "collection" : { "identifier" : { "name" : "operationsId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "getEsTaskStatus", + "javaMethodName" : "getTaskStatus", "parameters" : [ { "name" : "nodeId", "type" : "string", @@ -28,9 +30,11 @@ "returns" : "string" }, { "name" : "getIndexSizes", + "javaMethodName" : "getIndexSizes", "returns" : "com.linkedin.timeseries.TimeseriesIndicesSizesResult" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", @@ -55,6 +59,7 @@ "returns" : "string" }, { "name" : "truncateTimeseriesAspect", + "javaMethodName" : "truncateTimeseriesAspect", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json index 3346ddd23e3ba..9fbb3e9b6698e 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.platform.platform.restspec.json @@ -4,6 +4,7 @@ "path" : "/platform", "schema" : "com.linkedin.entity.Entity", "doc" : "DataHub Platform Actions\n\ngenerated from: 
com.linkedin.metadata.resources.platform.PlatformResource", + "resourceClass" : "com.linkedin.metadata.resources.platform.PlatformResource", "collection" : { "identifier" : { "name" : "platformId", @@ -12,6 +13,7 @@ "supports" : [ ], "actions" : [ { "name" : "producePlatformEvent", + "javaMethodName" : "producePlatformEvent", "parameters" : [ { "name" : "name", "type" : "string" diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json index 2a4cf40b58412..42f0894fbb7a6 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.usage.usageStats.restspec.json @@ -7,6 +7,7 @@ "path" : "/usageStats", "schema" : "com.linkedin.usage.UsageAggregation", "doc" : "Rest.li entry point: /usageStats\n\ngenerated from: com.linkedin.metadata.resources.usage.UsageStats", + "resourceClass" : "com.linkedin.metadata.resources.usage.UsageStats", "simple" : { "supports" : [ ], "actions" : [ { @@ -14,12 +15,14 @@ "deprecated" : { } }, "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "buckets", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.usage.UsageAggregation\" }" } ] }, { "name" : "query", + "javaMethodName" : "query", "parameters" : [ { "name" : "resource", "type" : "string" @@ -42,6 +45,7 @@ "returns" : "com.linkedin.usage.UsageQueryResult" }, { "name" : "queryRange", + "javaMethodName" : "queryRange", "parameters" : [ { "name" : "resource", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json index d75ec58546465..c4532cba9e6be 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.analytics.analytics.snapshot.json @@ -222,10 +222,12 @@ "path" : "/analytics", "schema" : "com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse", "doc" : "Rest.li entry point: /analytics\n\ngenerated from: com.linkedin.metadata.resources.analytics.Analytics", + "resourceClass" : "com.linkedin.metadata.resources.analytics.Analytics", "simple" : { "supports" : [ ], "actions" : [ { "name" : "getTimeseriesStats", + "javaMethodName" : "getTimeseriesStats", "parameters" : [ { "name" : "entityName", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index 0403fa2ceea6f..bca3e7161c8b8 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -3993,6 +3993,7 @@ "path" : "/aspects", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.AspectResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.AspectResource", "collection" : { "identifier" : { "name" : "aspectsId", @@ -4001,6 +4002,7 @@ "supports" : [ "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity 
that is made up of latest versions of specified aspects.\n TODO: Get rid of this and migrate to getAspect.", "parameters" : [ { "name" : "aspect", @@ -4014,6 +4016,7 @@ } ], "actions" : [ { "name" : "getCount", + "javaMethodName" : "getCount", "parameters" : [ { "name" : "aspect", "type" : "string" @@ -4025,6 +4028,7 @@ "returns" : "int" }, { "name" : "getTimeseriesAspectValues", + "javaMethodName" : "getTimeseriesAspectValues", "parameters" : [ { "name" : "urn", "type" : "string" @@ -4062,6 +4066,7 @@ "returns" : "com.linkedin.aspect.GetTimeseriesAspectValuesResponse" }, { "name" : "ingestProposal", + "javaMethodName" : "ingestProposal", "parameters" : [ { "name" : "proposal", "type" : "com.linkedin.mxe.MetadataChangeProposal" @@ -4073,6 +4078,7 @@ "returns" : "string" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index d79a4a1919af9..69184856e4f9e 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -6289,6 +6289,7 @@ "path" : "/entities", "schema" : "com.linkedin.entity.Entity", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityResource", "collection" : { "identifier" : { "name" : "entitiesId", @@ -6297,6 +6298,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -6305,6 +6307,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6313,6 +6316,7 @@ } ], "actions" : [ { "name" : "applyRetention", + "javaMethodName" : "applyRetention", "parameters" : [ { "name" : "start", "type" : "int", @@ -6337,6 +6341,7 @@ "returns" : "string" }, { "name" : "autocomplete", + "javaMethodName" : "autocomplete", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6358,6 +6363,7 @@ "returns" : "com.linkedin.metadata.query.AutoCompleteResult" }, { "name" : "batchGetTotalEntityCount", + "javaMethodName" : "batchGetTotalEntityCount", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }" @@ -6365,6 +6371,7 @@ "returns" : "{ \"type\" : \"map\", \"values\" : \"long\" }" }, { "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.entity.Entity\" }" @@ -6375,6 +6382,7 @@ } ] }, { "name" : "browse", + "javaMethodName" : "browse", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6395,6 +6403,7 @@ "returns" : "com.linkedin.metadata.browse.BrowseResult" }, { "name" : "delete", + "javaMethodName" : "deleteEntity", "doc" : "Deletes all data related to an individual urn(entity).\nService Returns: - a DeleteEntityResponse object.", "parameters" : [ { "name" : "urn", @@ -6404,7 +6413,7 @@ "name" : "aspectName", "type" : "string", "optional" : true, 
- "doc" : "- the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects)." + "doc" : "- the optional aspect name if only want to delete the aspect (applicable only\n for timeseries aspects)." }, { "name" : "startTimeMillis", "type" : "long", @@ -6419,6 +6428,7 @@ "returns" : "com.linkedin.metadata.run.DeleteEntityResponse" }, { "name" : "deleteAll", + "javaMethodName" : "deleteEntities", "parameters" : [ { "name" : "registryId", "type" : "string", @@ -6431,6 +6441,7 @@ "returns" : "com.linkedin.metadata.run.RollbackResponse" }, { "name" : "deleteReferences", + "javaMethodName" : "deleteReferencesTo", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6442,6 +6453,7 @@ "returns" : "com.linkedin.metadata.run.DeleteReferencesResponse" }, { "name" : "exists", + "javaMethodName" : "exists", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6449,6 +6461,7 @@ "returns" : "boolean" }, { "name" : "filter", + "javaMethodName" : "filter", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6469,6 +6482,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "getBrowsePaths", + "javaMethodName" : "getBrowsePaths", "parameters" : [ { "name" : "urn", "type" : "com.linkedin.common.Urn" @@ -6476,6 +6490,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"string\" }" }, { "name" : "getTotalEntityCount", + "javaMethodName" : "getTotalEntityCount", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6483,6 +6498,7 @@ "returns" : "long" }, { "name" : "ingest", + "javaMethodName" : "ingest", "parameters" : [ { "name" : "entity", "type" : "com.linkedin.entity.Entity" @@ -6493,6 +6509,7 @@ } ] }, { "name" : "list", + "javaMethodName" : "list", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6514,6 +6531,7 @@ "returns" : "com.linkedin.metadata.query.ListResult" }, { "name" : "listUrns", + "javaMethodName" : "listUrns", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6527,6 +6545,7 @@ "returns" : "com.linkedin.metadata.query.ListUrnsResult" }, { "name" : "scrollAcrossEntities", + "javaMethodName" : "scrollAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6559,6 +6578,7 @@ "returns" : "com.linkedin.metadata.search.ScrollResult" }, { "name" : "scrollAcrossLineage", + "javaMethodName" : "scrollAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6610,6 +6630,7 @@ "returns" : "com.linkedin.metadata.search.LineageScrollResult" }, { "name" : "search", + "javaMethodName" : "search", "parameters" : [ { "name" : "entity", "type" : "string" @@ -6645,6 +6666,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossEntities", + "javaMethodName" : "searchAcrossEntities", "parameters" : [ { "name" : "entities", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", @@ -6674,6 +6696,7 @@ "returns" : "com.linkedin.metadata.search.SearchResult" }, { "name" : "searchAcrossLineage", + "javaMethodName" : "searchAcrossLineage", "parameters" : [ { "name" : "urn", "type" : "string" @@ -6722,6 +6745,7 @@ "returns" : "com.linkedin.metadata.search.LineageSearchResult" }, { "name" : "setWritable", + "javaMethodName" : "setWriteable", "parameters" : [ { "name" : "value", "type" : "boolean", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json index 
c7618e5d3c5a1..3eac87e268f5d 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesV2.snapshot.json @@ -162,6 +162,7 @@ "path" : "/entitiesV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityV2Resource", "collection" : { "identifier" : { "name" : "entitiesV2Id", @@ -170,6 +171,7 @@ "supports" : [ "batch_get", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "aspects", @@ -178,6 +180,7 @@ } ] }, { "method" : "batch_get", + "javaMethodName" : "batchGet", "parameters" : [ { "name" : "aspects", "type" : "{ \"type\" : \"array\", \"items\" : \"string\" }", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json index 45e542883b723..1733537e68f30 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entitiesVersionedV2.snapshot.json @@ -171,6 +171,7 @@ "path" : "/entitiesVersionedV2", "schema" : "com.linkedin.entity.EntityResponse", "doc" : "Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities\n\ngenerated from: com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", + "resourceClass" : "com.linkedin.metadata.resources.entity.EntityVersionedV2Resource", "collection" : { "identifier" : { "name" : "entitiesVersionedV2Id", @@ -179,6 +180,7 @@ "supports" : [ "batch_get" ], "methods" : [ { "method" : "batch_get", + "javaMethodName" : "batchGetVersioned", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index b20953749ac35..09c0185f74f3a 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -3748,6 +3748,7 @@ "path" : "/runs", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "resource for showing information and rolling back runs\n\ngenerated from: com.linkedin.metadata.resources.entity.BatchIngestionRunResource", + "resourceClass" : "com.linkedin.metadata.resources.entity.BatchIngestionRunResource", "collection" : { "identifier" : { "name" : "runsId", @@ -3756,6 +3757,7 @@ "supports" : [ ], "actions" : [ { "name" : "describe", + "javaMethodName" : "describe", "parameters" : [ { "name" : "runId", "type" : "string" @@ -3777,6 +3779,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.metadata.run.AspectRowSummary\" }" }, { "name" : "list", + "javaMethodName" : "list", "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", "parameters" : [ { "name" : "pageOffset", @@ -3794,6 +3797,7 @@ "returns" : "{ \"type\" : \"array\", \"items\" : 
\"com.linkedin.metadata.run.IngestionRunSummary\" }" }, { "name" : "rollback", + "javaMethodName" : "rollback", "doc" : "Rolls back an ingestion run", "parameters" : [ { "name" : "runId", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json index 6febf225ad77d..9aa40edd0b118 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.lineage.relationships.snapshot.json @@ -180,10 +180,12 @@ "path" : "/relationships", "schema" : "com.linkedin.common.EntityRelationships", "doc" : "Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types}\n\ngenerated from: com.linkedin.metadata.resources.lineage.Relationships", + "resourceClass" : "com.linkedin.metadata.resources.lineage.Relationships", "simple" : { "supports" : [ "delete", "get" ], "methods" : [ { "method" : "get", + "javaMethodName" : "get", "parameters" : [ { "name" : "urn", "type" : "string" @@ -204,6 +206,7 @@ } ] }, { "method" : "delete", + "javaMethodName" : "delete", "parameters" : [ { "name" : "urn", "type" : "string" @@ -211,6 +214,7 @@ } ], "actions" : [ { "name" : "getLineage", + "javaMethodName" : "getLineage", "parameters" : [ { "name" : "urn", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index e29dd6809b968..339ce62de6298 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -3690,6 +3690,7 @@ "path" : "/operations", "schema" : "com.linkedin.metadata.aspect.VersionedAspect", "doc" : "Endpoints for performing maintenance operations\n\ngenerated from: com.linkedin.metadata.resources.operations.OperationsResource", + "resourceClass" : "com.linkedin.metadata.resources.operations.OperationsResource", "collection" : { "identifier" : { "name" : "operationsId", @@ -3698,6 +3699,7 @@ "supports" : [ ], "actions" : [ { "name" : "getEsTaskStatus", + "javaMethodName" : "getTaskStatus", "parameters" : [ { "name" : "nodeId", "type" : "string", @@ -3714,9 +3716,11 @@ "returns" : "string" }, { "name" : "getIndexSizes", + "javaMethodName" : "getIndexSizes", "returns" : "com.linkedin.timeseries.TimeseriesIndicesSizesResult" }, { "name" : "restoreIndices", + "javaMethodName" : "restoreIndices", "parameters" : [ { "name" : "aspect", "type" : "string", @@ -3741,6 +3745,7 @@ "returns" : "string" }, { "name" : "truncateTimeseriesAspect", + "javaMethodName" : "truncateTimeseriesAspect", "parameters" : [ { "name" : "entityType", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index 8391af60f8ece..cb253c458e6c4 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -5542,6 +5542,7 @@ "path" : "/platform", "schema" : "com.linkedin.entity.Entity", "doc" : "DataHub Platform Actions\n\ngenerated from: 
com.linkedin.metadata.resources.platform.PlatformResource", + "resourceClass" : "com.linkedin.metadata.resources.platform.PlatformResource", "collection" : { "identifier" : { "name" : "platformId", @@ -5550,6 +5551,7 @@ "supports" : [ ], "actions" : [ { "name" : "producePlatformEvent", + "javaMethodName" : "producePlatformEvent", "parameters" : [ { "name" : "name", "type" : "string" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json index a21b0c1cd30be..e8e68dae4c368 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.usage.usageStats.snapshot.json @@ -164,6 +164,7 @@ "path" : "/usageStats", "schema" : "com.linkedin.usage.UsageAggregation", "doc" : "Rest.li entry point: /usageStats\n\ngenerated from: com.linkedin.metadata.resources.usage.UsageStats", + "resourceClass" : "com.linkedin.metadata.resources.usage.UsageStats", "simple" : { "supports" : [ ], "actions" : [ { @@ -171,12 +172,14 @@ "deprecated" : { } }, "name" : "batchIngest", + "javaMethodName" : "batchIngest", "parameters" : [ { "name" : "buckets", "type" : "{ \"type\" : \"array\", \"items\" : \"com.linkedin.usage.UsageAggregation\" }" } ] }, { "name" : "query", + "javaMethodName" : "query", "parameters" : [ { "name" : "resource", "type" : "string" @@ -199,6 +202,7 @@ "returns" : "com.linkedin.usage.UsageQueryResult" }, { "name" : "queryRange", + "javaMethodName" : "queryRange", "parameters" : [ { "name" : "resource", "type" : "string" diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java b/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java index a61c6e53ab814..eb04382dda45c 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/BatchGetUtils.java @@ -8,8 +8,6 @@ import com.linkedin.restli.client.base.BatchGetEntityRequestBuilderBase; import com.linkedin.restli.common.ComplexResourceKey; import com.linkedin.restli.common.EmptyRecord; - -import javax.annotation.Nonnull; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -18,47 +16,52 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public final class BatchGetUtils { - private BatchGetUtils() { - // not called - } - - private static int batchSize = 25; + private BatchGetUtils() { + // not called + } - public static < - U extends Urn, - T extends RecordTemplate, - CRK extends ComplexResourceKey, - RB extends BatchGetEntityRequestBuilderBase, - K extends RecordTemplate> Map batchGet( - @Nonnull Set urns, - Function> requestBuilders, - Function getKeyFromUrn, - Function getUrnFromKey, - Client client - ) throws RemoteInvocationException { - AtomicInteger index = new AtomicInteger(0); + private static int batchSize = 25; - final Collection> entityUrnBatches = urns.stream() - .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) - .values(); + public static < + U extends Urn, + T extends RecordTemplate, + CRK extends ComplexResourceKey, + RB extends BatchGetEntityRequestBuilderBase, + K extends RecordTemplate> + Map batchGet( + @Nonnull Set urns, + Function> requestBuilders, + Function getKeyFromUrn, + Function 
getUrnFromKey, + Client client) + throws RemoteInvocationException { + AtomicInteger index = new AtomicInteger(0); - final Map response = new HashMap<>(); + final Collection> entityUrnBatches = + urns.stream() + .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) + .values(); - for (List urnsInBatch : entityUrnBatches) { - BatchGetEntityRequest batchGetRequest = - requestBuilders.apply(null) - .ids(urnsInBatch.stream().map(getKeyFromUrn).collect(Collectors.toSet())) - .build(); - final Map batchResponse = client.sendRequest(batchGetRequest).getResponseEntity().getResults() - .entrySet().stream().collect(Collectors.toMap( - entry -> getUrnFromKey.apply(entry.getKey()), - entry -> entry.getValue().getEntity()) - ); - response.putAll(batchResponse); - } + final Map response = new HashMap<>(); - return response; + for (List urnsInBatch : entityUrnBatches) { + BatchGetEntityRequest batchGetRequest = + requestBuilders + .apply(null) + .ids(urnsInBatch.stream().map(getKeyFromUrn).collect(Collectors.toSet())) + .build(); + final Map batchResponse = + client.sendRequest(batchGetRequest).getResponseEntity().getResults().entrySet().stream() + .collect( + Collectors.toMap( + entry -> getUrnFromKey.apply(entry.getKey()), + entry -> entry.getValue().getEntity())); + response.putAll(batchResponse); } + + return response; + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java index 1ba0e5c3d555a..4474fd5ce67ec 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java @@ -5,20 +5,17 @@ import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; - -import java.util.Objects; -import java.util.Set; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - import com.linkedin.restli.client.AbstractRequestBuilder; import com.linkedin.restli.client.Client; import com.linkedin.restli.client.Request; import com.linkedin.restli.client.Response; +import java.util.Objects; +import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.apache.http.HttpHeaders; - @Slf4j public abstract class BaseClient implements AutoCloseable { @@ -26,7 +23,8 @@ public abstract class BaseClient implements AutoCloseable { protected final BackoffPolicy _backoffPolicy; protected final int _retryCount; - protected final static Set NON_RETRYABLE = Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); + protected static final Set NON_RETRYABLE = + Set.of("com.linkedin.data.template.RequiredFieldNotPresentException"); protected BaseClient(@Nonnull Client restliClient, BackoffPolicy backoffPolicy, int retryCount) { _client = Objects.requireNonNull(restliClient); @@ -34,16 +32,20 @@ protected BaseClient(@Nonnull Client restliClient, BackoffPolicy backoffPolicy, _retryCount = retryCount; } - protected Response sendClientRequest(final AbstractRequestBuilder> requestBuilder) throws RemoteInvocationException { + protected Response sendClientRequest( + final AbstractRequestBuilder> requestBuilder) + throws RemoteInvocationException { return sendClientRequest(requestBuilder, null); } /** - * TODO: Remove unused "actor" parameter. 
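
The BatchGetUtils hunk above is formatting-only, but the idiom it reflows deserves a standalone look: an AtomicInteger counter drives Collectors.groupingBy so that an unordered Set is split into fixed-size batches before each Rest.li batch-get. A minimal sketch of just that idiom, with illustrative String keys in place of urns:

import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

public final class BatchingSketch {
  private static final int BATCH_SIZE = 25; // same constant BatchGetUtils uses

  public static void main(String[] args) {
    Set<String> urns = Set.of("urn:a", "urn:b", "urn:c", "urn:d");

    // Each element draws a unique running index; integer division then
    // buckets indices into groups of at most BATCH_SIZE. This relies on
    // the stream staying sequential.
    AtomicInteger index = new AtomicInteger(0);
    Collection<List<String>> batches =
        urns.stream()
            .collect(Collectors.groupingBy(x -> index.getAndIncrement() / BATCH_SIZE))
            .values();

    batches.forEach(batch -> System.out.println("batch of " + batch.size() + ": " + batch));
  }
}
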
Actor is now implied by the systemClientId + systemClientSecret. + * TODO: Remove unused "actor" parameter. Actor is now implied by the systemClientId + + * systemClientSecret. */ protected Response sendClientRequest( final AbstractRequestBuilder> requestBuilder, - @Nullable final Authentication authentication) throws RemoteInvocationException { + @Nullable final Authentication authentication) + throws RemoteInvocationException { if (authentication != null) { requestBuilder.addHeader(HttpHeaders.AUTHORIZATION, authentication.getCredentials()); } @@ -54,10 +56,15 @@ protected Response sendClientRequest( try { return _client.sendRequest(requestBuilder.build()).getResponse(); } catch (Throwable ex) { - MetricUtils.counter(BaseClient.class, "exception" + MetricUtils.DELIMITER + ex.getClass().getName().toLowerCase()).inc(); - - final boolean skipRetry = NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) - || (ex.getCause() != null && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); + MetricUtils.counter( + BaseClient.class, + "exception" + MetricUtils.DELIMITER + ex.getClass().getName().toLowerCase()) + .inc(); + + final boolean skipRetry = + NON_RETRYABLE.contains(ex.getClass().getCanonicalName()) + || (ex.getCause() != null + && NON_RETRYABLE.contains(ex.getCause().getClass().getCanonicalName())); if (attemptCount == _retryCount || skipRetry) { throw ex; diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java index 79d473d1b0090..56565819afc30 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java @@ -9,126 +9,138 @@ import com.github.benmanes.caffeine.cache.stats.CacheStats; import com.linkedin.metadata.config.cache.client.ClientCacheConfig; import com.linkedin.metadata.utils.metrics.MetricUtils; -import lombok.Builder; -import lombok.NonNull; -import lombok.extern.slf4j.Slf4j; -import org.checkerframework.checker.nullness.qual.Nullable; - import java.util.Map; import java.util.Set; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.function.Function; +import lombok.Builder; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.checkerframework.checker.nullness.qual.Nullable; /** * Generic cache with common configuration for limited weight, per item expiry, and batch loading + * * @param key * @param value */ @Slf4j @Builder public class ClientCache { - @NonNull - protected final C config; - @NonNull - protected final LoadingCache cache; - @NonNull - private final Function, Map> loadFunction; - @NonNull - private final Weigher weigher; - @NonNull - private final BiFunction ttlSecondsFunction; - - public @Nullable V get(@NonNull K key) { - return cache.get(key); - } + @NonNull protected final C config; + @NonNull protected final LoadingCache cache; + @NonNull private final Function, Map> loadFunction; + @NonNull private final Weigher weigher; + @NonNull private final BiFunction ttlSecondsFunction; + + public @Nullable V get(@NonNull K key) { + return cache.get(key); + } + + public @NonNull Map<@NonNull K, @NonNull V> getAll(@NonNull Iterable keys) { + return cache.getAll(keys); + } - public @NonNull Map<@NonNull K, @NonNull V> getAll(@NonNull Iterable keys) { - return 
cache.getAll(keys); + public void refresh(@NonNull K key) { + cache.refresh(key); + } + + public static class ClientCacheBuilder { + + private ClientCacheBuilder cache(LoadingCache cache) { + return null; } - public void refresh(@NonNull K key) { - cache.refresh(key); + private ClientCache build() { + return null; } - public static class ClientCacheBuilder { - - private ClientCacheBuilder cache(LoadingCache cache) { - return null; - } - private ClientCache build() { - return null; - } - - public ClientCache build(Class metricClazz) { - // loads data from entity client - CacheLoader loader = new CacheLoader() { - @Override - public V load(@NonNull K key) { - return loadAll(Set.of(key)).get(key); - } - - @Override - @NonNull - public Map loadAll(@NonNull Set keys) { - return loadFunction.apply(keys); - } - }; - - // build cache - Caffeine caffeine = Caffeine.newBuilder() - .maximumWeight(config.getMaxBytes()) - // limit total size - .weigher(weigher) - .softValues() - // define per entity/aspect ttls - .expireAfter(new Expiry() { - public long expireAfterCreate(@NonNull K key, @NonNull V aspect, long currentTime) { - int ttlSeconds = ttlSecondsFunction.apply(config, key); - if (ttlSeconds < 0) { - ttlSeconds = Integer.MAX_VALUE; - } - return TimeUnit.SECONDS.toNanos(ttlSeconds); - } - public long expireAfterUpdate(@NonNull K key, @NonNull V aspect, - long currentTime, long currentDuration) { - return currentDuration; - } - public long expireAfterRead(@NonNull K key, @NonNull V aspect, - long currentTime, long currentDuration) { - return currentDuration; - } - }); - - if (config.isStatsEnabled()) { - caffeine.recordStats(); + public ClientCache build(Class metricClazz) { + // loads data from entity client + CacheLoader loader = + new CacheLoader() { + @Override + public V load(@NonNull K key) { + return loadAll(Set.of(key)).get(key); } - LoadingCache cache = caffeine.build(loader); - - if (config.isStatsEnabled()) { - ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); - executor.scheduleAtFixedRate(() -> { - CacheStats cacheStats = cache.stats(); - - MetricUtils.gauge(metricClazz, "hitRate", () -> (Gauge) cacheStats::hitRate); - MetricUtils.gauge(metricClazz, "loadFailureRate", () -> - (Gauge) cacheStats::loadFailureRate); - MetricUtils.gauge(metricClazz, "evictionCount", () -> - (Gauge) cacheStats::evictionCount); - MetricUtils.gauge(metricClazz, "loadFailureCount", () -> - (Gauge) cacheStats::loadFailureCount); - MetricUtils.gauge(metricClazz, "averageLoadPenalty", () -> - (Gauge) cacheStats::averageLoadPenalty); - MetricUtils.gauge(metricClazz, "evictionWeight", () -> - (Gauge) cacheStats::evictionWeight); - - log.debug(metricClazz.getSimpleName() + ": " + cacheStats); - }, 0, config.getStatsIntervalSeconds(), TimeUnit.SECONDS); + @Override + @NonNull + public Map loadAll(@NonNull Set keys) { + return loadFunction.apply(keys); } - - return new ClientCache<>(config, cache, loadFunction, weigher, ttlSecondsFunction); - } + }; + + // build cache + Caffeine caffeine = + Caffeine.newBuilder() + .maximumWeight(config.getMaxBytes()) + // limit total size + .weigher(weigher) + .softValues() + // define per entity/aspect ttls + .expireAfter( + new Expiry() { + public long expireAfterCreate( + @NonNull K key, @NonNull V aspect, long currentTime) { + int ttlSeconds = ttlSecondsFunction.apply(config, key); + if (ttlSeconds < 0) { + ttlSeconds = Integer.MAX_VALUE; + } + return TimeUnit.SECONDS.toNanos(ttlSeconds); + } + + public long expireAfterUpdate( + @NonNull K key, @NonNull 
V aspect, long currentTime, long currentDuration) { + return currentDuration; + } + + public long expireAfterRead( + @NonNull K key, @NonNull V aspect, long currentTime, long currentDuration) { + return currentDuration; + } + }); + + if (config.isStatsEnabled()) { + caffeine.recordStats(); + } + + LoadingCache cache = caffeine.build(loader); + + if (config.isStatsEnabled()) { + ScheduledThreadPoolExecutor executor = new ScheduledThreadPoolExecutor(1); + executor.scheduleAtFixedRate( + () -> { + CacheStats cacheStats = cache.stats(); + + MetricUtils.gauge(metricClazz, "hitRate", () -> (Gauge) cacheStats::hitRate); + MetricUtils.gauge( + metricClazz, + "loadFailureRate", + () -> (Gauge) cacheStats::loadFailureRate); + MetricUtils.gauge( + metricClazz, "evictionCount", () -> (Gauge) cacheStats::evictionCount); + MetricUtils.gauge( + metricClazz, + "loadFailureCount", + () -> (Gauge) cacheStats::loadFailureCount); + MetricUtils.gauge( + metricClazz, + "averageLoadPenalty", + () -> (Gauge) cacheStats::averageLoadPenalty); + MetricUtils.gauge( + metricClazz, "evictionWeight", () -> (Gauge) cacheStats::evictionWeight); + + log.debug(metricClazz.getSimpleName() + ": " + cacheStats); + }, + 0, + config.getStatsIntervalSeconds(), + TimeUnit.SECONDS); + } + + return new ClientCache<>(config, cache, loadFunction, weigher, ttlSecondsFunction); } + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java index 84d0ed6b9594d..7bc50a8f3dc7e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java @@ -45,7 +45,8 @@ public EntityResponse getV2( @Nonnull String entityName, @Nonnull final Urn urn, @Nullable final Set aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull @Deprecated @@ -57,18 +58,21 @@ public Map batchGetV2( @Nonnull String entityName, @Nonnull final Set urns, @Nullable final Set aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull Map batchGetVersionedV2( @Nonnull String entityName, @Nonnull final Set versionedUrns, @Nullable final Set aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException; + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException; @Nonnull @Deprecated - public Map batchGet(@Nonnull final Set urns, @Nonnull final Authentication authentication) + public Map batchGet( + @Nonnull final Set urns, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -81,9 +85,14 @@ public Map batchGet(@Nonnull final Set urns, @Nonnull final Au * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nullable String field, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable 
Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Gets browse snapshot of a given path @@ -94,8 +103,12 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nonnull Authentication authentication) + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -109,8 +122,13 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, - @Nullable Map requestFilters, int start, int limit, @Nonnull Authentication authentication) + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map requestFilters, + int start, + int limit, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -125,8 +143,14 @@ public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated @@ -134,11 +158,15 @@ public void update(@Nonnull final Entity entity, @Nonnull final Authentication a throws RemoteInvocationException; @Deprecated - public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication authentication) throws RemoteInvocationException; + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException; @Deprecated - public void batchUpdate(@Nonnull final Set entities, @Nonnull final Authentication authentication) + public void batchUpdate( + @Nonnull final Set entities, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -153,15 +181,20 @@ public void batchUpdate(@Nonnull final Set entities, @Nonnull final Auth * @throws RemoteInvocationException */ @Nonnull - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map requestFilters, int start, int count, @Nonnull Authentication authentication, + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map requestFilters, + int start, + int count, + @Nonnull Authentication authentication, @Nullable SearchFlags searchFlags) throws RemoteInvocationException; /** * Filters for entities matching to a given query and filters * - * TODO: This no longer has any usages, can we deprecate/remove? + *
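
The ClientCache hunk above likewise only reflows the Caffeine wiring, but that wiring is worth seeing in isolation: a maximumWeight bound with a custom weigher, soft values, and an Expiry that fixes each entry's TTL at creation time and never extends it on read or update. A stripped-down, runnable version of the same wiring, assuming Caffeine is on the classpath; the weigher and the per-key TTL rule below are placeholders, not the config-driven ones in the patch:

import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.Expiry;
import com.github.benmanes.caffeine.cache.LoadingCache;
import java.util.concurrent.TimeUnit;

public final class PerKeyTtlCacheSketch {
  public static void main(String[] args) {
    CacheLoader<String, String> loader = key -> "value-for-" + key;

    LoadingCache<String, String> cache =
        Caffeine.newBuilder()
            .maximumWeight(1_000_000) // cap total weight, not entry count
            .weigher((String key, String value) -> value.getBytes().length)
            .softValues()
            .expireAfter(
                new Expiry<String, String>() {
                  @Override
                  public long expireAfterCreate(String key, String value, long currentTime) {
                    // Per-key TTL decided once, at load time (placeholder rule).
                    int ttlSeconds = key.startsWith("hot:") ? 30 : 600;
                    return TimeUnit.SECONDS.toNanos(ttlSeconds);
                  }

                  @Override
                  public long expireAfterUpdate(
                      String key, String value, long currentTime, long currentDuration) {
                    return currentDuration; // keep the original deadline
                  }

                  @Override
                  public long expireAfterRead(
                      String key, String value, long currentTime, long currentDuration) {
                    return currentDuration; // reads do not extend the lease
                  }
                })
            .build(loader);

    System.out.println(cache.get("hot:profile"));
  }
}
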

TODO: This no longer has any usages, can we deprecate/remove? * * @param requestFilters search filters * @param start start offset for search results @@ -170,8 +203,13 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, * @throws RemoteInvocationException */ @Nonnull - public ListResult list(@Nonnull String entity, @Nullable Map requestFilters, int start, int count, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public ListResult list( + @Nonnull String entity, + @Nullable Map requestFilters, + int start, + int count, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Searches for datasets matching to a given query and filters @@ -186,9 +224,16 @@ public ListResult list(@Nonnull String entity, @Nullable Map req * @throws RemoteInvocationException */ @Nonnull - public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - SortCriterion sortCriterion, int start, int count, @Nonnull Authentication authentication, - @Nullable SearchFlags searchFlags) throws RemoteInvocationException; + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication, + @Nullable SearchFlags searchFlags) + throws RemoteInvocationException; /** * Searches for entities matching to a given query and filters across multiple entity types @@ -203,9 +248,15 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nulla * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull Authentication authentication) + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull Authentication authentication) throws RemoteInvocationException; /** @@ -222,9 +273,16 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull Authentication authentication, List facets) + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull Authentication authentication, + List facets) throws RemoteInvocationException; /** @@ -240,8 +298,14 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul * @throws RemoteInvocationException */ @Nonnull - ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, @Nullable SearchFlags searchFlags, + ScrollResult scrollAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, @Nonnull Authentication 
authentication) throws RemoteInvocationException; @@ -258,43 +322,57 @@ ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull Strin * @param start index to start the search from * @param count the number of search hits to return * @param searchFlags configuration flags for the search request - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** - * Gets a list of documents that match given search request that is related to - * the input entity + * Gets a list of documents that match given search request that is related to the input entity * - * @param sourceUrn Urn of the source entity - * @param direction Direction of the relationship - * @param entities list of entities to search (If empty, searches - * across all entities) - * @param input the search input text - * @param maxHops the max number of hops away to search for. If null, - * searches all hops. - * @param filter the request map with fields and values as filters - * to be applied to search hits - * @param sortCriterion {@link SortCriterion} to be applied to search - * results - * @param start index to start the search from - * @param count the number of search hits to return - * @param endTimeMillis end time to filter to + * @param sourceUrn Urn of the source entity + * @param direction Direction of the relationship + * @param entities list of entities to search (If empty, searches across all entities) + * @param input the search input text + * @param maxHops the max number of hops away to search for. If null, searches all hops. 
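
The scrollAcrossEntities / scrollAcrossLineage signatures above define a cursor contract: pass a null scrollId to open a scroll, hand the returned id back with a keepAlive such as "5m", and stop when no id comes back. A consumer-side sketch of that loop against a stand-in interface (the real client also needs authentication and Rest.li plumbing, omitted here):

import java.util.List;

public final class ScrollLoopSketch {
  // Stand-in for the Rest.li client: same scrollId/keepAlive shape, none of the plumbing.
  interface ScrollClient {
    Page scroll(String scrollId, String keepAlive, int count);
  }

  record Page(List<String> entities, String nextScrollId) {}

  static void drain(ScrollClient client) {
    String scrollId = null; // null means "start a new scroll"
    do {
      Page page = client.scroll(scrollId, "5m", 100);
      page.entities().forEach(System.out::println);
      scrollId = page.nextScrollId(); // server returns null when exhausted
    } while (scrollId != null);
  }

  public static void main(String[] args) {
    // Fake two pages to exercise the loop.
    ScrollClient fake =
        (scrollId, keepAlive, count) ->
            scrollId == null
                ? new Page(List.of("urn:li:dataset:1"), "cursor-1")
                : new Page(List.of("urn:li:dataset:2"), null);
    drain(fake);
  }
}
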
+ * @param filter the request map with fields and values as filters to be applied to search hits + * @param sortCriterion {@link SortCriterion} to be applied to search results + * @param start index to start the search from + * @param count the number of search hits to return + * @param endTimeMillis end time to filter to * @param startTimeMillis start time to filter from * @param searchFlags configuration flags for the search request - * @return a {@link SearchResult} that contains a list of matched documents and - * related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable SearchFlags searchFlags, + @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -309,16 +387,27 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll ID indicating offset * @param keepAlive string representation of time to keep point in time alive, ex: 5m - * @param endTimeMillis end time to filter to + * @param endTimeMillis end time to filter to * @param startTimeMillis start time to filter from * @param count the number of search hits to return - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable final Long startTimeMillis, @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, + LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException; @@ -333,28 +422,29 @@ LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull Lineage public StringArray getBrowsePaths(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; - public void 
setWritable(boolean canWrite, @Nonnull Authentication authentication) throws RemoteInvocationException; + public void setWritable(boolean canWrite, @Nonnull Authentication authentication) + throws RemoteInvocationException; @Nonnull - public Map batchGetTotalEntityCount(@Nonnull List entityName, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public Map batchGetTotalEntityCount( + @Nonnull List entityName, @Nonnull Authentication authentication) + throws RemoteInvocationException; - /** - * List all urns existing for a particular Entity type. - */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, - @Nonnull final Authentication authentication) throws RemoteInvocationException; + /** List all urns existing for a particular Entity type. */ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException; - /** - * Hard delete an entity with a particular urn. - */ + /** Hard delete an entity with a particular urn. */ public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; - /** - * Delete all references to an entity with a particular urn. - */ - public void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authentication authentication) + /** Delete all references to an entity with a particular urn. */ + public void deleteEntityReferences( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException; /** @@ -369,68 +459,96 @@ public void deleteEntityReferences(@Nonnull final Urn urn, @Nonnull final Authen * @throws RemoteInvocationException */ @Nonnull - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull Authentication authentication) throws RemoteInvocationException; + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull Authentication authentication) + throws RemoteInvocationException; /** * Checks whether an entity with a given urn exists * * @param urn the urn of the entity - * @return true if an entity exists, i.e. there are > 0 aspects in the DB for the entity. This means that the entity - * has not been hard-deleted. + * @return true if an entity exists, i.e. there are > 0 aspects in the DB for the entity. This + * means that the entity has not been hard-deleted. 
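
listUrns above pages by start offset and count. One plausible way a caller drains it is to loop until a short page signals the end; this sketch uses an invented UrnLister stand-in and deliberately ignores the total-count field the real ListUrnsResult carries:

import java.util.ArrayList;
import java.util.List;

public final class ListUrnsPagingSketch {
  // Stand-in for EntityClient#listUrns: offset paging, nothing else.
  interface UrnLister {
    List<String> listUrns(String entityName, int start, int count);
  }

  static List<String> listAll(UrnLister client, String entityName) {
    final int pageSize = 1000;
    List<String> all = new ArrayList<>();
    int start = 0;
    while (true) {
      List<String> page = client.listUrns(entityName, start, pageSize);
      all.addAll(page);
      if (page.size() < pageSize) {
        return all; // a short (or empty) page means we are past the end
      }
      start += pageSize;
    }
  }

  public static void main(String[] args) {
    List<String> data = List.of("urn:1", "urn:2", "urn:3");
    UrnLister fake =
        (entity, start, count) ->
            data.subList(Math.min(start, data.size()), Math.min(start + count, data.size()));
    System.out.println(listAll(fake, "dataset"));
  }
}
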
* @throws RemoteInvocationException */ @Nonnull - public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException; + public boolean exists(@Nonnull Urn urn, @Nonnull Authentication authentication) + throws RemoteInvocationException; @Nullable @Deprecated - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException; @Nullable @Deprecated - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException; - default List getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nonnull Authentication authentication) + default List getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nonnull Authentication authentication) throws RemoteInvocationException { return getTimeseriesAspectValues( - urn, - entity, - aspect, - startTimeMillis, - endTimeMillis, - limit, - filter, - null, - authentication); + urn, entity, aspect, startTimeMillis, endTimeMillis, limit, filter, null, authentication); } - public List getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull Authentication authentication) + public List getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated - default String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + default String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { return ingestProposal(metadataChangeProposal, authentication, false); } - String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException; + String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException; @Deprecated - default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataChangeProposal, + default String wrappedIngestProposal( + @Nonnull MetadataChangeProposal 
metadataChangeProposal, @Nonnull final Authentication authentication) { return wrappedIngestProposal(metadataChangeProposal, authentication, false); } - default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, final boolean async) { + default String wrappedIngestProposal( + @Nonnull MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) { try { return ingestProposal(metadataChangeProposal, authentication, async); } catch (RemoteInvocationException e) { @@ -439,13 +557,18 @@ default String wrappedIngestProposal(@Nonnull MetadataChangeProposal metadataCha } @Deprecated - default List batchIngestProposals(@Nonnull final Collection metadataChangeProposals, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + default List batchIngestProposals( + @Nonnull final Collection metadataChangeProposals, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { return batchIngestProposals(metadataChangeProposals, authentication, false); } - default List batchIngestProposals(@Nonnull final Collection metadataChangeProposals, - @Nonnull final Authentication authentication, final boolean async) throws RemoteInvocationException { + default List batchIngestProposals( + @Nonnull final Collection metadataChangeProposals, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { return metadataChangeProposals.stream() .map(proposal -> wrappedIngestProposal(proposal, authentication, async)) .collect(Collectors.toList()); @@ -453,16 +576,29 @@ default List batchIngestProposals(@Nonnull final Collection Optional getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class aspectClass, @Nonnull Authentication authentication) + public Optional getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class aspectClass, + @Nonnull Authentication authentication) throws RemoteInvocationException; @Deprecated - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException; + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException; - public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, - @Nonnull Authentication authentication) throws Exception; + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull Authentication authentication) + throws Exception; - public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) throws Exception; + public void rollbackIngestion(@Nonnull String runId, @Nonnull Authentication authentication) + throws Exception; } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java index 8e103cff283ea..453eecab7b446 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java @@ -1,5 +1,7 @@ 
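
The ingestProposal / batchIngestProposals family above shows a pattern this interface uses repeatedly: the deprecated overload is a default method that delegates to the newer signature with the async flag pinned to false, so implementors override one method and old call sites keep compiling. The pattern in isolation, with invented names:

public final class DefaultOverloadSketch {
  interface Ingestor {
    // The one method implementors must provide.
    String ingest(String proposal, boolean async);

    // Deprecated convenience overload: old call sites keep working, and the
    // default pins the new flag to its historical behavior.
    @Deprecated
    default String ingest(String proposal) {
      return ingest(proposal, false);
    }
  }

  public static void main(String[] args) {
    Ingestor sync = (proposal, async) -> (async ? "queued: " : "done: ") + proposal;
    System.out.println(sync.ingest("mcp-1"));       // deprecated path -> async=false
    System.out.println(sync.ingest("mcp-2", true)); // new path
  }
}
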
package com.linkedin.entity.client; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.github.benmanes.caffeine.cache.LoadingCache; import com.github.benmanes.caffeine.cache.Weigher; import com.linkedin.common.client.ClientCache; @@ -9,11 +11,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.util.Pair; -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - -import javax.annotation.Nonnull; import java.util.Collection; import java.util.Map; import java.util.Optional; @@ -22,116 +19,144 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; @Builder public class EntityClientCache { - @NonNull - private EntityClientCacheConfig config; - @NonNull - private final ClientCache cache; - @NonNull - private BiFunction, Set, Map> loadFunction; - - public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set aspectNames) { - return batchGetV2(Set.of(urn), aspectNames).get(urn); - } - - public Map batchGetV2(@Nonnull final Set urns, @Nonnull final Set aspectNames) { - final Map response; - - if (config.isEnabled()) { - Set keys = urns.stream() - .flatMap(urn -> aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build())) - .collect(Collectors.toSet()); - Map envelopedAspects = cache.getAll(keys); - - Set responses = envelopedAspects.entrySet().stream() - .map(entry -> Pair.of(entry.getKey().getUrn(), entry.getValue())) - .collect(Collectors.groupingBy(Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet()))) - .entrySet().stream().map(e -> toEntityResponse(e.getKey(), e.getValue())) - .collect(Collectors.toSet()); - - response = responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity())); - } else { - response = loadFunction.apply(urns, aspectNames); - } - - return response; + @NonNull private EntityClientCacheConfig config; + @NonNull private final ClientCache cache; + @NonNull private BiFunction, Set, Map> loadFunction; + + public EntityResponse getV2(@Nonnull final Urn urn, @Nonnull final Set aspectNames) { + return batchGetV2(Set.of(urn), aspectNames).get(urn); + } + + public Map batchGetV2( + @Nonnull final Set urns, @Nonnull final Set aspectNames) { + final Map response; + + if (config.isEnabled()) { + Set keys = + urns.stream() + .flatMap( + urn -> + aspectNames.stream().map(a -> Key.builder().urn(urn).aspectName(a).build())) + .collect(Collectors.toSet()); + Map envelopedAspects = cache.getAll(keys); + + Set responses = + envelopedAspects.entrySet().stream() + .map(entry -> Pair.of(entry.getKey().getUrn(), entry.getValue())) + .collect( + Collectors.groupingBy( + Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toSet()))) + .entrySet() + .stream() + .map(e -> toEntityResponse(e.getKey(), e.getValue())) + .collect(Collectors.toSet()); + + response = + responses.stream().collect(Collectors.toMap(EntityResponse::getUrn, Function.identity())); + } else { + response = loadFunction.apply(urns, aspectNames); } - private static EntityResponse toEntityResponse(Urn urn, Collection envelopedAspects) { - final EntityResponse response = new EntityResponse(); - response.setUrn(urn); - response.setEntityName(urnToEntityName(urn)); - 
response.setAspects(new EnvelopedAspectMap( - envelopedAspects.stream() - .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)) - )); - return response; + return response; + } + + private static EntityResponse toEntityResponse( + Urn urn, Collection envelopedAspects) { + final EntityResponse response = new EntityResponse(); + response.setUrn(urn); + response.setEntityName(urnToEntityName(urn)); + response.setAspects( + new EnvelopedAspectMap( + envelopedAspects.stream() + .collect(Collectors.toMap(EnvelopedAspect::getName, aspect -> aspect)))); + return response; + } + + public static class EntityClientCacheBuilder { + + private EntityClientCacheBuilder cache(LoadingCache cache) { + return this; } - public static class EntityClientCacheBuilder { - - private EntityClientCacheBuilder cache(LoadingCache cache) { - return this; - } - - public EntityClientCache build(Class metricClazz) { - // estimate size - Weigher weighByEstimatedSize = (key, value) -> - value.getValue().data().toString().getBytes().length; - - // batch loads data from entity client (restli or java) - Function, Map> loader = (Iterable keys) -> { - Map> keysByEntity = StreamSupport.stream(keys.spliterator(), true) - .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())); - - Map results = keysByEntity.entrySet().stream() - .flatMap(entry -> { - Set urns = entry.getValue().stream() - .map(Key::getUrn) - .collect(Collectors.toSet()); - Set aspects = entry.getValue().stream() - .map(Key::getAspectName) - .collect(Collectors.toSet()); - return loadFunction.apply(urns, aspects).entrySet().stream(); + public EntityClientCache build(Class metricClazz) { + // estimate size + Weigher weighByEstimatedSize = + (key, value) -> value.getValue().data().toString().getBytes().length; + + // batch loads data from entity client (restli or java) + Function, Map> loader = + (Iterable keys) -> { + Map> keysByEntity = + StreamSupport.stream(keys.spliterator(), true) + .collect(Collectors.groupingBy(Key::getEntityName, Collectors.toSet())); + + Map results = + keysByEntity.entrySet().stream() + .flatMap( + entry -> { + Set urns = + entry.getValue().stream() + .map(Key::getUrn) + .collect(Collectors.toSet()); + Set aspects = + entry.getValue().stream() + .map(Key::getAspectName) + .collect(Collectors.toSet()); + return loadFunction.apply(urns, aspects).entrySet().stream(); }) - .flatMap(resp -> resp.getValue().getAspects().values().stream() - .map(envAspect -> { - Key key = Key.builder().urn(resp.getKey()).aspectName(envAspect.getName()).build(); - return Map.entry(key, envAspect); - })).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - return results; - }; - - // ideally the cache time comes from caching headers from service, but configuration driven for now - BiFunction ttlSeconds = (config, key) -> - Optional.ofNullable(config.getEntityAspectTTLSeconds()).orElse(Map.of()) - .getOrDefault(key.getEntityName(), Map.of()) - .getOrDefault(key.getAspectName(), config.getDefaultTTLSeconds()); - - cache = ClientCache.builder() - .weigher(weighByEstimatedSize) - .config(config) - .loadFunction(loader) - .ttlSecondsFunction(ttlSeconds) - .build(metricClazz); - - return new EntityClientCache(config, cache, loadFunction); - } + .flatMap( + resp -> + resp.getValue().getAspects().values().stream() + .map( + envAspect -> { + Key key = + Key.builder() + .urn(resp.getKey()) + .aspectName(envAspect.getName()) + .build(); + return Map.entry(key, envAspect); + })) + .collect(Collectors.toMap(Map.Entry::getKey, 
Map.Entry::getValue)); + + return results; + }; + + // ideally the cache time comes from caching headers from service, but configuration driven + // for now + BiFunction ttlSeconds = + (config, key) -> + Optional.ofNullable(config.getEntityAspectTTLSeconds()) + .orElse(Map.of()) + .getOrDefault(key.getEntityName(), Map.of()) + .getOrDefault(key.getAspectName(), config.getDefaultTTLSeconds()); + + cache = + ClientCache.builder() + .weigher(weighByEstimatedSize) + .config(config) + .loadFunction(loader) + .ttlSecondsFunction(ttlSeconds) + .build(metricClazz); + + return new EntityClientCache(config, cache, loadFunction); } + } - @Data - @Builder - protected static class Key { - private final Urn urn; - private final String aspectName; + @Data + @Builder + protected static class Key { + private final Urn urn; + private final String aspectName; - public String getEntityName() { - return urn.getEntityType(); - } + public String getEntityName() { + return urn.getEntityType(); } + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java index 2716e27518fcc..c854cb9dd279e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/RestliEntityClient.java @@ -91,74 +91,95 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; - @Slf4j public class RestliEntityClient extends BaseClient implements EntityClient { - private static final EntitiesRequestBuilders ENTITIES_REQUEST_BUILDERS = new EntitiesRequestBuilders(); - private static final EntitiesV2RequestBuilders ENTITIES_V2_REQUEST_BUILDERS = new EntitiesV2RequestBuilders(); + private static final EntitiesRequestBuilders ENTITIES_REQUEST_BUILDERS = + new EntitiesRequestBuilders(); + private static final EntitiesV2RequestBuilders ENTITIES_V2_REQUEST_BUILDERS = + new EntitiesV2RequestBuilders(); private static final EntitiesVersionedV2RequestBuilders ENTITIES_VERSIONED_V2_REQUEST_BUILDERS = new EntitiesVersionedV2RequestBuilders(); - private static final AspectsRequestBuilders ASPECTS_REQUEST_BUILDERS = new AspectsRequestBuilders(); - private static final PlatformRequestBuilders PLATFORM_REQUEST_BUILDERS = new PlatformRequestBuilders(); + private static final AspectsRequestBuilders ASPECTS_REQUEST_BUILDERS = + new AspectsRequestBuilders(); + private static final PlatformRequestBuilders PLATFORM_REQUEST_BUILDERS = + new PlatformRequestBuilders(); private static final RunsRequestBuilders RUNS_REQUEST_BUILDERS = new RunsRequestBuilders(); - public RestliEntityClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount) { + public RestliEntityClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount) { super(restliClient, backoffPolicy, retryCount); } @Nullable - public EntityResponse getV2(@Nonnull String entityName, @Nonnull final Urn urn, - @Nullable final Set aspectNames, @Nonnull final Authentication authentication) + public EntityResponse getV2( + @Nonnull String entityName, + @Nonnull final Urn urn, + @Nullable final Set aspectNames, + @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - final EntitiesV2GetRequestBuilder requestBuilder = ENTITIES_V2_REQUEST_BUILDERS.get() - 
.aspectsParam(aspectNames) - .id(urn.toString()); + final EntitiesV2GetRequestBuilder requestBuilder = + ENTITIES_V2_REQUEST_BUILDERS.get().aspectsParam(aspectNames).id(urn.toString()); return sendClientRequest(requestBuilder, authentication).getEntity(); } @Nonnull public Entity get(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - return sendClientRequest(ENTITIES_REQUEST_BUILDERS.get().id(urn.toString()), authentication).getEntity(); + return sendClientRequest(ENTITIES_REQUEST_BUILDERS.get().id(urn.toString()), authentication) + .getEntity(); } /** * Legacy! Use {#batchGetV2} instead, as this method leverages Snapshot models, and will not work * for fetching entities + aspects added by Entity Registry configuration. * - * Batch get a set of {@link Entity} objects by urn. + *
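
EntityClientCache above caches one entry per (urn, aspect) pair and then stitches hits back into per-urn responses with groupingBy. The regrouping step, reduced to plain collections with a stand-in record in place of EntityResponse and EnvelopedAspect:

import java.util.Map;
import java.util.stream.Collectors;

public final class RegroupSketch {
  // Stand-in for the cache key: one entry per (urn, aspect) pair.
  record Key(String urn, String aspectName) {}

  public static void main(String[] args) {
    Map<Key, String> cacheHits =
        Map.of(
            new Key("urn:ds:1", "ownership"), "owners...",
            new Key("urn:ds:1", "status"), "removed=false",
            new Key("urn:ds:2", "status"), "removed=true");

    // Fold per-aspect entries back into a per-urn map of aspects.
    Map<String, Map<String, String>> byUrn =
        cacheHits.entrySet().stream()
            .collect(
                Collectors.groupingBy(
                    e -> e.getKey().urn(),
                    Collectors.toMap(e -> e.getKey().aspectName(), Map.Entry::getValue)));

    byUrn.forEach((urn, aspects) -> System.out.println(urn + " -> " + aspects.keySet()));
  }
}
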

Batch get a set of {@link Entity} objects by urn. * * @param urns the urns of the entities to batch get * @param authentication the authentication to include in the request to the Metadata Service * @throws RemoteInvocationException */ @Nonnull - public Map batchGet(@Nonnull final Set urns, @Nonnull final Authentication authentication) + public Map batchGet( + @Nonnull final Set urns, @Nonnull final Authentication authentication) throws RemoteInvocationException { final Integer batchSize = 25; final AtomicInteger index = new AtomicInteger(0); final Collection> entityUrnBatches = - urns.stream().collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)).values(); + urns.stream() + .collect(Collectors.groupingBy(x -> index.getAndIncrement() / batchSize)) + .values(); final Map response = new HashMap<>(); for (List urnsInBatch : entityUrnBatches) { EntitiesBatchGetRequestBuilder batchGetRequestBuilder = - ENTITIES_REQUEST_BUILDERS.batchGet().ids(urnsInBatch.stream().map(Urn::toString).collect(Collectors.toSet())); - final Map batchResponse = sendClientRequest(batchGetRequestBuilder, authentication).getEntity() - .getResults() - .entrySet() - .stream() - .collect(Collectors.toMap(entry -> { - try { - return Urn.createFromString(entry.getKey()); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create Urn from key string %s", entry.getKey())); - } - }, entry -> entry.getValue().getEntity())); + ENTITIES_REQUEST_BUILDERS + .batchGet() + .ids(urnsInBatch.stream().map(Urn::toString).collect(Collectors.toSet())); + final Map batchResponse = + sendClientRequest(batchGetRequestBuilder, authentication) + .getEntity() + .getResults() + .entrySet() + .stream() + .collect( + Collectors.toMap( + entry -> { + try { + return Urn.createFromString(entry.getKey()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to create Urn from key string %s", entry.getKey())); + } + }, + entry -> entry.getValue().getEntity())); response.putAll(batchResponse); } return response; @@ -174,25 +195,36 @@ public Map batchGet(@Nonnull final Set urns, @Nonnull final Au * @throws RemoteInvocationException */ @Nonnull - public Map batchGetV2(@Nonnull String entityName, @Nonnull final Set urns, - @Nullable final Set aspectNames, @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { + public Map batchGetV2( + @Nonnull String entityName, + @Nonnull final Set urns, + @Nullable final Set aspectNames, + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { - final EntitiesV2BatchGetRequestBuilder requestBuilder = ENTITIES_V2_REQUEST_BUILDERS.batchGet() - .aspectsParam(aspectNames) - .ids(urns.stream().map(Urn::toString).collect(Collectors.toList())); + final EntitiesV2BatchGetRequestBuilder requestBuilder = + ENTITIES_V2_REQUEST_BUILDERS + .batchGet() + .aspectsParam(aspectNames) + .ids(urns.stream().map(Urn::toString).collect(Collectors.toList())); - return sendClientRequest(requestBuilder, authentication).getEntity() + return sendClientRequest(requestBuilder, authentication) + .getEntity() .getResults() .entrySet() .stream() - .collect(Collectors.toMap(entry -> { - try { - return Urn.createFromString(entry.getKey()); - } catch (URISyntaxException e) { - throw new RuntimeException( - String.format("Failed to bind urn string with value %s into urn", entry.getKey())); - } - }, entry -> entry.getValue().getEntity())); + .collect( + Collectors.toMap( + 
entry -> { + try { + return Urn.createFromString(entry.getKey()); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to bind urn string with value %s into urn", entry.getKey())); + } + }, + entry -> entry.getValue().getEntity())); } /** @@ -209,21 +241,31 @@ public Map batchGetVersionedV2( @Nonnull String entityName, @Nonnull final Set versionedUrns, @Nullable final Set aspectNames, - @Nonnull final Authentication authentication) throws RemoteInvocationException, URISyntaxException { - - final EntitiesVersionedV2BatchGetRequestBuilder requestBuilder = ENTITIES_VERSIONED_V2_REQUEST_BUILDERS.batchGet() - .aspectsParam(aspectNames) - .entityTypeParam(entityName) - .ids(versionedUrns.stream() - .map(versionedUrn -> com.linkedin.common.urn.VersionedUrn.of(versionedUrn.getUrn().toString(), versionedUrn.getVersionStamp())) - .collect(Collectors.toSet())); + @Nonnull final Authentication authentication) + throws RemoteInvocationException, URISyntaxException { - return sendClientRequest(requestBuilder, authentication).getEntity() + final EntitiesVersionedV2BatchGetRequestBuilder requestBuilder = + ENTITIES_VERSIONED_V2_REQUEST_BUILDERS + .batchGet() + .aspectsParam(aspectNames) + .entityTypeParam(entityName) + .ids( + versionedUrns.stream() + .map( + versionedUrn -> + com.linkedin.common.urn.VersionedUrn.of( + versionedUrn.getUrn().toString(), versionedUrn.getVersionStamp())) + .collect(Collectors.toSet())); + + return sendClientRequest(requestBuilder, authentication) + .getEntity() .getResults() .entrySet() .stream() - .collect(Collectors.toMap(entry -> - UrnUtils.getUrn(entry.getKey().getUrn()), entry -> entry.getValue().getEntity())); + .collect( + Collectors.toMap( + entry -> UrnUtils.getUrn(entry.getKey().getUrn()), + entry -> entry.getValue().getEntity())); } /** @@ -238,15 +280,22 @@ public Map batchGetVersionedV2( * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nullable String field, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionAutocomplete() - .entityParam(entityType) - .queryParam(query) - .fieldParam(field) - .filterParam(filterOrDefaultEmptyFilter(requestFilters)) - .limitParam(limit); + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nullable String field, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + EntitiesDoAutocompleteRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionAutocomplete() + .entityParam(entityType) + .queryParam(query) + .fieldParam(field) + .filterParam(filterOrDefaultEmptyFilter(requestFilters)) + .limitParam(limit); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -260,14 +309,20 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull String query, - @Nullable Filter requestFilters, @Nonnull int limit, @Nonnull final Authentication authentication) + public AutoCompleteResult autoComplete( + @Nonnull String entityType, + @Nonnull String query, + @Nullable Filter requestFilters, + @Nonnull int limit, + @Nonnull final 
Authentication authentication) throws RemoteInvocationException { - EntitiesDoAutocompleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionAutocomplete() - .entityParam(entityType) - .queryParam(query) - .filterParam(filterOrDefaultEmptyFilter(requestFilters)) - .limitParam(limit); + EntitiesDoAutocompleteRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionAutocomplete() + .entityParam(entityType) + .queryParam(query) + .filterParam(filterOrDefaultEmptyFilter(requestFilters)) + .limitParam(limit); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -282,14 +337,21 @@ public AutoCompleteResult autoComplete(@Nonnull String entityType, @Nonnull Stri * @throws RemoteInvocationException */ @Nonnull - public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, - @Nullable Map requestFilters, int start, int limit, @Nonnull final Authentication authentication) + public BrowseResult browse( + @Nonnull String entityType, + @Nonnull String path, + @Nullable Map requestFilters, + int start, + int limit, + @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoBrowseRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionBrowse() - .pathParam(path) - .entityParam(entityType) - .startParam(start) - .limitParam(limit); + EntitiesDoBrowseRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionBrowse() + .pathParam(path) + .entityParam(entityType) + .startParam(start) + .limitParam(limit); if (requestFilters != null) { requestBuilder.filterParam(newFilter(requestFilters)); } @@ -308,31 +370,45 @@ public BrowseResult browse(@Nonnull String entityType, @Nonnull String path, * @throws RemoteInvocationException */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, - @Nonnull String input, int start, int count, @Nonnull Authentication authentication) { + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count, + @Nonnull Authentication authentication) { throw new NotImplementedException("BrowseV2 is not implemented in Restli yet"); } public void update(@Nonnull final Entity entity, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoIngestRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity); + EntitiesDoIngestRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity); sendClientRequest(requestBuilder, authentication); } - public void updateWithSystemMetadata(@Nonnull final Entity entity, @Nullable final SystemMetadata systemMetadata, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public void updateWithSystemMetadata( + @Nonnull final Entity entity, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { if (systemMetadata == null) { update(entity, authentication); return; } EntitiesDoIngestRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionIngest().entityParam(entity).systemMetadataParam(systemMetadata); + ENTITIES_REQUEST_BUILDERS + .actionIngest() + .entityParam(entity) + .systemMetadataParam(systemMetadata); sendClientRequest(requestBuilder, authentication); } - public void batchUpdate(@Nonnull final Set entities, @Nonnull final Authentication authentication) + 
public void batchUpdate( + @Nonnull final Set entities, @Nonnull final Authentication authentication) throws RemoteInvocationException { EntitiesDoBatchIngestRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionBatchIngest().entitiesParam(new EntityArray(entities)); @@ -353,18 +429,25 @@ public void batchUpdate(@Nonnull final Set entities, @Nonnull final Auth */ @Nonnull @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, - @Nullable Map requestFilters, int start, int count, @Nonnull final Authentication authentication, + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Map requestFilters, + int start, + int count, + @Nonnull final Authentication authentication, @Nullable SearchFlags searchFlags) throws RemoteInvocationException { - final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionSearch() - .entityParam(entity) - .inputParam(input) - .filterParam(newFilter(requestFilters)) - .startParam(start) - .fulltextParam(searchFlags != null ? searchFlags.isFulltext() : null) - .countParam(count); + final EntitiesDoSearchRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionSearch() + .entityParam(entity) + .inputParam(input) + .filterParam(newFilter(requestFilters)) + .startParam(start) + .fulltextParam(searchFlags != null ? searchFlags.isFulltext() : null) + .countParam(count); if (searchFlags != null) { requestBuilder.searchFlagsParam(searchFlags); } @@ -382,13 +465,20 @@ public SearchResult search(@Nonnull String entity, @Nonnull String input, * @throws RemoteInvocationException */ @Nonnull - public ListResult list(@Nonnull String entity, @Nullable Map requestFilters, int start, int count, - @Nonnull final Authentication authentication) throws RemoteInvocationException { - final EntitiesDoListRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionList() - .entityParam(entity) - .filterParam(newFilter(requestFilters)) - .startParam(start) - .countParam(count); + public ListResult list( + @Nonnull String entity, + @Nullable Map requestFilters, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + final EntitiesDoListRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionList() + .entityParam(entity) + .filterParam(newFilter(requestFilters)) + .startParam(start) + .countParam(count); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -406,16 +496,24 @@ public ListResult list(@Nonnull String entity, @Nullable Map req */ @Nonnull @Override - public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nullable Filter filter, - SortCriterion sortCriterion, int start, int count, @Nonnull final Authentication authentication, + public SearchResult search( + @Nonnull String entity, + @Nonnull String input, + @Nullable Filter filter, + SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication, @Nullable SearchFlags searchFlags) throws RemoteInvocationException { - final EntitiesDoSearchRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionSearch() - .entityParam(entity) - .inputParam(input) - .startParam(start) - .countParam(count); + final EntitiesDoSearchRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionSearch() + .entityParam(entity) + .inputParam(input) + .startParam(start) + .countParam(count); if (filter != null) { requestBuilder.filterParam(filter); @@ -434,11 +532,18 @@ 
public SearchResult search(@Nonnull String entity, @Nonnull String input, @Nulla } @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull final Authentication authentication) + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication) throws RemoteInvocationException { - return searchAcrossEntities(entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); + return searchAcrossEntities( + entities, input, filter, start, count, searchFlags, sortCriterion, authentication, null); } /** @@ -454,13 +559,24 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul * @throws RemoteInvocationException */ @Nonnull - public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, int start, int count, @Nullable SearchFlags searchFlags, - @Nullable SortCriterion sortCriterion, @Nonnull final Authentication authentication, @Nullable List facets) + public SearchResult searchAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + int start, + int count, + @Nullable SearchFlags searchFlags, + @Nullable SortCriterion sortCriterion, + @Nonnull final Authentication authentication, + @Nullable List facets) throws RemoteInvocationException { final EntitiesDoSearchAcrossEntitiesRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionSearchAcrossEntities().inputParam(input).startParam(start).countParam(count); + ENTITIES_REQUEST_BUILDERS + .actionSearchAcrossEntities() + .inputParam(input) + .startParam(start) + .countParam(count); if (entities != null) { requestBuilder.entitiesParam(new StringArray(entities)); @@ -481,9 +597,15 @@ public SearchResult searchAcrossEntities(@Nonnull List entities, @Nonnul @Nonnull @Override - public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnull String input, - @Nullable Filter filter, @Nullable String scrollId, @Nullable String keepAlive, int count, - @Nullable SearchFlags searchFlags, @Nonnull Authentication authentication) + public ScrollResult scrollAcrossEntities( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter filter, + @Nullable String scrollId, + @Nullable String keepAlive, + int count, + @Nullable SearchFlags searchFlags, + @Nonnull Authentication authentication) throws RemoteInvocationException { final EntitiesDoScrollAcrossEntitiesRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionScrollAcrossEntities().inputParam(input).countParam(count); @@ -509,14 +631,23 @@ public ScrollResult scrollAcrossEntities(@Nonnull List entities, @Nonnul @Nonnull @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable SearchFlags searchFlags, + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable 
SortCriterion sortCriterion, + int start, + int count, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException { final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionSearchAcrossLineage() + ENTITIES_REQUEST_BUILDERS + .actionSearchAcrossLineage() .urnParam(sourceUrn.toString()) .directionParam(direction.name()) .inputParam(input) @@ -538,15 +669,25 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull @Nonnull @Override - public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, int start, int count, @Nullable final Long startTimeMillis, - @Nullable final Long endTimeMillis, @Nullable SearchFlags searchFlags, + public LineageSearchResult searchAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException { final EntitiesDoSearchAcrossLineageRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionSearchAcrossLineage() + ENTITIES_REQUEST_BUILDERS + .actionSearchAcrossLineage() .urnParam(sourceUrn.toString()) .directionParam(direction.name()) .inputParam(input) @@ -572,16 +713,26 @@ public LineageSearchResult searchAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull return sendClientRequest(requestBuilder, authentication).getEntity(); } - @Override - public LineageScrollResult scrollAcrossLineage(@Nonnull Urn sourceUrn, @Nonnull LineageDirection direction, - @Nonnull List entities, @Nonnull String input, @Nullable Integer maxHops, @Nullable Filter filter, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nonnull String keepAlive, int count, - @Nullable final Long startTimeMillis, @Nullable final Long endTimeMillis, @Nullable final SearchFlags searchFlags, + public LineageScrollResult scrollAcrossLineage( + @Nonnull Urn sourceUrn, + @Nonnull LineageDirection direction, + @Nonnull List entities, + @Nonnull String input, + @Nullable Integer maxHops, + @Nullable Filter filter, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nonnull String keepAlive, + int count, + @Nullable final Long startTimeMillis, + @Nullable final Long endTimeMillis, + @Nullable final SearchFlags searchFlags, @Nonnull final Authentication authentication) throws RemoteInvocationException { final EntitiesDoScrollAcrossLineageRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionScrollAcrossLineage() + ENTITIES_REQUEST_BUILDERS + .actionScrollAcrossLineage() .urnParam(sourceUrn.toString()) .directionParam(direction.name()) .inputParam(input) @@ -633,51 +784,66 @@ public void setWritable(boolean canWrite, @Nonnull final Authentication authenti } @Nonnull - public Map batchGetTotalEntityCount(@Nonnull List entityName, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public Map batchGetTotalEntityCount( + @Nonnull List entityName, @Nonnull final Authentication authentication) + throws RemoteInvocationException { 
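For orientation, a minimal caller-side sketch of the reformatted batchGetTotalEntityCount call. The client and authentication instances are assumed, and the Map<String, Long> result type is an assumption, since generic type parameters are elided in this diff:

// Hypothetical usage; the entity names are illustrative only.
Map<String, Long> counts =
    entityClient.batchGetTotalEntityCount(
        List.of("dataset", "dashboard", "chart"), systemAuthentication);
counts.forEach((entityName, count) -> log.info("{}: {}", entityName, count));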
EntitiesDoBatchGetTotalEntityCountRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionBatchGetTotalEntityCount().entitiesParam(new StringArray(entityName)); + ENTITIES_REQUEST_BUILDERS + .actionBatchGetTotalEntityCount() + .entitiesParam(new StringArray(entityName)); return sendClientRequest(requestBuilder, authentication).getEntity(); } - /** - * List all urns existing for a particular Entity type. - */ - public ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + /** List all urns existing for a particular Entity type. */ + public ListUrnsResult listUrns( + @Nonnull final String entityName, + final int start, + final int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { EntitiesDoListUrnsRequestBuilder requestBuilder = - ENTITIES_REQUEST_BUILDERS.actionListUrns().entityParam(entityName).startParam(start).countParam(count); + ENTITIES_REQUEST_BUILDERS + .actionListUrns() + .entityParam(entityName) + .startParam(start) + .countParam(count); return sendClientRequest(requestBuilder, authentication).getEntity(); } - /** - * Hard delete an entity with a particular urn. - */ + /** Hard delete an entity with a particular urn. */ public void deleteEntity(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoDeleteRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionDelete().urnParam(urn.toString()); + EntitiesDoDeleteRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionDelete().urnParam(urn.toString()); sendClientRequest(requestBuilder, authentication); } - /** - * Delete all references to a particular entity. - */ + /** Delete all references to a particular entity. 
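The listUrns action above pages with start/count, so a caller typically loops until the reported total is exhausted. A rough sketch, assuming the client instance, an AUTH object, and the generated ListUrnsResult accessors:

// Hypothetical paging loop; the entity name and page size are illustrative.
int start = 0;
final int pageSize = 100;
ListUrnsResult page;
do {
  page = entityClient.listUrns("dataset", start, pageSize, AUTH);
  page.getEntities().forEach(urn -> log.info("found {}", urn));
  start += pageSize;
} while (start < page.getTotal());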
*/ @Override public void deleteEntityReferences(@Nonnull Urn urn, @Nonnull Authentication authentication) throws RemoteInvocationException { - EntitiesDoDeleteReferencesRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionDeleteReferences().urnParam(urn.toString()); + EntitiesDoDeleteReferencesRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionDeleteReferences().urnParam(urn.toString()); sendClientRequest(requestBuilder, authentication); } @Nonnull @Override - public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Nullable SortCriterion sortCriterion, - int start, int count, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoFilterRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionFilter() - .entityParam(entity) - .filterParam(filter) - .startParam(start) - .countParam(count); + public SearchResult filter( + @Nonnull String entity, + @Nonnull Filter filter, + @Nullable SortCriterion sortCriterion, + int start, + int count, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { + EntitiesDoFilterRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS + .actionFilter() + .entityParam(entity) + .filterParam(filter) + .startParam(start) + .countParam(count); if (sortCriterion != null) { requestBuilder.sortParam(sortCriterion); } @@ -686,9 +852,10 @@ public SearchResult filter(@Nonnull String entity, @Nonnull Filter filter, @Null @Nonnull @Override - public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) throws RemoteInvocationException { - EntitiesDoExistsRequestBuilder requestBuilder = ENTITIES_REQUEST_BUILDERS.actionExists() - .urnParam(urn.toString()); + public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentication) + throws RemoteInvocationException { + EntitiesDoExistsRequestBuilder requestBuilder = + ENTITIES_REQUEST_BUILDERS.actionExists().urnParam(urn.toString()); return sendClientRequest(requestBuilder, authentication).getEntity(); } @@ -700,8 +867,12 @@ public boolean exists(@Nonnull Urn urn, @Nonnull final Authentication authentica * @throws RemoteInvocationException on remote request error. */ @Nonnull - public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public VersionedAspect getAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { AspectsGetRequestBuilder requestBuilder = ASPECTS_REQUEST_BUILDERS.get().id(urn).aspectParam(aspect).versionParam(version); @@ -717,8 +888,12 @@ public VersionedAspect getAspect(@Nonnull String urn, @Nonnull String aspect, @N * @throws RemoteInvocationException on remote request error. 
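As a rough illustration of the getAspect contract above, a read of a single aspect version. The urn, aspect name, and the convention that version 0 resolves to the latest version are assumptions on top of what this diff shows:

// Hypothetical read of the latest datasetProperties aspect.
VersionedAspect aspect =
    entityClient.getAspect(
        "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
        "datasetProperties",
        0L,
        AUTH);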
*/ @Nullable - public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull final Authentication authentication) throws RemoteInvocationException { + public VersionedAspect getAspectOrNull( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull final Authentication authentication) + throws RemoteInvocationException { AspectsGetRequestBuilder requestBuilder = ASPECTS_REQUEST_BUILDERS.get().id(urn).aspectParam(aspect).versionParam(version); @@ -747,13 +922,21 @@ public VersionedAspect getAspectOrNull(@Nonnull String urn, @Nonnull String aspe * @throws RemoteInvocationException on remote request error. */ @Nonnull - public List getTimeseriesAspectValues(@Nonnull String urn, @Nonnull String entity, - @Nonnull String aspect, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, @Nullable Integer limit, - @Nullable Filter filter, @Nullable SortCriterion sort, @Nonnull final Authentication authentication) + public List getTimeseriesAspectValues( + @Nonnull String urn, + @Nonnull String entity, + @Nonnull String aspect, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, + @Nullable Integer limit, + @Nullable Filter filter, + @Nullable SortCriterion sort, + @Nonnull final Authentication authentication) throws RemoteInvocationException { AspectsDoGetTimeseriesAspectValuesRequestBuilder requestBuilder = - ASPECTS_REQUEST_BUILDERS.actionGetTimeseriesAspectValues() + ASPECTS_REQUEST_BUILDERS + .actionGetTimeseriesAspectValues() .urnParam(urn) .entityParam(entity) .aspectParam(aspect); @@ -783,19 +966,29 @@ public List getTimeseriesAspectValues(@Nonnull String urn, @Non /** * Ingest a MetadataChangeProposal event. + * * @return */ @Override - public String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, - @Nonnull final Authentication authentication, - final boolean async) throws RemoteInvocationException { + public String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, + @Nonnull final Authentication authentication, + final boolean async) + throws RemoteInvocationException { final AspectsDoIngestProposalRequestBuilder requestBuilder = - ASPECTS_REQUEST_BUILDERS.actionIngestProposal().proposalParam(metadataChangeProposal).asyncParam(String.valueOf(async)); + ASPECTS_REQUEST_BUILDERS + .actionIngestProposal() + .proposalParam(metadataChangeProposal) + .asyncParam(String.valueOf(async)); return sendClientRequest(requestBuilder, authentication).getEntity(); } - public Optional getVersionedAspect(@Nonnull String urn, @Nonnull String aspect, - @Nonnull Long version, @Nonnull Class aspectClass, @Nonnull final Authentication authentication) + public Optional getVersionedAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Class aspectClass, + @Nonnull final Authentication authentication) throws RemoteInvocationException { AspectsGetRequestBuilder requestBuilder = @@ -825,18 +1018,24 @@ public Optional getVersionedAspect(@Nonnull String @SneakyThrows @Override - public DataMap getRawAspect(@Nonnull String urn, @Nonnull String aspect, @Nonnull Long version, - @Nonnull Authentication authentication) throws RemoteInvocationException { + public DataMap getRawAspect( + @Nonnull String urn, + @Nonnull String aspect, + @Nonnull Long version, + @Nonnull Authentication authentication) + throws RemoteInvocationException { throw new MethodNotSupportedException(); } @Override - public void 
producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event, @Nonnull final Authentication authentication) + public void producePlatformEvent( + @Nonnull String name, + @Nullable String key, + @Nonnull PlatformEvent event, + @Nonnull final Authentication authentication) throws Exception { final PlatformDoProducePlatformEventRequestBuilder requestBuilder = - PLATFORM_REQUEST_BUILDERS.actionProducePlatformEvent() - .nameParam(name) - .eventParam(event); + PLATFORM_REQUEST_BUILDERS.actionProducePlatformEvent().nameParam(name).eventParam(event); if (key != null) { requestBuilder.keyParam(key); } @@ -846,28 +1045,34 @@ public void producePlatformEvent(@Nonnull String name, @Nullable String key, @No @Override public void rollbackIngestion(@Nonnull String runId, @Nonnull final Authentication authentication) throws Exception { - final RunsDoRollbackRequestBuilder requestBuilder = RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false); + final RunsDoRollbackRequestBuilder requestBuilder = + RUNS_REQUEST_BUILDERS.actionRollback().runIdParam(runId).dryRunParam(false); sendClientRequest(requestBuilder, authentication); } - // TODO: Refactor QueryUtils inside of metadata-io to extract these methods into a single shared library location. - // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL condition (default). + // TODO: Refactor QueryUtils inside of metadata-io to extract these methods into a single shared + // library location. + // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL + // condition (default). @Nonnull public static Filter newFilter(@Nullable Map params) { if (params == null) { return new Filter().setOr(new ConjunctiveCriterionArray()); } - CriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue(), Condition.EQUAL)) - .collect(Collectors.toCollection(CriterionArray::new)); - return new Filter().setOr( - new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); + CriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue(), Condition.EQUAL)) + .collect(Collectors.toCollection(CriterionArray::new)); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); } @Nonnull - public static Criterion newCriterion(@Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { + public static Criterion newCriterion( + @Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { return new Criterion().setField(field).setValue(value).setCondition(condition); } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java index 94067abd0cf65..babb290655d3d 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java @@ -7,85 +7,95 @@ import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import com.linkedin.r2.RemoteInvocationException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import 
java.net.URISyntaxException; import java.util.Map; import java.util.Set; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -/** - * Adds entity/aspect cache and assumes system authentication - */ +/** Adds entity/aspect cache and assumes system authentication */ public interface SystemEntityClient extends EntityClient { - EntityClientCache getEntityClientCache(); - Authentication getSystemAuthentication(); + EntityClientCache getEntityClientCache(); + + Authentication getSystemAuthentication(); - /** - * Builds the cache - * @param systemAuthentication system authentication - * @param cacheConfig cache configuration - * @return the cache - */ - default EntityClientCache buildEntityClientCache(Class metricClazz, Authentication systemAuthentication, EntityClientCacheConfig cacheConfig) { - return EntityClientCache.builder() - .config(cacheConfig) - .loadFunction((Set urns, Set aspectNames) -> { - try { - String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); + /** + * Builds the cache + * + * @param systemAuthentication system authentication + * @param cacheConfig cache configuration + * @return the cache + */ + default EntityClientCache buildEntityClientCache( + Class metricClazz, + Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + return EntityClientCache.builder() + .config(cacheConfig) + .loadFunction( + (Set urns, Set aspectNames) -> { + try { + String entityName = urns.stream().findFirst().map(Urn::getEntityType).get(); - if (urns.stream().anyMatch(urn -> !urn.getEntityType().equals(entityName))) { - throw new IllegalArgumentException("Urns must be of the same entity type. RestliEntityClient API limitation."); - } + if (urns.stream().anyMatch(urn -> !urn.getEntityType().equals(entityName))) { + throw new IllegalArgumentException( + "Urns must be of the same entity type. RestliEntityClient API limitation."); + } - return batchGetV2(entityName, urns, aspectNames, systemAuthentication); - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException(e); - } - }).build(metricClazz); - } + return batchGetV2(entityName, urns, aspectNames, systemAuthentication); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .build(metricClazz); + } - /** - * Get an entity by urn with the given aspects - * @param urn the id of the entity - * @param aspectNames aspects of the entity - * @return response object - * @throws RemoteInvocationException - * @throws URISyntaxException - */ - @Nullable - default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set aspectNames) - throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().getV2(urn, aspectNames); - } + /** + * Get an entity by urn with the given aspects + * + * @param urn the id of the entity + * @param aspectNames aspects of the entity + * @return response object + * @throws RemoteInvocationException + * @throws URISyntaxException + */ + @Nullable + default EntityResponse getV2(@Nonnull Urn urn, @Nonnull Set aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getEntityClientCache().getV2(urn, aspectNames); + } - /** - * Batch get a set of aspects for a single entity type, multiple ids with the given aspects. 
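Because SystemEntityClient supplies the system authentication and routes reads through the entity client cache, the caller-side surface stays small. A sketch, assuming the systemClient instance, a dataset urn, and these aspect names:

// Hypothetical cached read; a cache miss falls through to batchGetV2 via the load function above.
EntityResponse response =
    systemClient.getV2(urn, Set.of("datasetProperties", "ownership"));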
- * - * @param urns the urns of the entities to batch get - * @param aspectNames the aspect names to batch get - * @throws RemoteInvocationException - */ - @Nonnull - default Map batchGetV2(@Nonnull Set urns, @Nonnull Set aspectNames) - throws RemoteInvocationException, URISyntaxException { - return getEntityClientCache().batchGetV2(urns, aspectNames); - } + /** + * Batch get a set of aspects for a single entity type, multiple ids with the given aspects. + * + * @param urns the urns of the entities to batch get + * @param aspectNames the aspect names to batch get + * @throws RemoteInvocationException + */ + @Nonnull + default Map batchGetV2( + @Nonnull Set urns, @Nonnull Set aspectNames) + throws RemoteInvocationException, URISyntaxException { + return getEntityClientCache().batchGetV2(urns, aspectNames); + } - default void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) throws Exception { - producePlatformEvent(name, key, event, getSystemAuthentication()); - } + default void producePlatformEvent( + @Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) throws Exception { + producePlatformEvent(name, key, event, getSystemAuthentication()); + } - default boolean exists(@Nonnull Urn urn) throws RemoteInvocationException { - return exists(urn, getSystemAuthentication()); - } + default boolean exists(@Nonnull Urn urn) throws RemoteInvocationException { + return exists(urn, getSystemAuthentication()); + } - default String ingestProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) throws RemoteInvocationException { - return ingestProposal(metadataChangeProposal, getSystemAuthentication(), async); - } + default String ingestProposal( + @Nonnull final MetadataChangeProposal metadataChangeProposal, final boolean async) + throws RemoteInvocationException { + return ingestProposal(metadataChangeProposal, getSystemAuthentication(), async); + } - default void setWritable(boolean canWrite) throws RemoteInvocationException { - setWritable(canWrite, getSystemAuthentication()); - } + default void setWritable(boolean canWrite) throws RemoteInvocationException { + setWritable(canWrite, getSystemAuthentication()); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java index f3c343534209c..a2f5596af9f4e 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemRestliEntityClient.java @@ -4,22 +4,24 @@ import com.linkedin.metadata.config.cache.client.EntityClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.restli.client.Client; -import lombok.Getter; - import javax.annotation.Nonnull; +import lombok.Getter; -/** - * Restli backed SystemEntityClient - */ +/** Restli backed SystemEntityClient */ @Getter public class SystemRestliEntityClient extends RestliEntityClient implements SystemEntityClient { - private final EntityClientCache entityClientCache; - private final Authentication systemAuthentication; + private final EntityClientCache entityClientCache; + private final Authentication systemAuthentication; - public SystemRestliEntityClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication 
systemAuthentication, EntityClientCacheConfig cacheConfig) { - super(restliClient, backoffPolicy, retryCount); - this.systemAuthentication = systemAuthentication; - this.entityClientCache = buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); - } + public SystemRestliEntityClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount, + Authentication systemAuthentication, + EntityClientCacheConfig cacheConfig) { + super(restliClient, backoffPolicy, retryCount); + this.systemAuthentication = systemAuthentication; + this.entityClientCache = + buildEntityClientCache(SystemRestliEntityClient.class, systemAuthentication, cacheConfig); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java index 850847bfd262a..747e1e0e1a288 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java @@ -2,62 +2,66 @@ import com.datahub.authentication.Authentication; import com.linkedin.common.EntityRelationships; - import com.linkedin.common.WindowDuration; import com.linkedin.common.client.BaseClient; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; import com.linkedin.parseq.retry.backoff.BackoffPolicy; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.restli.client.Client; - import java.net.URISyntaxException; import javax.annotation.Nonnull; - public class UsageClient extends BaseClient { - private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = - new UsageStatsRequestBuilders(); - - private final UsageClientCache usageClientCache; - - public UsageClient(@Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, int retryCount, - Authentication systemAuthentication, UsageClientCacheConfig cacheConfig) { - super(restliClient, backoffPolicy, retryCount); - this.usageClientCache = UsageClientCache.builder() - .config(cacheConfig) - .loadFunction((String resource, UsageTimeRange range) -> { - try { - return getUsageStats(resource, range, systemAuthentication); - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException(e); - } - }).build(); - } - - /** - * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. - * Using cache and system authentication. - * Validate permissions before use! - */ - @Nonnull - public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - return usageClientCache.getUsageStats(resource, range); - } - - /** - * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. 
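A caller-side sketch of the cached getUsageStats lookup. The dataset urn string and the UsageTimeRange constant are assumptions, and, per the javadoc, permission checks remain the caller's responsibility:

// Hypothetical cached usage query over the trailing month.
UsageQueryResult result =
    usageClient.getUsageStats(
        "urn:li:dataset:(urn:li:dataPlatform:snowflake,db.schema.table,PROD)",
        UsageTimeRange.MONTH);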
- */ - @Nonnull - private UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range, - @Nonnull Authentication authentication) - throws RemoteInvocationException, URISyntaxException { - - final UsageStatsDoQueryRangeRequestBuilder requestBuilder = USAGE_STATS_REQUEST_BUILDERS - .actionQueryRange() - .resourceParam(resource) - .durationParam(WindowDuration.DAY) - .rangeFromEndParam(range); - return sendClientRequest(requestBuilder, authentication).getEntity(); - } + private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = + new UsageStatsRequestBuilders(); + + private final UsageClientCache usageClientCache; + + public UsageClient( + @Nonnull final Client restliClient, + @Nonnull final BackoffPolicy backoffPolicy, + int retryCount, + Authentication systemAuthentication, + UsageClientCacheConfig cacheConfig) { + super(restliClient, backoffPolicy, retryCount); + this.usageClientCache = + UsageClientCache.builder() + .config(cacheConfig) + .loadFunction( + (String resource, UsageTimeRange range) -> { + try { + return getUsageStats(resource, range, systemAuthentication); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + }) + .build(); + } + + /** + * Gets usage stats for the given resource and time range, using the client-side cache and + * system authentication. Validate permissions before use! + */ + @Nonnull + public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + return usageClientCache.getUsageStats(resource, range); + } + + /** Fetches usage stats for the given resource and time range directly from the usage endpoint. */ + @Nonnull + private UsageQueryResult getUsageStats( + @Nonnull String resource, + @Nonnull UsageTimeRange range, + @Nonnull Authentication authentication) + throws RemoteInvocationException, URISyntaxException { + + final UsageStatsDoQueryRangeRequestBuilder requestBuilder = + USAGE_STATS_REQUEST_BUILDERS + .actionQueryRange() + .resourceParam(resource) + .durationParam(WindowDuration.DAY) + .rangeFromEndParam(range); + return sendClientRequest(requestBuilder, authentication).getEntity(); + } } diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java index 10a1ebb6dcccb..e4c7ed0b674c0 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java @@ -4,70 +4,68 @@ import com.github.benmanes.caffeine.cache.Weigher; import com.linkedin.common.client.ClientCache; import com.linkedin.metadata.config.cache.client.UsageClientCacheConfig; -import lombok.Builder; -import lombok.Data; -import lombok.NonNull; - -import javax.annotation.Nonnull; import java.util.Map; import java.util.function.BiFunction; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.StreamSupport; - +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Data; +import lombok.NonNull; @Builder public class UsageClientCache { - @NonNull - private UsageClientCacheConfig config; - @NonNull - private final ClientCache cache; - @NonNull - private BiFunction loadFunction; + @NonNull private UsageClientCacheConfig config; + @NonNull private final ClientCache cache; + @NonNull private BiFunction loadFunction; - public UsageQueryResult 
getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { - if (config.isEnabled()) { - return cache.get(Key.builder().resource(resource).range(range).build()); - } else { - return loadFunction.apply(resource, range); - } + public UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range) { + if (config.isEnabled()) { + return cache.get(Key.builder().resource(resource).range(range).build()); + } else { + return loadFunction.apply(resource, range); } + } - public static class UsageClientCacheBuilder { + public static class UsageClientCacheBuilder { - private UsageClientCacheBuilder cache(LoadingCache cache) { - return this; - } + private UsageClientCacheBuilder cache(LoadingCache cache) { + return this; + } - public UsageClientCache build() { - // estimate size - Weigher weighByEstimatedSize = (key, value) -> - value.data().toString().getBytes().length; + public UsageClientCache build() { + // estimate size + Weigher weighByEstimatedSize = + (key, value) -> value.data().toString().getBytes().length; - // batch loads data from usage client - Function, Map> loader = (Iterable keys) -> - StreamSupport.stream(keys.spliterator(), true) - .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + // batch loads data from usage client + Function, Map> loader = + (Iterable keys) -> + StreamSupport.stream(keys.spliterator(), true) + .map(k -> Map.entry(k, loadFunction.apply(k.getResource(), k.getRange()))) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - // default ttl only - BiFunction ttlSeconds = (config, key) -> config.getDefaultTTLSeconds(); + // default ttl only + BiFunction ttlSeconds = + (config, key) -> config.getDefaultTTLSeconds(); - cache = ClientCache.builder() - .weigher(weighByEstimatedSize) - .config(config) - .loadFunction(loader) - .ttlSecondsFunction(ttlSeconds) - .build(UsageClientCache.class); + cache = + ClientCache.builder() + .weigher(weighByEstimatedSize) + .config(config) + .loadFunction(loader) + .ttlSecondsFunction(ttlSeconds) + .build(UsageClientCache.class); - return new UsageClientCache(config, cache, loadFunction); - } + return new UsageClientCache(config, cache, loadFunction); } + } - @Data - @Builder - protected static class Key { - private final String resource; - private final UsageTimeRange range; - } + @Data + @Builder + protected static class Key { + private final String resource; + private final UsageTimeRange range; + } } diff --git a/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java b/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java index c4109f1daedb3..1f8342170a2ff 100644 --- a/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java +++ b/metadata-service/restli-client/src/test/java/com/linkedin/common/client/BaseClientTest.java @@ -1,5 +1,12 @@ package com.linkedin.common.client; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -15,62 +22,59 @@ import com.linkedin.restli.client.ResponseFuture; import org.testng.annotations.Test; -import static 
org.mockito.Mockito.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.testng.Assert.assertThrows; - public class BaseClientTest { - final static Authentication AUTH = new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); - - @Test - public void testZeroRetry() throws RemoteInvocationException { - MetadataChangeProposal mcp = new MetadataChangeProposal(); + static final Authentication AUTH = + new Authentication(new Actor(ActorType.USER, "fake"), "foo:bar"); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); - ResponseFuture mockFuture = mock(ResponseFuture.class); - when(mockRestliClient.sendRequest(any(ActionRequest.class))).thenReturn(mockFuture); + @Test + public void testZeroRetry() throws RemoteInvocationException { + MetadataChangeProposal mcp = new MetadataChangeProposal(); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 0); - testClient.sendClientRequest(testRequestBuilder, AUTH); - // Expected 1 actual try and 0 retries - verify(mockRestliClient).sendRequest(any(ActionRequest.class)); - } + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); + ResponseFuture mockFuture = mock(ResponseFuture.class); + when(mockRestliClient.sendRequest(any(ActionRequest.class))).thenReturn(mockFuture); - @Test - public void testMultipleRetries() throws RemoteInvocationException { - MetadataChangeProposal mcp = new MetadataChangeProposal(); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); - ResponseFuture mockFuture = mock(ResponseFuture.class); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 0); + testClient.sendClientRequest(testRequestBuilder, AUTH); + // Expected 1 actual try and 0 retries + verify(mockRestliClient).sendRequest(any(ActionRequest.class)); + } - when(mockRestliClient.sendRequest(any(ActionRequest.class))) - .thenThrow(new RuntimeException()) - .thenReturn(mockFuture); + @Test + public void testMultipleRetries() throws RemoteInvocationException { + MetadataChangeProposal mcp = new MetadataChangeProposal(); + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); + ResponseFuture mockFuture = mock(ResponseFuture.class); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); - testClient.sendClientRequest(testRequestBuilder, AUTH); - // Expected 1 actual try and 1 retries - verify(mockRestliClient, times(2)).sendRequest(any(ActionRequest.class)); - } + when(mockRestliClient.sendRequest(any(ActionRequest.class))) + .thenThrow(new RuntimeException()) + .thenReturn(mockFuture); - @Test - public void testNonRetry() { - MetadataChangeProposal mcp = new MetadataChangeProposal(); - AspectsDoIngestProposalRequestBuilder testRequestBuilder = - new 
AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); - Client mockRestliClient = mock(Client.class); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); + testClient.sendClientRequest(testRequestBuilder, AUTH); + // Expected 1 actual try and 1 retry + verify(mockRestliClient, times(2)).sendRequest(any(ActionRequest.class)); + } - when(mockRestliClient.sendRequest(any(ActionRequest.class))) - .thenThrow(new RuntimeException(new RequiredFieldNotPresentException("value"))); + @Test + public void testNonRetry() { + MetadataChangeProposal mcp = new MetadataChangeProposal(); + AspectsDoIngestProposalRequestBuilder testRequestBuilder = + new AspectsRequestBuilders().actionIngestProposal().proposalParam(mcp).asyncParam("false"); + Client mockRestliClient = mock(Client.class); - RestliEntityClient testClient = new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); - assertThrows(RuntimeException.class, () -> testClient.sendClientRequest(testRequestBuilder, AUTH)); - } + when(mockRestliClient.sendRequest(any(ActionRequest.class))) + .thenThrow(new RuntimeException(new RequiredFieldNotPresentException("value"))); + RestliEntityClient testClient = + new RestliEntityClient(mockRestliClient, new ExponentialBackoff(1), 1); + assertThrows( + RuntimeException.class, () -> testClient.sendClientRequest(testRequestBuilder, AUTH)); + } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java index 98ecf6142ef2c..edd8270e87210 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/filter/RestliLoggingFilter.java @@ -20,11 +20,9 @@ public CompletableFuture onRequest(final FilterRequestContext requestConte return CompletableFuture.completedFuture(null); } - @Override public CompletableFuture onResponse( - final FilterRequestContext requestContext, - final FilterResponseContext responseContext) { + final FilterRequestContext requestContext, final FilterResponseContext responseContext) { logResponse(requestContext, responseContext); return CompletableFuture.completedFuture(null); } @@ -40,8 +38,7 @@ public CompletableFuture onError( } private void logResponse( - final FilterRequestContext requestContext, - final FilterResponseContext responseContext) { + final FilterRequestContext requestContext, final FilterResponseContext responseContext) { long startTime = (long) requestContext.getFilterScratchpad().get(START_TIME); long endTime = System.currentTimeMillis(); long duration = endTime - startTime; @@ -54,5 +51,4 @@ private void logResponse( log.info("{} {} - {} - {} - {}ms", httpMethod, uri, method, status.getCode(), duration); } - } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java index 1f19094a74654..b8cbf1ceb6794 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java @@ -22,10 +22,7 @@ import javax.inject.Named; import 
lombok.extern.slf4j.Slf4j; - -/** - * Rest.li entry point: /analytics - */ +/** Rest.li entry point: /analytics */ @Slf4j @RestLiSimpleResource(name = "analytics", namespace = "com.linkedin.analytics") public class Analytics extends SimpleResourceTemplate { @@ -35,6 +32,7 @@ public class Analytics extends SimpleResourceTemplate getTimeseriesStats( @ActionParam(PARAM_METRICS) @Nonnull AggregationSpec[] aggregationSpecs, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_BUCKETS) @Optional @Nullable GroupingBucket[] groupingBuckets) { - return RestliUtils.toTask(() -> { - log.info("Attempting to query timeseries stats"); - GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); - resp.setEntityName(entityName); - resp.setAspectName(aspectName); - resp.setAggregationSpecs(new AggregationSpecArray(Arrays.asList(aggregationSpecs))); - if (filter != null) { - resp.setFilter(filter); - } - if (groupingBuckets != null) { - resp.setGroupingBuckets(new GroupingBucketArray(Arrays.asList(groupingBuckets))); - } + return RestliUtils.toTask( + () -> { + log.info("Attempting to query timeseries stats"); + GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); + resp.setEntityName(entityName); + resp.setAspectName(aspectName); + resp.setAggregationSpecs(new AggregationSpecArray(Arrays.asList(aggregationSpecs))); + if (filter != null) { + resp.setFilter(filter); + } + if (groupingBuckets != null) { + resp.setGroupingBuckets(new GroupingBucketArray(Arrays.asList(groupingBuckets))); + } - GenericTable aggregatedStatsTable = - _timeseriesAspectService.getAggregatedStats(entityName, aspectName, aggregationSpecs, filter, - groupingBuckets); - resp.setTable(aggregatedStatsTable); - return resp; - }); + GenericTable aggregatedStatsTable = + _timeseriesAspectService.getAggregatedStats( + entityName, aspectName, aggregationSpecs, filter, groupingBuckets); + resp.setTable(aggregatedStatsTable); + return resp; + }); } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index af76af90ce77f..f14dc2e8b2918 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -1,5 +1,10 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.operations.OperationsResource.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -8,10 +13,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.linkedin.aspect.GetTimeseriesAspectValuesResponse; -import com.linkedin.metadata.entity.IngestResult; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.entity.transactions.AspectsBatch; -import com.linkedin.metadata.resources.operations.Utils; import com.linkedin.common.AuditStamp; import 
com.linkedin.common.urn.Urn; import com.linkedin.metadata.aspect.EnvelopedAspectArray; @@ -19,9 +20,13 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.AspectUtils; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.IngestResult; +import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.transactions.AspectsBatch; import com.linkedin.metadata.entity.validation.ValidationException; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.resources.operations.Utils; import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; @@ -52,15 +57,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.operations.OperationsResource.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "aspects", namespace = "com.linkedin.entity") public class AspectResource extends CollectionResourceTaskTemplate { @@ -114,66 +111,101 @@ void setAuthorizer(Authorizer authorizer) { @RestMethod.Get @Nonnull @WithSpan - public Task get(@Nonnull String urnStr, @QueryParam("aspect") @Optional @Nullable String aspectName, - @QueryParam("version") @Optional @Nullable Long version) throws URISyntaxException { + public Task get( + @Nonnull String urnStr, + @QueryParam("aspect") @Optional @Nullable String aspectName, + @QueryParam("version") @Optional @Nullable Long version) + throws URISyntaxException { log.info("GET ASPECT urn: {} aspect: {} version: {}", urnStr, aspectName, version); final Urn urn = Urn.createFromString(urnStr); - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); - } - final VersionedAspect aspect = _entityService.getVersionedAspect(urn, aspectName, version); - if (aspect == null) { - throw RestliUtil.resourceNotFoundException(String.format("Did not find urn: %s aspect: %s version: %s", urn, aspectName, version)); - } - return new AnyRecord(aspect.data()); - }, MetricRegistry.name(this.getClass(), "get")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); + } + final VersionedAspect aspect = + _entityService.getVersionedAspect(urn, 
aspectName, version); + if (aspect == null) { + throw RestliUtil.resourceNotFoundException( + String.format( + "Did not find urn: %s aspect: %s version: %s", urn, aspectName, version)); + } + return new AnyRecord(aspect.data()); + }, + MetricRegistry.name(this.getClass(), "get")); } @Action(name = ACTION_GET_TIMESERIES_ASPECT) @Nonnull @WithSpan public Task getTimeseriesAspectValues( - @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_URN) @Nonnull String urnStr, + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_ASPECT) @Nonnull String aspectName, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, @ActionParam(PARAM_LIMIT) @Optional("10000") int limit, - @ActionParam(PARAM_LATEST_VALUE) @Optional("false") boolean latestValue, // This field is deprecated. + @ActionParam(PARAM_LATEST_VALUE) @Optional("false") + boolean latestValue, // This field is deprecated. @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sort) throws URISyntaxException { + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sort) + throws URISyntaxException { log.info( "Get Timeseries Aspect values for aspect {} for entity {} with startTimeMillis {}, endTimeMillis {} and limit {}.", - aspectName, entityName, startTimeMillis, endTimeMillis, limit); + aspectName, + entityName, + startTimeMillis, + endTimeMillis, + limit); final Urn urn = Urn.createFromString(urnStr); - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get timeseries aspect for " + urn); - } - GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); - response.setEntityName(entityName); - response.setAspectName(aspectName); - if (startTimeMillis != null) { - response.setStartTimeMillis(startTimeMillis); - } - if (endTimeMillis != null) { - response.setEndTimeMillis(endTimeMillis); - } - if (latestValue) { - response.setLimit(1); - } else { - response.setLimit(limit); - } - response.setValues(new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues(urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, sort))); - return response; - }, MetricRegistry.name(this.getClass(), "getTimeseriesAspectValues")); + return RestliUtil.toTask( + () -> { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, + "User is unauthorized to get timeseries aspect for " + urn); + } + GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); + response.setEntityName(entityName); + response.setAspectName(aspectName); + if (startTimeMillis != null) { + 
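For context, a hedged caller-side sketch of the timeseries read this Rest.li action serves, using the getTimeseriesAspectValues client method shown earlier in this patch. The client instance, urn, aspect name, and open-ended null time bounds are assumptions:

// Hypothetical query for up to ten datasetProfile points.
List<EnvelopedAspect> values =
    entityClient.getTimeseriesAspectValues(
        "urn:li:dataset:(urn:li:dataPlatform:hive,db.table,PROD)",
        "dataset",
        "datasetProfile",
        null, // startTimeMillis: open-ended
        null, // endTimeMillis: open-ended
        10, // limit
        null, // filter
        null, // sort
        AUTH);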
response.setStartTimeMillis(startTimeMillis); + } + if (endTimeMillis != null) { + response.setEndTimeMillis(endTimeMillis); + } + if (latestValue) { + response.setLimit(1); + } else { + response.setLimit(limit); + } + response.setValues( + new EnvelopedAspectArray( + _timeseriesAspectService.getAspectValues( + urn, + entityName, + aspectName, + startTimeMillis, + endTimeMillis, + limit, + filter, + sort))); + return response; + }, + MetricRegistry.name(this.getClass(), "getTimeseriesAspectValues")); } @Action(name = ACTION_INGEST_PROPOSAL) @@ -181,7 +213,8 @@ public Task getTimeseriesAspectValues( @WithSpan public Task ingestProposal( @ActionParam(PARAM_PROPOSAL) @Nonnull MetadataChangeProposal metadataChangeProposal, - @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) throws URISyntaxException { + @ActionParam(PARAM_ASYNC) @Optional(UNSET) String async) + throws URISyntaxException { log.info("INGEST PROPOSAL proposal: {}", metadataChangeProposal); final boolean asyncBool; @@ -192,85 +225,111 @@ public Task ingestProposal( } Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = _entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); - Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); + com.linkedin.metadata.models.EntitySpec entitySpec = + _entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); + Urn urn = + EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); } String actorUrnStr = authentication.getActor().toUrnStr(); - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); - return RestliUtil.toTask(() -> { - log.debug("Proposal: {}", metadataChangeProposal); - try { - final AspectsBatch batch; - if (asyncBool) { - // if async we'll expand the getAdditionalChanges later, no need to do this early - batch = AspectsBatchImpl.builder() - .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry()) - .build(); - } else { - Stream proposalStream = Stream.concat(Stream.of(metadataChangeProposal), - AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService).stream()); + return RestliUtil.toTask( + () -> { + log.debug("Proposal: {}", metadataChangeProposal); + try { + final AspectsBatch batch; + if (asyncBool) { + // if async we'll expand the getAdditionalChanges later, no need to do this early + batch = + AspectsBatchImpl.builder() + .mcps(List.of(metadataChangeProposal), _entityService.getEntityRegistry()) + .build(); + } else { + Stream proposalStream = + Stream.concat( + Stream.of(metadataChangeProposal), + 
AspectUtils.getAdditionalChanges(metadataChangeProposal, _entityService)
+                        .stream());
 
-      batch = AspectsBatchImpl.builder()
-              .mcps(proposalStream.collect(Collectors.toList()), _entityService.getEntityRegistry())
-              .build();
-    }
+            batch =
+                AspectsBatchImpl.builder()
+                    .mcps(
+                        proposalStream.collect(Collectors.toList()),
+                        _entityService.getEntityRegistry())
+                    .build();
+          }
 
-    Set<IngestResult> results =
-        _entityService.ingestProposal(batch, auditStamp, asyncBool);
+          Set<IngestResult> results = _entityService.ingestProposal(batch, auditStamp, asyncBool);
 
-    IngestResult one = results.stream()
-        .findFirst()
-        .get();
+          IngestResult one = results.stream().findFirst().get();
 
-    // Update runIds, only works for existing documents, so ES document must exist
-    Urn resultUrn = one.getUrn();
-    if (one.isProcessedMCL() || one.isUpdate()) {
-      tryIndexRunId(resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService);
-    }
-    return resultUrn.toString();
-  } catch (ValidationException e) {
-    throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage());
-  }
-}, MetricRegistry.name(this.getClass(), "ingestProposal"));
+          // Update runIds, only works for existing documents, so ES document must exist
+          Urn resultUrn = one.getUrn();
+          if (one.isProcessedMCL() || one.isUpdate()) {
+            tryIndexRunId(
+                resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService);
+          }
+          return resultUrn.toString();
+        } catch (ValidationException e) {
+          throw new RestLiServiceException(HttpStatus.S_422_UNPROCESSABLE_ENTITY, e.getMessage());
+        }
+      },
+      MetricRegistry.name(this.getClass(), "ingestProposal"));
   }
 
   @Action(name = ACTION_GET_COUNT)
   @Nonnull
   @WithSpan
-  public Task<Integer> getCount(@ActionParam(PARAM_ASPECT) @Nonnull String aspectName,
-      @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) {
-    return RestliUtil.toTask(() -> {
-      Authentication authentication = AuthenticationContext.getAuthentication();
-      if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-          && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE),
-          (EntitySpec) null)) {
-        throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts.");
-      }
-      return _entityService.getCountAspect(aspectName, urnLike);
-    }, MetricRegistry.name(this.getClass(), "getCount"));
+  public Task<Integer> getCount(
+      @ActionParam(PARAM_ASPECT) @Nonnull String aspectName,
+      @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) {
+    return RestliUtil.toTask(
+        () -> {
+          Authentication authentication = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  authentication,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE),
+                  (EntitySpec) null)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts.");
+          }
+          return _entityService.getCountAspect(aspectName, urnLike);
+        },
+        MetricRegistry.name(this.getClass(), "getCount"));
   }
 
   @Action(name = ACTION_RESTORE_INDICES)
   @Nonnull
   @WithSpan
-  public Task<String> restoreIndices(@ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName,
-      @ActionParam(PARAM_URN) @Optional @Nullable String urn,
-      @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike,
-      @ActionParam("start") @Optional @Nullable Integer start,
-      @ActionParam("batchSize") @Optional @Nullable Integer batchSize
-  ) {
-    return RestliUtil.toTask(() -> {
-      return 
Utils.restoreIndices(aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService);
-    }, MetricRegistry.name(this.getClass(), "restoreIndices"));
+  public Task<String> restoreIndices(
+      @ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName,
+      @ActionParam(PARAM_URN) @Optional @Nullable String urn,
+      @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike,
+      @ActionParam("start") @Optional @Nullable Integer start,
+      @ActionParam("batchSize") @Optional @Nullable Integer batchSize) {
+    return RestliUtil.toTask(
+        () -> {
+          return Utils.restoreIndices(
+              aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService);
+        },
+        MetricRegistry.name(this.getClass(), "restoreIndices"));
   }
 
-  private static void tryIndexRunId(final Urn urn, final @Nullable SystemMetadata systemMetadata,
-      final EntitySearchService entitySearchService) {
+  private static void tryIndexRunId(
+      final Urn urn,
+      final @Nullable SystemMetadata systemMetadata,
+      final EntitySearchService entitySearchService) {
     if (systemMetadata != null && systemMetadata.hasRunId()) {
       entitySearchService.appendRunId(urn.getEntityType(), urn, systemMetadata.getRunId());
     }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java
index 9bab846d1bdcc..294ded8a1e255 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java
@@ -1,10 +1,13 @@
 package com.linkedin.metadata.resources.entity;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.resources.restli.RestliUtils.*;
+
 import com.codahale.metrics.MetricRegistry;
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationContext;
-import com.datahub.plugins.auth.authorization.Authorizer;
 import com.datahub.authorization.EntitySpec;
+import com.datahub.plugins.auth.authorization.Authorizer;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.Urn;
@@ -53,16 +56,11 @@
 import javax.inject.Named;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.resources.restli.RestliUtils.*;
-
-
-/**
- * resource for showing information and rolling back runs
- */
+/** resource for showing information and rolling back runs */
 @Slf4j
 @RestLiCollection(name = "runs", namespace = "com.linkedin.entity")
-public class BatchIngestionRunResource extends CollectionResourceTaskTemplate {
+public class BatchIngestionRunResource
+    extends CollectionResourceTaskTemplate {
 
   private static final Integer DEFAULT_OFFSET = 0;
   private static final Integer DEFAULT_PAGE_SIZE = 100;
@@ -91,173 +89,239 @@ public class BatchIngestionRunResource extends CollectionResourceTaskTemplate
 
   @Action(name = "rollback")
   @Nonnull
   @WithSpan
-  public Task<RollbackResponse> rollback(@ActionParam("runId") @Nonnull String runId,
+  public Task<RollbackResponse> rollback(
+      @ActionParam("runId") @Nonnull String runId,
       @ActionParam("dryRun") @Optional Boolean dryRun,
       @Deprecated @ActionParam("hardDelete") @Optional Boolean hardDelete,
-      @ActionParam("safe") @Optional Boolean safe) throws Exception {
+      @ActionParam("safe") @Optional Boolean safe)
+      throws Exception {
     log.info("ROLLBACK RUN runId: {} dry run: {}", runId, 
dryRun);
-    boolean doHardDelete = safe != null ? !safe : hardDelete != null ? hardDelete : DEFAULT_HARD_DELETE;
+    boolean doHardDelete =
+        safe != null ? !safe : hardDelete != null ? hardDelete : DEFAULT_HARD_DELETE;
     if (safe != null && hardDelete != null) {
-      log.warn("Both Safe & hardDelete flags were defined, honouring safe flag as hardDelete is deprecated");
+      log.warn(
+          "Both Safe & hardDelete flags were defined, honouring safe flag as hardDelete is deprecated");
     }
     try {
-      return RestliUtil.toTask(() -> {
-        if (runId.equals(DEFAULT_RUN_ID)) {
-          throw new IllegalArgumentException(String.format(
-              "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id",
-              runId));
-        }
-        if (!dryRun) {
-          updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS);
-        }
-
-        RollbackResponse response = new RollbackResponse();
-        List<AspectRowSummary> aspectRowsToDelete;
-        aspectRowsToDelete = _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE);
-        Set<String> urns = aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet();
-        List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream()
-            .map(UrnUtils::getUrn)
-            .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-            .collect(Collectors.toList());
-        Authentication auth = AuthenticationContext.getAuthentication();
-        if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-            && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), resourceSpecs)) {
-          throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-              "User is unauthorized to delete entities.");
-        }
-        log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
-        if (dryRun) {
-
-          final Map<Boolean, List<AspectRowSummary>> aspectsSplitByIsKeyAspects =
-              aspectRowsToDelete.stream().collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect));
-
-          final List<AspectRowSummary> keyAspects = aspectsSplitByIsKeyAspects.get(true);
-
-          long entitiesDeleted = keyAspects.size();
-          long aspectsReverted = aspectRowsToDelete.size();
-
-          final long affectedEntities =
-              aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size();
-
-          final AspectRowSummaryArray rowSummaries =
-              new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size())));
-
-          // If we are soft deleting, remove key aspects from count of aspects being deleted
-          if (!doHardDelete) {
-            aspectsReverted -= keyAspects.size();
-            rowSummaries.removeIf(AspectRowSummary::isKeyAspect);
-          }
-          // Compute the aspects that exist referencing the key aspects we are deleting
-          final List<AspectRowSummary> affectedAspectsList = keyAspects.stream()
-              .map((AspectRowSummary urn) -> _systemMetadataService.findByUrn(urn.getUrn(), false, 0,
-                  ESUtils.MAX_RESULT_SIZE))
-              .flatMap(List::stream)
-              .filter(row -> !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName()
-                  .equals(Constants.STATUS_ASPECT_NAME))
-              .collect(Collectors.toList());
-
-          long affectedAspects = affectedAspectsList.size();
-          long unsafeEntitiesCount =
-              affectedAspectsList.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size();
-
-          final List<UnsafeEntityInfo> unsafeEntityInfos =
-              affectedAspectsList.stream().map(AspectRowSummary::getUrn).distinct().map(urn -> {
-                UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo();
-                unsafeEntityInfo.setUrn(urn);
-                return unsafeEntityInfo;
-              })
-              // Return at most 1 million rows
-              
.limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE).collect(Collectors.toList()); - - return response.setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - .setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); - } - - RollbackRunResult rollbackRunResult = _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - final List deletedRows = rollbackRunResult.getRowsRolledBack(); - int rowsDeletedFromEntityDeletion = rollbackRunResult.getRowsDeletedFromEntityDeletion(); - - // since elastic limits how many rows we can access at once, we need to iteratively delete - while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - aspectRowsToDelete = _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); - log.info("{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); - log.info("deleting..."); - rollbackRunResult = _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); - deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); - rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); - } - - // Rollback timeseries aspects - DeleteAspectValuesResult timeseriesRollbackResult = _timeseriesAspectService.rollbackTimeseriesAspects(runId); - rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); - - log.info("finished deleting {} rows", deletedRows.size()); - int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; - - final Map> aspectsSplitByIsKeyAspects = - aspectRowsToDelete.stream().collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); - - final List keyAspects = aspectsSplitByIsKeyAspects.get(true); - - final long entitiesDeleted = keyAspects.size(); - final long affectedEntities = - deletedRows.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final AspectRowSummaryArray rowSummaries = - new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); - - log.info("computing aspects affected by this rollback..."); - // Compute the aspects that exist referencing the key aspects we are deleting - final List affectedAspectsList = keyAspects.stream() - .map((AspectRowSummary urn) -> _systemMetadataService.findByUrn(urn.getUrn(), false, 0, - ESUtils.MAX_RESULT_SIZE)) - .flatMap(List::stream) - .filter(row -> !row.getRunId().equals(runId) && !row.isKeyAspect() && !row.getAspectName() - .equals(Constants.STATUS_ASPECT_NAME)) - .collect(Collectors.toList()); - - long affectedAspects = affectedAspectsList.size(); - long unsafeEntitiesCount = - affectedAspectsList.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet().size(); - - final List unsafeEntityInfos = - affectedAspectsList.stream().map(AspectRowSummary::getUrn).distinct().map(urn -> { - UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); - unsafeEntityInfo.setUrn(urn); - return unsafeEntityInfo; - }) - // Return at most 1 million rows - .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE).collect(Collectors.toList()); - - log.info("calculation done."); - - updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); - - return response.setAspectsAffected(affectedAspects) - .setAspectsReverted(aspectsReverted) - .setEntitiesAffected(affectedEntities) - 
.setEntitiesDeleted(entitiesDeleted) - .setUnsafeEntitiesCount(unsafeEntitiesCount) - .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) - .setAspectRowSummaries(rowSummaries); - }, MetricRegistry.name(this.getClass(), "rollback")); + return RestliUtil.toTask( + () -> { + if (runId.equals(DEFAULT_RUN_ID)) { + throw new IllegalArgumentException( + String.format( + "%s is a default run-id provided for non labeled ingestion runs. You cannot delete using this reserved run-id", + runId)); + } + if (!dryRun) { + updateExecutionRequestStatus(runId, ROLLING_BACK_STATUS); + } + + RollbackResponse response = new RollbackResponse(); + List aspectRowsToDelete; + aspectRowsToDelete = + _systemMetadataService.findByRunId(runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); + Set urns = + aspectRowsToDelete.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet(); + List> resourceSpecs = + urns.stream() + .map(UrnUtils::getUrn) + .map( + urn -> + java.util.Optional.of( + new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); + Authentication auth = AuthenticationContext.getAuthentication(); + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities."); + } + log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + if (dryRun) { + + final Map> aspectsSplitByIsKeyAspects = + aspectRowsToDelete.stream() + .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); + + final List keyAspects = aspectsSplitByIsKeyAspects.get(true); + + long entitiesDeleted = keyAspects.size(); + long aspectsReverted = aspectRowsToDelete.size(); + + final long affectedEntities = + aspectRowsToDelete.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final AspectRowSummaryArray rowSummaries = + new AspectRowSummaryArray( + aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); + + // If we are soft deleting, remove key aspects from count of aspects being deleted + if (!doHardDelete) { + aspectsReverted -= keyAspects.size(); + rowSummaries.removeIf(AspectRowSummary::isKeyAspect); + } + // Compute the aspects that exist referencing the key aspects we are deleting + final List affectedAspectsList = + keyAspects.stream() + .map( + (AspectRowSummary urn) -> + _systemMetadataService.findByUrn( + urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) + .flatMap(List::stream) + .filter( + row -> + !row.getRunId().equals(runId) + && !row.isKeyAspect() + && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) + .collect(Collectors.toList()); + + long affectedAspects = affectedAspectsList.size(); + long unsafeEntitiesCount = + affectedAspectsList.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final List unsafeEntityInfos = + affectedAspectsList.stream() + .map(AspectRowSummary::getUrn) + .distinct() + .map( + urn -> { + UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); + unsafeEntityInfo.setUrn(urn); + return unsafeEntityInfo; + }) + // Return at most 1 million rows + .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + return response + .setAspectsAffected(affectedAspects) + .setAspectsReverted(aspectsReverted) + 
.setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + } + + RollbackRunResult rollbackRunResult = + _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); + final List deletedRows = rollbackRunResult.getRowsRolledBack(); + int rowsDeletedFromEntityDeletion = + rollbackRunResult.getRowsDeletedFromEntityDeletion(); + + // since elastic limits how many rows we can access at once, we need to iteratively + // delete + while (aspectRowsToDelete.size() >= ELASTIC_MAX_PAGE_SIZE) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + aspectRowsToDelete = + _systemMetadataService.findByRunId( + runId, doHardDelete, 0, ESUtils.MAX_RESULT_SIZE); + log.info( + "{} remaining rows to delete...", stringifyRowCount(aspectRowsToDelete.size())); + log.info("deleting..."); + rollbackRunResult = + _entityService.rollbackRun(aspectRowsToDelete, runId, doHardDelete); + deletedRows.addAll(rollbackRunResult.getRowsRolledBack()); + rowsDeletedFromEntityDeletion += rollbackRunResult.getRowsDeletedFromEntityDeletion(); + } + + // Rollback timeseries aspects + DeleteAspectValuesResult timeseriesRollbackResult = + _timeseriesAspectService.rollbackTimeseriesAspects(runId); + rowsDeletedFromEntityDeletion += timeseriesRollbackResult.getNumDocsDeleted(); + + log.info("finished deleting {} rows", deletedRows.size()); + int aspectsReverted = deletedRows.size() + rowsDeletedFromEntityDeletion; + + final Map> aspectsSplitByIsKeyAspects = + aspectRowsToDelete.stream() + .collect(Collectors.partitioningBy(AspectRowSummary::isKeyAspect)); + + final List keyAspects = aspectsSplitByIsKeyAspects.get(true); + + final long entitiesDeleted = keyAspects.size(); + final long affectedEntities = + deletedRows.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final AspectRowSummaryArray rowSummaries = + new AspectRowSummaryArray( + aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))); + + log.info("computing aspects affected by this rollback..."); + // Compute the aspects that exist referencing the key aspects we are deleting + final List affectedAspectsList = + keyAspects.stream() + .map( + (AspectRowSummary urn) -> + _systemMetadataService.findByUrn( + urn.getUrn(), false, 0, ESUtils.MAX_RESULT_SIZE)) + .flatMap(List::stream) + .filter( + row -> + !row.getRunId().equals(runId) + && !row.isKeyAspect() + && !row.getAspectName().equals(Constants.STATUS_ASPECT_NAME)) + .collect(Collectors.toList()); + + long affectedAspects = affectedAspectsList.size(); + long unsafeEntitiesCount = + affectedAspectsList.stream() + .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) + .keySet() + .size(); + + final List unsafeEntityInfos = + affectedAspectsList.stream() + .map(AspectRowSummary::getUrn) + .distinct() + .map( + urn -> { + UnsafeEntityInfo unsafeEntityInfo = new UnsafeEntityInfo(); + unsafeEntityInfo.setUrn(urn); + return unsafeEntityInfo; + }) + // Return at most 1 million rows + .limit(DEFAULT_UNSAFE_ENTITIES_PAGE_SIZE) + .collect(Collectors.toList()); + + log.info("calculation done."); + + updateExecutionRequestStatus(runId, ROLLED_BACK_STATUS); + + return response + .setAspectsAffected(affectedAspects) + .setAspectsReverted(aspectsReverted) + .setEntitiesAffected(affectedEntities) + .setEntitiesDeleted(entitiesDeleted) + .setUnsafeEntitiesCount(unsafeEntitiesCount) + .setUnsafeEntities(new 
UnsafeEntityInfoArray(unsafeEntityInfos)) + .setAspectRowSummaries(rowSummaries); + }, + MetricRegistry.name(this.getClass(), "rollback")); } catch (Exception e) { updateExecutionRequestStatus(runId, ROLLBACK_FAILED_STATUS); - throw new RuntimeException(String.format("There was an issue rolling back ingestion run with runId %s", runId), e); + throw new RuntimeException( + String.format("There was an issue rolling back ingestion run with runId %s", runId), e); } } @@ -279,9 +343,14 @@ private void sleep(Integer seconds) { private void updateExecutionRequestStatus(String runId, String status) { try { - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn( + new ExecutionRequestKey().setId(runId), Constants.EXECUTION_REQUEST_ENTITY_NAME); EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect(executionRequestUrn.getEntityType(), executionRequestUrn, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + _entityService.getLatestEnvelopedAspect( + executionRequestUrn.getEntityType(), + executionRequestUrn, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (aspect == null) { log.warn("Aspect for execution request with runId {} not found", runId); } else { @@ -294,65 +363,82 @@ private void updateExecutionRequestStatus(String runId, String status) { proposal.setAspect(GenericRecordUtils.serializeAspect(requestResult)); proposal.setChangeType(ChangeType.UPSERT); - _entityService.ingestProposal(proposal, - new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()), false); + _entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()), + false); } } catch (Exception e) { - log.error(String.format("Not able to update execution result aspect with runId %s and new status %s.", runId, status), e); + log.error( + String.format( + "Not able to update execution result aspect with runId %s and new status %s.", + runId, status), + e); } } - /** - * Retrieves the value for an entity that is made up of latest versions of specified aspects. - */ + /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */ @Action(name = "list") @Nonnull @WithSpan - public Task list(@ActionParam("pageOffset") @Optional @Nullable Integer pageOffset, + public Task list( + @ActionParam("pageOffset") @Optional @Nullable Integer pageOffset, @ActionParam("pageSize") @Optional @Nullable Integer pageSize, @ActionParam("includeSoft") @Optional @Nullable Boolean includeSoft) { log.info("LIST RUNS offset: {} size: {}", pageOffset, pageSize); - return RestliUtil.toTask(() -> { - List summaries = - _systemMetadataService.listRuns(pageOffset != null ? pageOffset : DEFAULT_OFFSET, - pageSize != null ? pageSize : DEFAULT_PAGE_SIZE, - includeSoft != null ? includeSoft : DEFAULT_INCLUDE_SOFT_DELETED); - - return new IngestionRunSummaryArray(summaries); - }, MetricRegistry.name(this.getClass(), "list")); + return RestliUtil.toTask( + () -> { + List summaries = + _systemMetadataService.listRuns( + pageOffset != null ? pageOffset : DEFAULT_OFFSET, + pageSize != null ? pageSize : DEFAULT_PAGE_SIZE, + includeSoft != null ? 
includeSoft : DEFAULT_INCLUDE_SOFT_DELETED); + + return new IngestionRunSummaryArray(summaries); + }, + MetricRegistry.name(this.getClass(), "list")); } @Action(name = "describe") @Nonnull @WithSpan - public Task describe(@ActionParam("runId") @Nonnull String runId, - @ActionParam("start") Integer start, @ActionParam("count") Integer count, + public Task describe( + @ActionParam("runId") @Nonnull String runId, + @ActionParam("start") Integer start, + @ActionParam("count") Integer count, @ActionParam("includeSoft") @Optional @Nullable Boolean includeSoft, @ActionParam("includeAspect") @Optional @Nullable Boolean includeAspect) { log.info("DESCRIBE RUN runId: {}, start: {}, count: {}", runId, start, count); - return RestliUtil.toTask(() -> { - List summaries = - _systemMetadataService.findByRunId(runId, includeSoft != null && includeSoft, start, count); - - if (includeAspect != null && includeAspect) { - summaries.forEach(summary -> { - Urn urn = UrnUtils.getUrn(summary.getUrn()); - try { - EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect(urn.getEntityType(), urn, summary.getAspectName()); - if (aspect == null) { - log.error("Aspect for summary {} not found", summary); - } else { - summary.setAspect(aspect.getValue()); - } - } catch (Exception e) { - log.error("Error while fetching aspect for summary {}", summary, e); + return RestliUtil.toTask( + () -> { + List summaries = + _systemMetadataService.findByRunId( + runId, includeSoft != null && includeSoft, start, count); + + if (includeAspect != null && includeAspect) { + summaries.forEach( + summary -> { + Urn urn = UrnUtils.getUrn(summary.getUrn()); + try { + EnvelopedAspect aspect = + _entityService.getLatestEnvelopedAspect( + urn.getEntityType(), urn, summary.getAspectName()); + if (aspect == null) { + log.error("Aspect for summary {} not found", summary); + } else { + summary.setAspect(aspect.getValue()); + } + } catch (Exception e) { + log.error("Error while fetching aspect for summary {}", summary, e); + } + }); } - }); - } - return new AspectRowSummaryArray(summaries); - }, MetricRegistry.name(this.getClass(), "describe")); + return new AspectRowSummaryArray(summaries); + }, + MetricRegistry.name(this.getClass(), "describe")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index 3ee98b3244718..ddf5efa5027ca 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -1,5 +1,13 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.validation.ValidationUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.search.utils.SearchUtils.*; +import static com.linkedin.metadata.shared.ValidationUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -80,18 +88,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.maven.artifact.versioning.ComparableVersion; 
-import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.validation.ValidationUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.search.utils.SearchUtils.*; -import static com.linkedin.metadata.shared.ValidationUtils.*; -import static com.linkedin.metadata.utils.PegasusUtils.*; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "entities", namespace = "com.linkedin.entity") public class EntityResource extends CollectionResourceTaskTemplate { @@ -121,6 +118,7 @@ public class EntityResource extends CollectionResourceTaskTemplate get(@Nonnull String urnStr, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task get( + @Nonnull String urnStr, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.info("GET {}", urnStr); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity " + urn); - } - return RestliUtil.toTask(() -> { - final Set projectedAspects = - aspectNames == null ? Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames)); - final Entity entity = _entityService.getEntity(urn, projectedAspects); - if (entity == null) { - throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr)); - } - return new AnyRecord(entity.data()); - }, MetricRegistry.name(this.getClass(), "get")); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); + } + return RestliUtil.toTask( + () -> { + final Set projectedAspects = + aspectNames == null + ? 
Collections.emptySet()
+                  : new HashSet<>(Arrays.asList(aspectNames));
+          final Entity entity = _entityService.getEntity(urn, projectedAspects);
+          if (entity == null) {
+            throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr));
+          }
+          return new AnyRecord(entity.data());
+        },
+        MetricRegistry.name(this.getClass(), "get"));
   }
 
   @RestMethod.BatchGet
   @Nonnull
   @WithSpan
-  public Task<Map<String, AnyRecord>> batchGet(@Nonnull Set<String> urnStrs,
-      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException {
+  public Task<Map<String, AnyRecord>> batchGet(
+      @Nonnull Set<String> urnStrs,
+      @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames)
+      throws URISyntaxException {
     log.info("BATCH GET {}", urnStrs);
     final Set<Urn> urns = new HashSet<>();
     for (final String urnStr : urnStrs) {
       urns.add(Urn.createFromString(urnStr));
     }
-    List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream()
-        .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-        .collect(Collectors.toList());
+    List<java.util.Optional<EntitySpec>> resourceSpecs =
+        urns.stream()
+            .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
+            .collect(Collectors.toList());
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entities: " + urnStrs);
-    }
-    return RestliUtil.toTask(() -> {
-      final Set<String> projectedAspects =
-          aspectNames == null ? Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames));
-      return _entityService.getEntities(urns, projectedAspects)
-          .entrySet()
-          .stream()
-          .collect(
-              Collectors.toMap(entry -> entry.getKey().toString(), entry -> new AnyRecord(entry.getValue().data())));
-    }, MetricRegistry.name(this.getClass(), "batchGet"));
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            resourceSpecs)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities: " + urnStrs);
+    }
+    return RestliUtil.toTask(
+        () -> {
+          final Set<String> projectedAspects =
+              aspectNames == null
                  ? 
Collections.emptySet() + : new HashSet<>(Arrays.asList(aspectNames)); + return _entityService.getEntities(urns, projectedAspects).entrySet().stream() + .collect( + Collectors.toMap( + entry -> entry.getKey().toString(), + entry -> new AnyRecord(entry.getValue().data()))); + }, + MetricRegistry.name(this.getClass(), "batchGet")); } private SystemMetadata populateDefaultFieldsIfEmpty(@Nullable SystemMetadata systemMetadata) { @@ -234,17 +250,21 @@ private SystemMetadata populateDefaultFieldsIfEmpty(@Nullable SystemMetadata sys @Action(name = ACTION_INGEST) @Nonnull @WithSpan - public Task ingest(@ActionParam(PARAM_ENTITY) @Nonnull Entity entity, + public Task ingest( + @ActionParam(PARAM_ENTITY) @Nonnull Entity entity, @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata providedSystemMetadata) throws URISyntaxException { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to edit entity " + urn); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entity " + urn); } try { @@ -255,33 +275,43 @@ public Task ingest(@ActionParam(PARAM_ENTITY) @Nonnull Entity entity, SystemMetadata systemMetadata = populateDefaultFieldsIfEmpty(providedSystemMetadata); - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); // variables referenced in lambdas are required to be final final SystemMetadata finalSystemMetadata = systemMetadata; - return RestliUtil.toTask(() -> { - _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata); - return null; - }, MetricRegistry.name(this.getClass(), "ingest")); + return RestliUtil.toTask( + () -> { + _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata); + return null; + }, + MetricRegistry.name(this.getClass(), "ingest")); } @Action(name = ACTION_BATCH_INGEST) @Nonnull @WithSpan - public Task batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] entities, - @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata[] systemMetadataList) throws URISyntaxException { + public Task batchIngest( + @ActionParam(PARAM_ENTITIES) @Nonnull Entity[] entities, + @ActionParam(SYSTEM_METADATA) @Optional @Nullable SystemMetadata[] systemMetadataList) + throws URISyntaxException { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - List> resourceSpecs = Arrays.stream(entities) - .map(Entity::getValue) - .map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List> resourceSpecs = + Arrays.stream(entities) + .map(Entity::getValue) + 
.map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion) + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to edit entities."); + && !isAuthorized( + authentication, + _authorizer, + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities."); } for (Entity entity : entities) { @@ -292,7 +322,8 @@ public Task batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] ent } } - final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); + final AuditStamp auditStamp = + new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); if (systemMetadataList == null) { systemMetadataList = new SystemMetadata[entities.length]; @@ -302,113 +333,186 @@ public Task batchIngest(@ActionParam(PARAM_ENTITIES) @Nonnull Entity[] ent throw RestliUtil.invalidArgumentsException("entities and systemMetadata length must match"); } - final List finalSystemMetadataList = Arrays.stream(systemMetadataList) - .map(systemMetadata -> populateDefaultFieldsIfEmpty(systemMetadata)) - .collect(Collectors.toList()); + final List finalSystemMetadataList = + Arrays.stream(systemMetadataList) + .map(systemMetadata -> populateDefaultFieldsIfEmpty(systemMetadata)) + .collect(Collectors.toList()); - return RestliUtil.toTask(() -> { - _entityService.ingestEntities(Arrays.asList(entities), auditStamp, finalSystemMetadataList); - return null; - }, MetricRegistry.name(this.getClass(), "batchIngest")); + return RestliUtil.toTask( + () -> { + _entityService.ingestEntities( + Arrays.asList(entities), auditStamp, finalSystemMetadataList); + return null; + }, + MetricRegistry.name(this.getClass(), "batchIngest")); } @Action(name = ACTION_SEARCH) @Nonnull @WithSpan - public Task search(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, - @ActionParam(PARAM_COUNT) int count, @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext, + public Task search( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_INPUT) @Nonnull String input, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_COUNT) int count, + @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext, @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + 
ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("GET SEARCH RESULTS for {} with query {}", entityName, input); // TODO - change it to use _searchService once we are confident on it's latency return RestliUtil.toTask( - () -> { - final SearchResult result; - // This API is not used by the frontend for search bars so we default to structured - result = _entitySearchService.search(List.of(entityName), input, filter, sortCriterion, start, count, searchFlags); - return validateSearchResult(result, _entityService); - }, - MetricRegistry.name(this.getClass(), "search")); + () -> { + final SearchResult result; + // This API is not used by the frontend for search bars so we default to structured + result = + _entitySearchService.search( + List.of(entityName), input, filter, sortCriterion, start, count, searchFlags); + return validateSearchResult(result, _entityService); + }, + MetricRegistry.name(this.getClass(), "search")); } @Action(name = ACTION_SEARCH_ACROSS_ENTITIES) @Nonnull @WithSpan - public Task searchAcrossEntities(@ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, - @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, - @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { + public Task searchAcrossEntities( + @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, + @ActionParam(PARAM_INPUT) @Nonnull String input, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_COUNT) int count, + @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); log.info("GET SEARCH RESULTS ACROSS ENTITIES for {} with query {}", entityList, input); - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return RestliUtil.toTask(() -> validateSearchResult( - _searchService.searchAcrossEntities(entityList, input, filter, sortCriterion, start, count, finalFlags), - _entityService), "searchAcrossEntities"); + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return RestliUtil.toTask( + () -> + validateSearchResult( + _searchService.searchAcrossEntities( + entityList, input, filter, sortCriterion, start, count, finalFlags), + _entityService), + "searchAcrossEntities"); } @Action(name = ACTION_SCROLL_ACROSS_ENTITIES) @Nonnull @WithSpan - public Task scrollAcrossEntities(@ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, - @ActionParam(PARAM_INPUT) @Nonnull String input, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_SCROLL_ID) String scrollId, - @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count, + public Task scrollAcrossEntities( + @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, + @ActionParam(PARAM_INPUT) @Nonnull String input, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_SCROLL_ID) String scrollId, + @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, + @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); - log.info("GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", entityList, input, scrollId); - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true); - return RestliUtil.toTask(() -> validateScrollResult( - _searchService.scrollAcrossEntities(entityList, input, filter, sortCriterion, scrollId, keepAlive, count, finalFlags), - _entityService), "scrollAcrossEntities"); + log.info( + "GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", + entityList, + input, + scrollId); + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setFulltext(true); + return RestliUtil.toTask( + () -> + validateScrollResult( + _searchService.scrollAcrossEntities( + entityList, + input, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + finalFlags), + _entityService), + "scrollAcrossEntities"); } @Action(name = ACTION_SEARCH_ACROSS_LINEAGE) @Nonnull @WithSpan - public Task searchAcrossLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr, + public Task searchAcrossLineage( + @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, @ActionParam(PARAM_INPUT) @Optional @Nullable String input, @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, - @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) throws URISyntaxException { + @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) + throws URISyntaxException { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } Urn urn = Urn.createFromString(urnStr); List entityList = entities == null ? 
Collections.emptyList() : Arrays.asList(entities); - log.info("GET SEARCH RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", - urnStr, direction, entityList, input); - return RestliUtil.toTask(() -> validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage(urn, LineageDirection.valueOf(direction), entityList, input, maxHops, - filter, sortCriterion, start, count, startTimeMillis, endTimeMillis, searchFlags), _entityService), + log.info( + "GET SEARCH RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", + urnStr, + direction, + entityList, + input); + return RestliUtil.toTask( + () -> + validateLineageSearchResult( + _lineageSearchService.searchAcrossLineage( + urn, + LineageDirection.valueOf(direction), + entityList, + input, + maxHops, + filter, + sortCriterion, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags), + _entityService), "searchAcrossRelationships"); } @Action(name = ACTION_SCROLL_ACROSS_LINEAGE) @Nonnull @WithSpan - public Task scrollAcrossLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr, + public Task scrollAcrossLineage( + @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_ENTITIES) @Optional @Nullable String[] entities, @ActionParam(PARAM_INPUT) @Optional @Nullable String input, @@ -420,72 +524,120 @@ public Task scrollAcrossLineage(@ActionParam(PARAM_URN) @No @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_START_TIME_MILLIS) @Optional @Nullable Long startTimeMillis, @ActionParam(PARAM_END_TIME_MILLIS) @Optional @Nullable Long endTimeMillis, - @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) throws URISyntaxException { + @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) + throws URISyntaxException { Urn urn = Urn.createFromString(urnStr); List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); - log.info("GET SCROLL RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", - urnStr, direction, entityList, input); - final SearchFlags finalFlags = searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true); - return RestliUtil.toTask(() -> validateLineageScrollResult( - _lineageSearchService.scrollAcrossLineage(urn, LineageDirection.valueOf(direction), entityList, input, maxHops, - filter, sortCriterion, scrollId, keepAlive, count, startTimeMillis, endTimeMillis, finalFlags), - _entityService), + log.info( + "GET SCROLL RESULTS ACROSS RELATIONSHIPS for source urn {}, direction {}, entities {} with query {}", + urnStr, + direction, + entityList, + input); + final SearchFlags finalFlags = + searchFlags != null ? 
searchFlags : new SearchFlags().setSkipCache(true); + return RestliUtil.toTask( + () -> + validateLineageScrollResult( + _lineageSearchService.scrollAcrossLineage( + urn, + LineageDirection.valueOf(direction), + entityList, + input, + maxHops, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + finalFlags), + _entityService), "scrollAcrossLineage"); } @Action(name = ACTION_LIST) @Nonnull @WithSpan - public Task list(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, + public Task list( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, + @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("GET LIST RESULTS for {} with filter {}", entityName, filter); - return RestliUtil.toTask(() -> validateListResult( - toListResult(_entitySearchService.filter(entityName, filter, sortCriterion, start, count)), _entityService), + return RestliUtil.toTask( + () -> + validateListResult( + toListResult( + _entitySearchService.filter(entityName, filter, sortCriterion, start, count)), + _entityService), MetricRegistry.name(this.getClass(), "filter")); } @Action(name = ACTION_AUTOCOMPLETE) @Nonnull @WithSpan - public Task autocomplete(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_QUERY) @Nonnull String query, @ActionParam(PARAM_FIELD) @Optional @Nullable String field, - @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, @ActionParam(PARAM_LIMIT) int limit) { + public Task autocomplete( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_QUERY) @Nonnull String query, + @ActionParam(PARAM_FIELD) @Optional @Nullable String field, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_LIMIT) int limit) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } - return RestliUtil.toTask(() -> _entitySearchService.autoComplete(entityName, query, field, filter, limit), + return RestliUtil.toTask( + () -> _entitySearchService.autoComplete(entityName, query, field, filter, limit), MetricRegistry.name(this.getClass(), "autocomplete")); } @Action(name = ACTION_BROWSE) @Nonnull @WithSpan - public Task 
browse(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_PATH) @Nonnull String path, @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, - @ActionParam(PARAM_START) int start, @ActionParam(PARAM_LIMIT) int limit) { + public Task browse( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_PATH) @Nonnull String path, + @ActionParam(PARAM_FILTER) @Optional @Nullable Filter filter, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_LIMIT) int limit) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("GET BROWSE RESULTS for {} at path {}", entityName, path); return RestliUtil.toTask( - () -> validateBrowseResult(_entitySearchService.browse(entityName, path, filter, start, limit), _entityService), + () -> + validateBrowseResult( + _entitySearchService.browse(entityName, path, filter, start, limit), + _entityService), MetricRegistry.name(this.getClass(), "browse")); } @@ -496,13 +648,17 @@ public Task getBrowsePaths( @ActionParam(value = PARAM_URN, typeref = com.linkedin.common.Urn.class) @Nonnull Urn urn) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity: " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity: " + urn); } log.info("GET BROWSE PATHS for {}", urn); - return RestliUtil.toTask(() -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)), + return RestliUtil.toTask( + () -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)), MetricRegistry.name(this.getClass(), "getBrowsePaths")); } @@ -515,12 +671,13 @@ private String stringifyRowCount(int size) { } /* - Used to delete all data related to a filter criteria based on registryId, runId etc. - */ + Used to delete all data related to a filter criteria based on registryId, runId etc. 
+   */
   @Action(name = "deleteAll")
   @Nonnull
   @WithSpan
-  public Task<RollbackResponse> deleteEntities(@ActionParam("registryId") @Optional String registryId,
+  public Task<RollbackResponse> deleteEntities(
+      @ActionParam("registryId") @Optional String registryId,
       @ActionParam("dryRun") @Optional Boolean dryRun) {
     String registryName = null;
     ComparableVersion registryVersion = new ComparableVersion("0.0.0-dev");
@@ -530,139 +687,188 @@ public Task<RollbackResponse> deleteEntities(@ActionParam("registryId") @Optiona
         registryName = registryId.split(":")[0];
         registryVersion = new ComparableVersion(registryId.split(":")[1]);
       } catch (Exception e) {
-        throw new RestLiServiceException(HttpStatus.S_500_INTERNAL_SERVER_ERROR,
-            "Failed to parse registry id: " + registryId, e);
+        throw new RestLiServiceException(
+            HttpStatus.S_500_INTERNAL_SERVER_ERROR,
+            "Failed to parse registry id: " + registryId,
+            e);
       }
     }
     String finalRegistryName = registryName;
     ComparableVersion finalRegistryVersion = registryVersion;
     String finalRegistryName1 = registryName;
     ComparableVersion finalRegistryVersion1 = registryVersion;
-    return RestliUtil.toTask(() -> {
-      RollbackResponse response = new RollbackResponse();
-      List<AspectRowSummary> aspectRowsToDelete =
-          _systemMetadataService.findByRegistry(finalRegistryName, finalRegistryVersion.toString(), false, 0,
-              ESUtils.MAX_RESULT_SIZE);
-      log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
-      response.setAspectsAffected(aspectRowsToDelete.size());
-      Set<String> urns = aspectRowsToDelete.stream().collect(Collectors.groupingBy(AspectRowSummary::getUrn)).keySet();
-      List<java.util.Optional<EntitySpec>> resourceSpecs = urns.stream()
-          .map(UrnUtils::getUrn)
-          .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString())))
-          .collect(Collectors.toList());
-      Authentication auth = AuthenticationContext.getAuthentication();
-      if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-          && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), resourceSpecs)) {
-        throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-            "User is unauthorized to delete entities.");
-      }
-      response.setEntitiesAffected(urns.size());
-      response.setEntitiesDeleted(aspectRowsToDelete.stream().filter(AspectRowSummary::isKeyAspect).count());
-      response.setAspectRowSummaries(
-          new AspectRowSummaryArray(aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))));
-      if ((dryRun == null) || (!dryRun)) {
-        Map<String, String> conditions = new HashMap();
-        conditions.put("registryName", finalRegistryName1);
-        conditions.put("registryVersion", finalRegistryVersion1.toString());
-        _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false);
-      }
-      return response;
-    }, MetricRegistry.name(this.getClass(), "deleteAll"));
+    return RestliUtil.toTask(
+        () -> {
+          RollbackResponse response = new RollbackResponse();
+          List<AspectRowSummary> aspectRowsToDelete =
+              _systemMetadataService.findByRegistry(
+                  finalRegistryName,
+                  finalRegistryVersion.toString(),
+                  false,
+                  0,
+                  ESUtils.MAX_RESULT_SIZE);
+          log.info("found {} rows to delete...", stringifyRowCount(aspectRowsToDelete.size()));
+          response.setAspectsAffected(aspectRowsToDelete.size());
+          Set<String> urns =
+              aspectRowsToDelete.stream()
+                  .collect(Collectors.groupingBy(AspectRowSummary::getUrn))
+                  .keySet();
+          List<java.util.Optional<EntitySpec>> resourceSpecs =
+              urns.stream()
+                  .map(UrnUtils::getUrn)
+                  .map(
+                      urn ->
+                          java.util.Optional.of(
+                              new EntitySpec(urn.getEntityType(), urn.toString())))
+                  .collect(Collectors.toList());
+          Authentication auth = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  auth,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+                  resourceSpecs)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities.");
+          }
+          response.setEntitiesAffected(urns.size());
+          response.setEntitiesDeleted(
+              aspectRowsToDelete.stream().filter(AspectRowSummary::isKeyAspect).count());
+          response.setAspectRowSummaries(
+              new AspectRowSummaryArray(
+                  aspectRowsToDelete.subList(0, Math.min(100, aspectRowsToDelete.size()))));
+          if ((dryRun == null) || (!dryRun)) {
+            Map<String, String> conditions = new HashMap();
+            conditions.put("registryName", finalRegistryName1);
+            conditions.put("registryVersion", finalRegistryVersion1.toString());
+            _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false);
+          }
+          return response;
+        },
+        MetricRegistry.name(this.getClass(), "deleteAll"));
   }

   /**
    * Deletes all data related to an individual urn (entity).
+   *
    * @param urnStr - the urn of the entity.
-   * @param aspectName - the optional aspect name if only want to delete the aspect (applicable only for timeseries aspects).
+   * @param aspectName - the optional aspect name if only the aspect should be deleted (applicable
+   *     only for timeseries aspects).
    * @param startTimeMills - the optional start time (applicable only for timeseries aspects).
    * @param endTimeMillis - the optional end time (applicable only for the timeseries aspects).
-   * @return - a DeleteEntityResponse object.
+   * @return - a DeleteEntityResponse object.
    * @throws URISyntaxException
    */
   @Action(name = ACTION_DELETE)
   @Nonnull
   @WithSpan
-  public Task<DeleteEntityResponse> deleteEntity(@ActionParam(PARAM_URN) @Nonnull String urnStr,
-      @ActionParam(PARAM_ASPECT_NAME) @Optional String aspectName,
-      @ActionParam(PARAM_START_TIME_MILLIS) @Optional Long startTimeMills,
-      @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long endTimeMillis) throws URISyntaxException {
+  public Task<DeleteEntityResponse> deleteEntity(
+      @ActionParam(PARAM_URN) @Nonnull String urnStr,
+      @ActionParam(PARAM_ASPECT_NAME) @Optional String aspectName,
+      @ActionParam(PARAM_START_TIME_MILLIS) @Optional Long startTimeMills,
+      @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long endTimeMillis)
+      throws URISyntaxException {
     Urn urn = Urn.createFromString(urnStr);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
-        Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to delete entity: " + urnStr);
-    }
-    return RestliUtil.toTask(() -> {
-      // Find the timeseries aspects to delete. If aspectName is null, delete all.
-      List<String> timeseriesAspectNames =
-          EntitySpecUtils.getEntityTimeseriesAspectNames(_entityService.getEntityRegistry(), urn.getEntityType());
-      if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) {
-        throw new UnsupportedOperationException(
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE),
+            Collections.singletonList(
+                java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + urnStr);
+    }
+    return RestliUtil.toTask(
+        () -> {
+          // Find the timeseries aspects to delete. If aspectName is null, delete all.
+          List<String> timeseriesAspectNames =
+              EntitySpecUtils.getEntityTimeseriesAspectNames(
+                  _entityService.getEntityRegistry(), urn.getEntityType());
+          if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) {
+            throw new UnsupportedOperationException(
                String.format("Not supported for non-timeseries aspect '%s'.", aspectName));
-      }
-      List<String> timeseriesAspectsToDelete =
+          }
+          List<String> timeseriesAspectsToDelete =
              (aspectName == null) ? timeseriesAspectNames : ImmutableList.of(aspectName);
-      DeleteEntityResponse response = new DeleteEntityResponse();
-      if (aspectName == null) {
-        RollbackRunResult result = _entityService.deleteUrn(urn);
-        response.setRows(result.getRowsDeletedFromEntityDeletion());
-      }
-      Long numTimeseriesDocsDeleted =
-          deleteTimeseriesAspects(urn, startTimeMills, endTimeMillis, timeseriesAspectsToDelete);
-      log.info("Total number of timeseries aspect docs deleted: {}", numTimeseriesDocsDeleted);
-
-      response.setUrn(urnStr);
-      response.setTimeseriesRows(numTimeseriesDocsDeleted);
-
-      return response;
-    }, MetricRegistry.name(this.getClass(), "delete"));
+          DeleteEntityResponse response = new DeleteEntityResponse();
+          if (aspectName == null) {
+            RollbackRunResult result = _entityService.deleteUrn(urn);
+            response.setRows(result.getRowsDeletedFromEntityDeletion());
+          }
+          Long numTimeseriesDocsDeleted =
+              deleteTimeseriesAspects(
+                  urn, startTimeMills, endTimeMillis, timeseriesAspectsToDelete);
+          log.info("Total number of timeseries aspect docs deleted: {}", numTimeseriesDocsDeleted);
+
+          response.setUrn(urnStr);
+          response.setTimeseriesRows(numTimeseriesDocsDeleted);
+
+          return response;
+        },
+        MetricRegistry.name(this.getClass(), "delete"));
   }

   /**
-   * Deletes the set of timeseries aspect values for the specified aspects that are associated with the given
-   * entity urn between startTimeMillis and endTimeMillis.
+   * Deletes the set of timeseries aspect values for the specified aspects that are associated with
+   * the given entity urn between startTimeMillis and endTimeMillis.
+   *
    * @param urn The entity urn whose timeseries aspect values need to be deleted.
-   * @param startTimeMillis The start time in milliseconds from when the aspect values need to be deleted.
-   *                        If this is null, the deletion starts from the oldest value.
-   * @param endTimeMillis The end time in milliseconds up to when the aspect values need to be deleted.
-   *                      If this is null, the deletion will go till the most recent value.
+   * @param startTimeMillis The start time in milliseconds from when the aspect values need to be
+   *     deleted. If this is null, the deletion starts from the oldest value.
+   * @param endTimeMillis The end time in milliseconds up to when the aspect values need to be
+   *     deleted. If this is null, the deletion will go till the most recent value.
* @param aspectsToDelete - The list of aspect names whose values need to be deleted. * @return The total number of documents deleted. */ - private Long deleteTimeseriesAspects(@Nonnull Urn urn, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis, + private Long deleteTimeseriesAspects( + @Nonnull Urn urn, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis, @Nonnull List aspectsToDelete) { long totalNumberOfDocsDeleted = 0; Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entity " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urn); } // Construct the filter. List criteria = new ArrayList<>(); criteria.add(QueryUtils.newCriterion("urn", urn.toString())); if (startTimeMillis != null) { criteria.add( - QueryUtils.newCriterion(ES_FIELD_TIMESTAMP, startTimeMillis.toString(), Condition.GREATER_THAN_OR_EQUAL_TO)); + QueryUtils.newCriterion( + ES_FIELD_TIMESTAMP, startTimeMillis.toString(), Condition.GREATER_THAN_OR_EQUAL_TO)); } if (endTimeMillis != null) { criteria.add( - QueryUtils.newCriterion(ES_FIELD_TIMESTAMP, endTimeMillis.toString(), Condition.LESS_THAN_OR_EQUAL_TO)); + QueryUtils.newCriterion( + ES_FIELD_TIMESTAMP, endTimeMillis.toString(), Condition.LESS_THAN_OR_EQUAL_TO)); } final Filter filter = QueryUtils.getFilterFromCriteria(criteria); // Delete all the timeseries aspects by the filter. final String entityType = urn.getEntityType(); for (final String aspect : aspectsToDelete) { - DeleteAspectValuesResult result = _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter); + DeleteAspectValuesResult result = + _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter); totalNumberOfDocsDeleted += result.getNumDocsDeleted(); - log.debug("Number of timeseries docs deleted for entity:{}, aspect:{}, urn:{}, startTime:{}, endTime:{}={}", - entityType, aspect, urn, startTimeMillis, endTimeMillis, result.getNumDocsDeleted()); + log.debug( + "Number of timeseries docs deleted for entity:{}, aspect:{}, urn:{}, startTime:{}, endTime:{}={}", + entityType, + aspect, + urn, + startTimeMillis, + endTimeMillis, + result.getNumDocsDeleted()); } return totalNumberOfDocsDeleted; } @@ -670,19 +876,24 @@ private Long deleteTimeseriesAspects(@Nonnull Urn urn, @Nullable Long startTimeM @Action(name = "deleteReferences") @Nonnull @WithSpan - public Task deleteReferencesTo(@ActionParam(PARAM_URN) @Nonnull String urnStr, - @ActionParam("dryRun") @Optional Boolean dry) throws URISyntaxException { + public Task deleteReferencesTo( + @ActionParam(PARAM_URN) @Nonnull String urnStr, @ActionParam("dryRun") @Optional Boolean dry) + throws URISyntaxException { boolean dryRun = dry != null ? 
dry : false; Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entity " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urnStr); } - return RestliUtil.toTask(() -> _deleteEntityService.deleteReferencesTo(urn, dryRun), + return RestliUtil.toTask( + () -> _deleteEntityService.deleteReferencesTo(urn, dryRun), MetricRegistry.name(this.getClass(), "deleteReferences")); } @@ -692,18 +903,24 @@ public Task deleteReferencesTo(@ActionParam(PARAM_URN) @Action(name = "setWritable") @Nonnull @WithSpan - public Task setWriteable(@ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) { + public Task setWriteable( + @ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to enable and disable write mode."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to enable and disable write mode."); } log.info("setting entity resource to be writable"); - return RestliUtil.toTask(() -> { - _entityService.setWritable(value); - return null; - }); + return RestliUtil.toTask( + () -> { + _entityService.setWritable(value); + return null; + }); } @Action(name = "getTotalEntityCount") @@ -712,9 +929,13 @@ public Task setWriteable(@ActionParam(PARAM_VALUE) @Optional("true") @Nonn public Task getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String entityName) { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity counts."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); } return RestliUtil.toTask(() -> _entitySearchService.docCount(entityName)); } @@ -722,26 +943,39 @@ public Task getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String @Action(name = "batchGetTotalEntityCount") @Nonnull @WithSpan - public Task batchGetTotalEntityCount(@ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) { + public Task batchGetTotalEntityCount( + @ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) { Authentication auth = AuthenticationContext.getAuthentication(); if 
(Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity counts."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); } - return RestliUtil.toTask(() -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames)))); + return RestliUtil.toTask( + () -> new LongMap(_searchService.docCountPerEntity(Arrays.asList(entityNames)))); } @Action(name = ACTION_LIST_URNS) @Nonnull @WithSpan - public Task listUrns(@ActionParam(PARAM_ENTITY) @Nonnull String entityName, - @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) throws URISyntaxException { + public Task listUrns( + @ActionParam(PARAM_ENTITY) @Nonnull String entityName, + @ActionParam(PARAM_START) int start, + @ActionParam(PARAM_COUNT) int count) + throws URISyntaxException { Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to search."); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), + (EntitySpec) null)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); } log.info("LIST URNS for {} with start {} and count {}", entityName, start, count); return RestliUtil.toTask(() -> _entityService.listUrns(entityName, start, count), "listUrns"); @@ -750,12 +984,12 @@ public Task listUrns(@ActionParam(PARAM_ENTITY) @Nonnull String @Action(name = ACTION_APPLY_RETENTION) @Nonnull @WithSpan - public Task applyRetention(@ActionParam(PARAM_START) @Optional @Nullable Integer start, - @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count, - @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion, - @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName, - @ActionParam(PARAM_URN) @Optional @Nullable String urn - ) { + public Task applyRetention( + @ActionParam(PARAM_START) @Optional @Nullable Integer start, + @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count, + @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion, + @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName, + @ActionParam(PARAM_URN) @Optional @Nullable String urn) { Authentication auth = AuthenticationContext.getAuthentication(); EntitySpec resourceSpec = null; if (StringUtils.isNotBlank(urn)) { @@ -763,47 +997,66 @@ public Task applyRetention(@ActionParam(PARAM_START) @Optional @Nullable resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString()); } if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE), resourceSpec)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to apply retention."); + && !isAuthorized( + auth, + _authorizer, + 
ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE),
+            resourceSpec)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to apply retention.");
     }
-    return RestliUtil.toTask(() -> _entityService.batchApplyRetention(
-        start, count, attemptWithVersion, aspectName, urn), ACTION_APPLY_RETENTION);
+    return RestliUtil.toTask(
+        () -> _entityService.batchApplyRetention(start, count, attemptWithVersion, aspectName, urn),
+        ACTION_APPLY_RETENTION);
   }

   @Action(name = ACTION_FILTER)
   @Nonnull
   @WithSpan
-  public Task<SearchResult> filter(@ActionParam(PARAM_ENTITY) @Nonnull String entityName,
+  public Task<SearchResult> filter(
+      @ActionParam(PARAM_ENTITY) @Nonnull String entityName,
       @ActionParam(PARAM_FILTER) Filter filter,
-      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion, @ActionParam(PARAM_START) int start,
+      @ActionParam(PARAM_SORT) @Optional @Nullable SortCriterion sortCriterion,
+      @ActionParam(PARAM_START) int start,
       @ActionParam(PARAM_COUNT) int count) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to search.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search.");
     }
     log.info("FILTER RESULTS for {} with filter {}", entityName, filter);
     return RestliUtil.toTask(
-        () -> validateSearchResult(_entitySearchService.filter(entityName, filter, sortCriterion, start, count),
-            _entityService), MetricRegistry.name(this.getClass(), "search"));
+        () ->
+            validateSearchResult(
+                _entitySearchService.filter(entityName, filter, sortCriterion, start, count),
+                _entityService),
+        MetricRegistry.name(this.getClass(), "search"));
   }

   @Action(name = ACTION_EXISTS)
   @Nonnull
   @WithSpan
-  public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) throws URISyntaxException {
+  public Task<Boolean> exists(@ActionParam(PARAM_URN) @Nonnull String urnStr)
+      throws URISyntaxException {
     Urn urn = UrnUtils.getUrn(urnStr);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
-        new EntitySpec(urn.getEntityType(), urnStr))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized get entity: " + urnStr);
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            new EntitySpec(urn.getEntityType(), urnStr))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity: " + urnStr);
     }
     log.info("EXISTS for {}", urnStr);
-    return RestliUtil.toTask(() -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists"));
+    return RestliUtil.toTask(
+        () -> _entityService.exists(urn), MetricRegistry.name(this.getClass(), "exists"));
   }
 }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java
index 0c3e93273b863..9653a20bd8785 100644
--- 
a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java @@ -1,10 +1,16 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; @@ -34,16 +40,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; - - -/** - * Single unified resource for fetching, updating, searching, & browsing DataHub entities - */ +/** Single unified resource for fetching, updating, searching, & browsing DataHub entities */ @Slf4j @RestLiCollection(name = "entitiesV2", namespace = "com.linkedin.entity") public class EntityV2Resource extends CollectionResourceTaskTemplate { @@ -56,68 +53,90 @@ public class EntityV2Resource extends CollectionResourceTaskTemplate get(@Nonnull String urnStr, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task get( + @Nonnull String urnStr, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.debug("GET V2 {}", urnStr); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), new EntitySpec(urn.getEntityType(), urnStr))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity " + urn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + new EntitySpec(urn.getEntityType(), urnStr))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); } - return RestliUtil.toTask(() -> { - final String entityName = urnToEntityName(urn); - final Set projectedAspects = - aspectNames == null ? 
getAllAspectNames(_entityService, entityName) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntityV2(entityName, urn, projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to get entity with urn: %s, aspects: %s", urn, projectedAspects), e); - } - }, MetricRegistry.name(this.getClass(), "get")); + return RestliUtil.toTask( + () -> { + final String entityName = urnToEntityName(urn); + final Set projectedAspects = + aspectNames == null + ? getAllAspectNames(_entityService, entityName) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntityV2(entityName, urn, projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to get entity with urn: %s, aspects: %s", urn, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "get")); } @RestMethod.BatchGet @Nonnull @WithSpan - public Task> batchGet(@Nonnull Set urnStrs, - @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) throws URISyntaxException { + public Task> batchGet( + @Nonnull Set urnStrs, + @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) + throws URISyntaxException { log.debug("BATCH GET V2 {}", urnStrs.toString()); final Set urns = new HashSet<>(); for (final String urnStr : urnStrs) { urns.add(Urn.createFromString(urnStr)); } Authentication auth = AuthenticationContext.getAuthentication(); - List> resourceSpecs = urns.stream() - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List> resourceSpecs = + urns.stream() + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entities " + urnStrs); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + urnStrs); } if (urns.size() <= 0) { return Task.value(Collections.emptyMap()); } final String entityName = urnToEntityName(urns.iterator().next()); - return RestliUtil.toTask(() -> { - final Set projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityName) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntitiesV2(entityName, urns, projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to batch get entities with urns: %s, projectedAspects: %s", urns, projectedAspects), - e); - } - }, MetricRegistry.name(this.getClass(), "batchGet")); + return RestliUtil.toTask( + () -> { + final Set projectedAspects = + aspectNames == null + ? 
getAllAspectNames(_entityService, entityName) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntitiesV2(entityName, urns, projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch get entities with urns: %s, projectedAspects: %s", + urns, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "batchGet")); } -} \ No newline at end of file +} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java index 05b7e6b3ff24b..fc556d15342c2 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java @@ -1,10 +1,15 @@ package com.linkedin.metadata.resources.entity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -35,19 +40,16 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - /** * Single unified resource for fetching, updating, searching, & browsing versioned DataHub entities */ @Slf4j -@RestLiCollection(name = "entitiesVersionedV2", namespace = "com.linkedin.entity", +@RestLiCollection( + name = "entitiesVersionedV2", + namespace = "com.linkedin.entity", keyTyperefClass = com.linkedin.common.versioned.VersionedUrn.class) -public class EntityVersionedV2Resource extends CollectionResourceTaskTemplate { +public class EntityVersionedV2Resource + extends CollectionResourceTaskTemplate { @Inject @Named("entityService") @@ -65,36 +67,54 @@ public Task> batchGetVersioned( @QueryParam(PARAM_ENTITY_TYPE) @Nonnull String entityType, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) { Authentication auth = AuthenticationContext.getAuthentication(); - List> resourceSpecs = versionedUrnStrs.stream() - .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); + List> resourceSpecs = + versionedUrnStrs.stream() + .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) + .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) + .collect(Collectors.toList()); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, 
ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), resourceSpecs)) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + resourceSpecs)) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + versionedUrnStrs); } log.debug("BATCH GET VERSIONED V2 {}", versionedUrnStrs); if (versionedUrnStrs.size() <= 0) { return Task.value(Collections.emptyMap()); } - return RestliUtil.toTask(() -> { - final Set projectedAspects = - aspectNames == null ? getAllAspectNames(_entityService, entityType) : new HashSet<>(Arrays.asList(aspectNames)); - try { - return _entityService.getEntitiesVersionedV2(versionedUrnStrs.stream() - .map(versionedUrnTyperef -> { - VersionedUrn versionedUrn = new VersionedUrn().setUrn(UrnUtils.getUrn(versionedUrnTyperef.getUrn())); - if (versionedUrnTyperef.getVersionStamp() != null) { - versionedUrn.setVersionStamp(versionedUrnTyperef.getVersionStamp()); - } - return versionedUrn; - }).collect(Collectors.toSet()), projectedAspects); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to batch get versioned entities: %s, projectedAspects: %s", versionedUrnStrs, projectedAspects), - e); - } - }, MetricRegistry.name(this.getClass(), "batchGet")); + return RestliUtil.toTask( + () -> { + final Set projectedAspects = + aspectNames == null + ? getAllAspectNames(_entityService, entityType) + : new HashSet<>(Arrays.asList(aspectNames)); + try { + return _entityService.getEntitiesVersionedV2( + versionedUrnStrs.stream() + .map( + versionedUrnTyperef -> { + VersionedUrn versionedUrn = + new VersionedUrn() + .setUrn(UrnUtils.getUrn(versionedUrnTyperef.getUrn())); + if (versionedUrnTyperef.getVersionStamp() != null) { + versionedUrn.setVersionStamp(versionedUrnTyperef.getVersionStamp()); + } + return versionedUrn; + }) + .collect(Collectors.toSet()), + projectedAspects); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch get versioned entities: %s, projectedAspects: %s", + versionedUrnStrs, projectedAspects), + e); + } + }, + MetricRegistry.name(this.getClass(), "batchGet")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java index 82d29ea00663b..1b22cc135b037 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java @@ -3,14 +3,11 @@ import com.linkedin.metadata.entity.EntityService; import java.util.Set; - public class ResourceUtils { - private ResourceUtils() { - - } + private ResourceUtils() {} - public static Set getAllAspectNames(final EntityService entityService, final String entityName) { + public static Set getAllAspectNames( + final EntityService entityService, final String entityName) { return entityService.getEntityAspectNames(entityName); } - } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java index 4a8e74c89039a..3fdd1d804a83f 100644 --- 
a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java @@ -1,10 +1,19 @@ package com.linkedin.metadata.resources.lineage; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; +import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; +import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; @@ -42,19 +51,7 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; -import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; -import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; -import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; - - -/** - * Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types} - */ +/** Rest.li entry point: /relationships?type={entityType}&direction={direction}&types={types} */ @Slf4j @RestLiSimpleResource(name = "relationships", namespace = "com.linkedin.lineage") public final class Relationships extends SimpleResourceTemplate { @@ -76,14 +73,25 @@ public Relationships() { super(); } - private RelatedEntitiesResult getRelatedEntities(String rawUrn, List relationshipTypes, - RelationshipDirection direction, @Nullable Integer start, @Nullable Integer count) { + private RelatedEntitiesResult getRelatedEntities( + String rawUrn, + List relationshipTypes, + RelationshipDirection direction, + @Nullable Integer start, + @Nullable Integer count) { start = start == null ? 0 : start; count = count == null ? 
MAX_DOWNSTREAM_CNT : count;
-    return _graphService.findRelatedEntities(null, newFilter("urn", rawUrn), null, QueryUtils.EMPTY_FILTER,
-        relationshipTypes, newRelationshipFilter(QueryUtils.EMPTY_FILTER, direction), start, count);
+    return _graphService.findRelatedEntities(
+        null,
+        newFilter("urn", rawUrn),
+        null,
+        QueryUtils.EMPTY_FILTER,
+        relationshipTypes,
+        newRelationshipFilter(QueryUtils.EMPTY_FILTER, direction),
+        start,
+        count);
   }

   static RelationshipDirection getOppositeDirection(RelationshipDirection direction) {
@@ -99,40 +107,55 @@ static RelationshipDirection getOppositeDirection(RelationshipDirection directio
   @Nonnull
   @RestMethod.Get
   @WithSpan
-  public Task<EntityRelationships> get(@QueryParam("urn") @Nonnull String rawUrn,
+  public Task<EntityRelationships> get(
+      @QueryParam("urn") @Nonnull String rawUrn,
       @QueryParam("types") @Nonnull String[] relationshipTypesParam,
-      @QueryParam("direction") @Nonnull String rawDirection, @QueryParam("start") @Optional @Nullable Integer start,
+      @QueryParam("direction") @Nonnull String rawDirection,
+      @QueryParam("start") @Optional @Nullable Integer start,
       @QueryParam("count") @Optional @Nullable Integer count) {
     Urn urn = UrnUtils.getUrn(rawUrn);
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
-        Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to get entity lineage: " + rawUrn);
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE),
+            Collections.singletonList(
+                java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + rawUrn);
     }
     RelationshipDirection direction = RelationshipDirection.valueOf(rawDirection);
     final List<String> relationshipTypes = Arrays.asList(relationshipTypesParam);
-    return RestliUtil.toTask(() -> {
-
-      final RelatedEntitiesResult relatedEntitiesResult =
-          getRelatedEntities(rawUrn, relationshipTypes, direction, start, count);
-      final EntityRelationshipArray entityArray =
-          new EntityRelationshipArray(relatedEntitiesResult.getEntities().stream().map(entity -> {
-            try {
-              return new EntityRelationship().setEntity(Urn.createFromString(entity.getUrn()))
-                  .setType(entity.getRelationshipType());
-            } catch (URISyntaxException e) {
-              throw new RuntimeException(
-                  String.format("Failed to convert urnStr %s found in the Graph to an Urn object", entity.getUrn()));
-            }
-          }).collect(Collectors.toList()));
-
-      return new EntityRelationships().setStart(relatedEntitiesResult.getStart())
-          .setCount(relatedEntitiesResult.getCount())
-          .setTotal(relatedEntitiesResult.getTotal())
-          .setRelationships(entityArray);
-    }, MetricRegistry.name(this.getClass(), "getLineage"));
+    return RestliUtil.toTask(
+        () -> {
+          final RelatedEntitiesResult relatedEntitiesResult =
+              getRelatedEntities(rawUrn, relationshipTypes, direction, start, count);
+          final EntityRelationshipArray entityArray =
+              new EntityRelationshipArray(
+                  relatedEntitiesResult.getEntities().stream()
+                      .map(
+                          entity -> {
+                            try {
+                              return new EntityRelationship()
+                                  .setEntity(Urn.createFromString(entity.getUrn()))
+                                  .setType(entity.getRelationshipType());
+                            } catch (URISyntaxException e) {
+                              throw new RuntimeException(
+ String.format( + "Failed to convert urnStr %s found in the Graph to an Urn object", + entity.getUrn())); + } + }) + .collect(Collectors.toList())); + + return new EntityRelationships() + .setStart(relatedEntitiesResult.getStart()) + .setCount(relatedEntitiesResult.getCount()) + .setTotal(relatedEntitiesResult.getTotal()) + .setRelationships(entityArray); + }, + MetricRegistry.name(this.getClass(), "getLineage")); } @Nonnull @@ -141,10 +164,14 @@ public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws E Urn urn = Urn.createFromString(rawUrn); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to delete entity: " + rawUrn); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + rawUrn); } _graphService.removeNode(urn); return new UpdateResponse(HttpStatus.S_200_OK); @@ -153,22 +180,34 @@ public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws E @Action(name = ACTION_GET_LINEAGE) @Nonnull @WithSpan - public Task getLineage(@ActionParam(PARAM_URN) @Nonnull String urnStr, - @ActionParam(PARAM_DIRECTION) String direction, @ActionParam(PARAM_START) @Optional @Nullable Integer start, + public Task getLineage( + @ActionParam(PARAM_URN) @Nonnull String urnStr, + @ActionParam(PARAM_DIRECTION) String direction, + @ActionParam(PARAM_START) @Optional @Nullable Integer start, @ActionParam(PARAM_COUNT) @Optional @Nullable Integer count, - @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops) throws URISyntaxException { + @ActionParam(PARAM_MAX_HOPS) @Optional @Nullable Integer maxHops) + throws URISyntaxException { log.info("GET LINEAGE {} {} {} {} {}", urnStr, direction, start, count, maxHops); final Urn urn = Urn.createFromString(urnStr); Authentication auth = AuthenticationContext.getAuthentication(); if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList(java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, - "User is unauthorized to get entity lineage: " + urnStr); + && !isAuthorized( + auth, + _authorizer, + ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), + Collections.singletonList( + java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + throw new RestLiServiceException( + HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + urnStr); } return RestliUtil.toTask( - () -> _graphService.getLineage(urn, LineageDirection.valueOf(direction), start != null ? start : 0, - count != null ? count : 100, maxHops != null ? maxHops : 1), + () -> + _graphService.getLineage( + urn, + LineageDirection.valueOf(direction), + start != null ? start : 0, + count != null ? count : 100, + maxHops != null ? 
maxHops : 1), MetricRegistry.name(this.getClass(), "getLineage")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java index 1e6523e774d66..499fc0f5221fe 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.resources.operations; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.resources.restli.RestliConstants.*; +import static com.linkedin.metadata.resources.restli.RestliUtils.*; + import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; @@ -35,17 +39,10 @@ import javax.inject.Inject; import javax.inject.Named; import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.tasks.GetTaskResponse; import org.json.JSONObject; +import org.opensearch.client.tasks.GetTaskResponse; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; - - -/** - * Endpoints for performing maintenance operations - */ +/** Endpoints for performing maintenance operations */ @Slf4j @RestLiCollection(name = "operations", namespace = "com.linkedin.operations") public class OperationsResource extends CollectionResourceTaskTemplate { @@ -67,9 +64,11 @@ public class OperationsResource extends CollectionResourceTaskTemplate restoreIndices(@ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, + public Task restoreIndices( + @ActionParam(PARAM_ASPECT) @Optional @Nonnull String aspectName, @ActionParam(PARAM_URN) @Optional @Nullable String urn, @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike, @ActionParam("start") @Optional @Nullable Integer start, - @ActionParam("batchSize") @Optional @Nullable Integer batchSize - ) { - return RestliUtil.toTask(() -> { - return Utils.restoreIndices(aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); - }, MetricRegistry.name(this.getClass(), "restoreIndices")); + @ActionParam("batchSize") @Optional @Nullable Integer batchSize) { + return RestliUtil.toTask( + () -> { + return Utils.restoreIndices( + aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); + }, + MetricRegistry.name(this.getClass(), "restoreIndices")); } @VisibleForTesting @@ -117,62 +119,86 @@ static boolean isTaskIdValid(String task) { public Task getTaskStatus( @ActionParam(PARAM_NODE_ID) @Optional String nodeId, @ActionParam(PARAM_TASK_ID) @Optional("0") long taskId, - @ActionParam(PARAM_TASK) @Optional String task - ) { - return RestliUtil.toTask(() -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE), - List.of(java.util.Optional.empty()))) { - throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status"); - } - boolean taskSpecified = task != null; - boolean nodeAndTaskIdSpecified = 
nodeId != null && taskId > 0;
-    if (!taskSpecified && !nodeAndTaskIdSpecified) {
-      throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Please specify either Node ID + task ID OR composite task parameters");
-    }
+      @ActionParam(PARAM_TASK) @Optional String task) {
+    return RestliUtil.toTask(
+        () -> {
+          Authentication authentication = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  authentication,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE),
+                  List.of(java.util.Optional.empty()))) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status");
+          }
+          boolean taskSpecified = task != null;
+          boolean nodeAndTaskIdSpecified = nodeId != null && taskId > 0;
+          if (!taskSpecified && !nodeAndTaskIdSpecified) {
+            throw new RestLiServiceException(
+                HttpStatus.S_400_BAD_REQUEST,
+                "Please specify either Node ID + task ID OR composite task parameters");
+          }
-    if (taskSpecified && nodeAndTaskIdSpecified && !task.equals(String.format("%s:%d", nodeId, taskId))) {
-      throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST, "Please specify only one of Node ID + task ID OR composite task parameters");
-    }
+          if (taskSpecified
+              && nodeAndTaskIdSpecified
+              && !task.equals(String.format("%s:%d", nodeId, taskId))) {
+            throw new RestLiServiceException(
+                HttpStatus.S_400_BAD_REQUEST,
+                "Please specify only one of Node ID + task ID OR composite task parameters");
+          }
-    if (taskSpecified && !isTaskIdValid(task)) {
-      throw new RestLiServiceException(HttpStatus.S_400_BAD_REQUEST,
-          String.format("Task should be in the form nodeId:taskId e.g. aB1cdEf2GHIJKLMnoPQr3S:123456 (got %s)", task));
-    }
+          if (taskSpecified && !isTaskIdValid(task)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_400_BAD_REQUEST,
+                String.format(
+                    "Task should be in the form nodeId:taskId e.g. aB1cdEf2GHIJKLMnoPQr3S:123456 (got %s)",
+                    task));
+          }
-    String nodeIdToQuery = nodeAndTaskIdSpecified ? nodeId : task.split(":")[0];
-    long taskIdToQuery = nodeAndTaskIdSpecified ? taskId : Long.parseLong(task.split(":")[1]);
-    java.util.Optional<GetTaskResponse> res = _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery);
-    JSONObject j = new JSONObject();
-    if (res.isEmpty()) {
-      j.put("error", String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery));
-      return j.toString();
-    }
-    GetTaskResponse resp = res.get();
-    j.put("completed", resp.isCompleted());
-    j.put("taskId", res.get().getTaskInfo().getTaskId());
-    j.put("status", res.get().getTaskInfo().getStatus());
-    j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos());
-    return j.toString();
-    }, MetricRegistry.name(this.getClass(), "getTaskStatus"));
+          String nodeIdToQuery = nodeAndTaskIdSpecified ? nodeId : task.split(":")[0];
+          long taskIdToQuery = nodeAndTaskIdSpecified ? taskId : Long.parseLong(task.split(":")[1]);
+          java.util.Optional<GetTaskResponse> res =
+              _systemMetadataService.getTaskStatus(nodeIdToQuery, taskIdToQuery);
+          JSONObject j = new JSONObject();
+          if (res.isEmpty()) {
+            j.put(
+                "error",
+                String.format("Could not get task status for %s:%d", nodeIdToQuery, taskIdToQuery));
+            return j.toString();
+          }
+          GetTaskResponse resp = res.get();
+          j.put("completed", resp.isCompleted());
+          j.put("taskId", res.get().getTaskInfo().getTaskId());
+          j.put("status", res.get().getTaskInfo().getStatus());
+          j.put("runTimeNanos", res.get().getTaskInfo().getRunningTimeNanos());
+          return j.toString();
+        },
+        MetricRegistry.name(this.getClass(), "getTaskStatus"));
   }
 
   @Action(name = ACTION_GET_INDEX_SIZES)
   @Nonnull
   @WithSpan
   public Task<String> getIndexSizes() {
-    return RestliUtil.toTask(() -> {
-      Authentication authentication = AuthenticationContext.getAuthentication();
-      if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-          && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE),
-          List.of(java.util.Optional.empty()))) {
-        throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes.");
-      }
-      TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult();
-      result.setIndexSizes(new TimeseriesIndexSizeResultArray(_timeseriesAspectService.getIndexSizes()));
-      return result;
-    }, MetricRegistry.name(this.getClass(), "getIndexSizes"));
+    return RestliUtil.toTask(
+        () -> {
+          Authentication authentication = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  authentication,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE),
+                  List.of(java.util.Optional.empty()))) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes.");
+          }
+          TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult();
+          result.setIndexSizes(
+              new TimeseriesIndexSizeResultArray(_timeseriesAspectService.getIndexSizes()));
+          return result;
+        },
+        MetricRegistry.name(this.getClass(), "getIndexSizes"));
   }
 
   @VisibleForTesting
@@ -184,13 +210,16 @@ String executeTruncateTimeseriesAspect(
       @Nullable Integer batchSize,
       @Nullable Long timeoutSeconds,
       @Nullable Boolean forceDeleteByQuery,
-      @Nullable Boolean forceReindex
-  ) {
+      @Nullable Boolean forceReindex) {
     Authentication authentication = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(authentication, _authorizer, ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE),
-        List.of(java.util.Optional.empty()))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index");
+        && !isAuthorized(
+            authentication,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE),
+            List.of(java.util.Optional.empty()))) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index");
     }
 
     if (forceDeleteByQuery != null && forceDeleteByQuery.equals(forceReindex)) {
@@ -199,14 +228,20 @@ String executeTruncateTimeseriesAspect(
     List<Criterion> criteria = new ArrayList<>();
     criteria.add(
-        QueryUtils.newCriterion("timestampMillis", String.valueOf(endTimeMillis), Condition.LESS_THAN_OR_EQUAL_TO));
+        QueryUtils.newCriterion(
+            "timestampMillis", String.valueOf(endTimeMillis), Condition.LESS_THAN_OR_EQUAL_TO));
     final Filter filter = QueryUtils.getFilterFromCriteria(criteria);
     long numToDelete = _timeseriesAspectService.countByFilter(entityType, aspectName, filter);
     long totalNum = _timeseriesAspectService.countByFilter(entityType, aspectName, new Filter());
 
-    String deleteSummary = String.format("Delete %d out of %d rows (%.2f%%). ", numToDelete, totalNum, ((double) numToDelete) / totalNum * 100);
-    boolean reindex = !(forceDeleteByQuery != null && forceDeleteByQuery) && ((forceReindex != null && forceReindex) || numToDelete > (totalNum / 2));
+    String deleteSummary =
+        String.format(
+            "Delete %d out of %d rows (%.2f%%). ",
+            numToDelete, totalNum, ((double) numToDelete) / totalNum * 100);
+    boolean reindex =
+        !(forceDeleteByQuery != null && forceDeleteByQuery)
+            && ((forceReindex != null && forceReindex) || numToDelete > (totalNum / 2));
 
     if (reindex) {
       deleteSummary += "Reindexing the aspect without the deleted records. ";
@@ -232,17 +267,22 @@ String executeTruncateTimeseriesAspect(
     }
 
     if (reindex) {
-      // need to invert query to retain only the ones that do NOT meet the criterion from the count
+      // need to invert query to retain only the ones that do NOT meet the criterion from the
+      // count
       List<Criterion> reindexCriteria = new ArrayList<>();
       reindexCriteria.add(
-          QueryUtils.newCriterion("timestampMillis", String.valueOf(endTimeMillis), Condition.GREATER_THAN));
+          QueryUtils.newCriterion(
+              "timestampMillis", String.valueOf(endTimeMillis), Condition.GREATER_THAN));
 
       final Filter reindexFilter = QueryUtils.getFilterFromCriteria(reindexCriteria);
-      String taskId = _timeseriesAspectService.reindexAsync(entityType, aspectName, reindexFilter, options);
+      String taskId =
+          _timeseriesAspectService.reindexAsync(entityType, aspectName, reindexFilter, options);
       log.info("reindex request submitted with ID " + taskId);
       return taskId;
     } else {
-      String taskId = _timeseriesAspectService.deleteAspectValuesAsync(entityType, aspectName, filter, options);
+      String taskId =
+          _timeseriesAspectService.deleteAspectValuesAsync(
+              entityType, aspectName, filter, options);
       log.info("delete by query request submitted with ID " + taskId);
       return taskId;
     }
@@ -260,10 +300,18 @@ public Task<String> truncateTimeseriesAspect(
       @ActionParam(PARAM_BATCH_SIZE) @Optional @Nullable Integer batchSize,
       @ActionParam(PARAM_TIMEOUT_SECONDS) @Optional @Nullable Long timeoutSeconds,
       @ActionParam(PARAM_FORCE_DELETE_BY_QUERY) @Optional @Nullable Boolean forceDeleteByQuery,
-      @ActionParam(PARAM_FORCE_REINDEX) @Optional @Nullable Boolean forceReindex
-  ) {
-    return RestliUtil.toTask(() ->
-        executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, dryRun, batchSize, timeoutSeconds, forceDeleteByQuery, forceReindex),
+      @ActionParam(PARAM_FORCE_REINDEX) @Optional @Nullable Boolean forceReindex) {
+    return RestliUtil.toTask(
+        () ->
+            executeTruncateTimeseriesAspect(
+                entityType,
+                aspectName,
+                endTimeMillis,
+                dryRun,
+                batchSize,
+                timeoutSeconds,
+                forceDeleteByQuery,
+                forceReindex),
         MetricRegistry.name(this.getClass(), "truncateTimeseriesAspect"));
   }
 }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java
index 12586b66495a9..bf07d0eb9dd5b 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.resources.operations;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.resources.restli.RestliUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationContext;
 import com.datahub.authorization.EntitySpec;
@@ -19,14 +22,11 @@
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.resources.restli.RestliUtils.*;
-
-
 @Slf4j
 public class Utils {
-  private Utils() { }
+  private Utils() {}
+
   public static String restoreIndices(
       @Nonnull String aspectName,
       @Nullable String urn,
@@ -34,8 +34,7 @@ public static String restoreIndices(
       @Nullable Integer start,
       @Nullable Integer batchSize,
       @Nonnull Authorizer authorizer,
-      @Nonnull EntityService entityService
-  ) {
+      @Nonnull EntityService entityService) {
     Authentication authentication = AuthenticationContext.getAuthentication();
     EntitySpec resourceSpec = null;
     if (StringUtils.isNotBlank(urn)) {
@@ -43,16 +42,21 @@ public static String restoreIndices(
       resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString());
     }
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(authentication, authorizer, ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE),
-        resourceSpec)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices.");
+        && !isAuthorized(
+            authentication,
+            authorizer,
+            ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE),
+            resourceSpec)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices.");
     }
-    RestoreIndicesArgs args = new RestoreIndicesArgs()
-        .setAspectName(aspectName)
-        .setUrnLike(urnLike)
-        .setUrn(urn)
-        .setStart(start)
-        .setBatchSize(batchSize);
+    RestoreIndicesArgs args =
+        new RestoreIndicesArgs()
+            .setAspectName(aspectName)
+            .setUrnLike(urnLike)
+            .setUrn(urn)
+            .setStart(start)
+            .setBatchSize(batchSize);
     Map<String, Object> result = new HashMap<>();
     result.put("args", args);
     result.put("result", entityService.restoreIndices(args, log::info));
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java
index a8018074497c4..f4bc0dd72e4c6 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java
@@ -1,9 +1,12 @@
 package com.linkedin.metadata.resources.platform;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.resources.restli.RestliUtils.*;
+
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationContext;
-import com.datahub.plugins.auth.authorization.Authorizer;
 import com.datahub.authorization.EntitySpec;
+import com.datahub.plugins.auth.authorization.Authorizer;
 import com.google.common.collect.ImmutableList;
 import com.linkedin.entity.Entity;
 import com.linkedin.metadata.authorization.PoliciesConfig;
@@ -24,13 +27,7 @@
 import javax.inject.Named;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.resources.restli.RestliUtils.*;
-
-
-/**
- * DataHub Platform Actions
- */
+/** DataHub Platform Actions */
 @Slf4j
 @RestLiCollection(name = "platform", namespace = "com.linkedin.platform")
 public class PlatformResource extends CollectionResourceTaskTemplate<String, Entity> {
@@ -54,14 +51,19 @@ public Task<Void> producePlatformEvent(
       @ActionParam("event") @Nonnull PlatformEvent event) {
     Authentication auth = AuthenticationContext.getAuthentication();
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE), (EntitySpec) null)) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to produce platform events.");
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE),
+            (EntitySpec) null)) {
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to produce platform events.");
     }
     log.info(String.format("Emitting platform event. name: %s, key: %s", eventName, key));
-    return RestliUtil.toTask(() -> {
-      _eventProducer.producePlatformEvent(eventName, key, event);
-      return null;
-    });
+    return RestliUtil.toTask(
+        () -> {
+          _eventProducer.producePlatformEvent(eventName, key, event);
+          return null;
+        });
   }
 }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java
index 270c52f380356..af6efb1ad8093 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliConstants.java
@@ -1,7 +1,7 @@
 package com.linkedin.metadata.resources.restli;
 
 public final class RestliConstants {
-  private RestliConstants() { }
+  private RestliConstants() {}
 
   public static final String FINDER_SEARCH = "search";
   public static final String FINDER_FILTER = "filter";
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java
index 9949556c99b81..278cd48bc455e 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java
@@ -18,7 +18,6 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public class RestliUtils {
 
   private RestliUtils() {
   }
 
   /**
-   * Executes the provided supplier and convert the results to a {@link Task}.
-   * Exceptions thrown during the execution will be properly wrapped in {@link RestLiServiceException}.
+   * Executes the provided supplier and converts the results to a {@link Task}. Exceptions thrown
+   * during the execution will be properly wrapped in {@link RestLiServiceException}.
+   *
    * @param supplier The supplier to execute
    * @return A parseq {@link Task}
    */
@@ -38,7 +38,8 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) {
     } catch (Throwable throwable) {
 
       // Convert IllegalArgumentException to BAD REQUEST
-      if (throwable instanceof IllegalArgumentException || throwable.getCause() instanceof IllegalArgumentException) {
+      if (throwable instanceof IllegalArgumentException
+          || throwable.getCause() instanceof IllegalArgumentException) {
         throwable = badRequestException(throwable.getMessage());
       }
 
@@ -51,8 +52,10 @@ public static <T> Task<T> toTask(@Nonnull Supplier<T> supplier) {
   }
 
   /**
-   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} instead.
-   * A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the optional is emtpy.
+   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional}
+   * instead. A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the
+   * optional is empty.
+   *
   * @param supplier The supplier to execute
   * @return A parseq {@link Task}
   */
@@ -81,22 +84,36 @@ public static RestLiServiceException invalidArgumentsException(@Nullable String
     return new RestLiServiceException(HttpStatus.S_412_PRECONDITION_FAILED, message);
   }
 
-  public static boolean isAuthorized(@Nonnull Authentication authentication, @Nonnull Authorizer authorizer,
-      @Nonnull final List<PoliciesConfig.Privilege> privileges, @Nonnull final List<java.util.Optional<EntitySpec>> resources) {
+  public static boolean isAuthorized(
+      @Nonnull Authentication authentication,
+      @Nonnull Authorizer authorizer,
+      @Nonnull final List<PoliciesConfig.Privilege> privileges,
+      @Nonnull final List<java.util.Optional<EntitySpec>> resources) {
     DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges);
-    return AuthUtil.isAuthorizedForResources(authorizer, authentication.getActor().toUrnStr(), resources, orGroup);
+    return AuthUtil.isAuthorizedForResources(
+        authorizer, authentication.getActor().toUrnStr(), resources, orGroup);
   }
 
-  public static boolean isAuthorized(@Nonnull Authentication authentication, @Nonnull Authorizer authorizer,
-      @Nonnull final List<PoliciesConfig.Privilege> privileges, @Nullable final EntitySpec resource) {
+  public static boolean isAuthorized(
+      @Nonnull Authentication authentication,
+      @Nonnull Authorizer authorizer,
+      @Nonnull final List<PoliciesConfig.Privilege> privileges,
+      @Nullable final EntitySpec resource) {
     DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges);
-    return AuthUtil.isAuthorized(authorizer, authentication.getActor().toUrnStr(), java.util.Optional.ofNullable(resource), orGroup);
+    return AuthUtil.isAuthorized(
+        authorizer,
+        authentication.getActor().toUrnStr(),
+        java.util.Optional.ofNullable(resource),
+        orGroup);
   }
 
-  private static DisjunctivePrivilegeGroup convertPrivilegeGroup(@Nonnull final List<PoliciesConfig.Privilege> privileges) {
+  private static DisjunctivePrivilegeGroup convertPrivilegeGroup(
+      @Nonnull final List<PoliciesConfig.Privilege> privileges) {
     return new DisjunctivePrivilegeGroup(
-        ImmutableList.of(new ConjunctivePrivilegeGroup(privileges.stream()
-            .map(PoliciesConfig.Privilege::getType)
-            .collect(Collectors.toList()))));
+        ImmutableList.of(
+            new ConjunctivePrivilegeGroup(
+                privileges.stream()
+                    .map(PoliciesConfig.Privilege::getType)
+                    .collect(Collectors.toList()))));
   }
 }
diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java
index 02d413301f3b4..554b6e909e9e3 100644
--- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java
+++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java
@@ -1,10 +1,13 @@
 package com.linkedin.metadata.resources.usage;
 
+import static com.linkedin.metadata.Constants.*;
+import static com.linkedin.metadata.resources.restli.RestliUtils.*;
+
 import com.codahale.metrics.MetricRegistry;
 import com.datahub.authentication.Authentication;
 import com.datahub.authentication.AuthenticationContext;
-import com.datahub.plugins.auth.authorization.Authorizer;
 import com.datahub.authorization.EntitySpec;
+import com.datahub.plugins.auth.authorization.Authorizer;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.StreamReadConstraints;
 import com.fasterxml.jackson.databind.JsonNode;
@@ -67,22 +70,23 @@
 import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
-import static com.linkedin.metadata.Constants.*;
-import static com.linkedin.metadata.resources.restli.RestliUtils.*;
-
-
-/**
- * Rest.li entry point: /usageStats
- */
+/** Rest.li entry point: /usageStats */
 @Slf4j
 @Deprecated
 @RestLiSimpleResource(name = "usageStats", namespace = "com.linkedin.usage")
 public class UsageStats extends SimpleResourceTemplate<UsageAggregation> {
 
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
   static {
-    int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
-    OBJECT_MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
+    int maxSize =
+        Integer.parseInt(
+            System.getenv()
+                .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE));
+    OBJECT_MAPPER
+        .getFactory()
+        .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build());
   }
+
   private static final String ACTION_BATCH_INGEST = "batchIngest";
 
   private static final String PARAM_BUCKETS = "buckets";
@@ -122,18 +126,24 @@ public class UsageStats extends SimpleResourceTemplate<UsageAggregation> {
   @WithSpan
   public Task<Void> batchIngest(@ActionParam(PARAM_BUCKETS) @Nonnull UsageAggregation[] buckets) {
     log.info("Ingesting {} usage stats aggregations", buckets.length);
-    return RestliUtil.toTask(() -> {
-      Authentication auth = AuthenticationContext.getAuthentication();
-      if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-          && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), (EntitySpec) null)) {
-        throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-            "User is unauthorized to edit entities.");
-      }
-      for (UsageAggregation agg : buckets) {
-        this.ingest(agg);
-      }
-      return null;
-    }, MetricRegistry.name(this.getClass(), "batchIngest"));
+    return RestliUtil.toTask(
+        () -> {
+          Authentication auth = AuthenticationContext.getAuthentication();
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  auth,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE),
+                  (EntitySpec) null)) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities.");
+          }
+          for (UsageAggregation agg : buckets) {
+            this.ingest(agg);
+          }
+          return null;
+        },
+        MetricRegistry.name(this.getClass(), "batchIngest"));
   }
 
@@ -153,35 +163,50 @@ private CalendarInterval windowToInterval(@Nonnull WindowDuration duration) {
     }
   }
 
-  private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String resource,
-      @Nonnull WindowDuration duration) {
-    // NOTE: We will not populate the per-bucket userCounts and fieldCounts in this implementation because
-    // (a) it is very expensive to compute the un-explode equivalent queries for timeseries field collections, and
-    // (b) the equivalent data for the whole query will anyways be populated in the `aggregations` part of the results
+  private UsageAggregationArray getBuckets(
+      @Nonnull Filter filter, @Nonnull String resource, @Nonnull WindowDuration duration) {
+    // NOTE: We will not populate the per-bucket userCounts and fieldCounts in this implementation
+    // because
+    // (a) it is very expensive to compute the un-explode equivalent queries for timeseries field
+    // collections, and
+    // (b) the equivalent data for the whole query will anyways be populated in the `aggregations`
+    // part of the results
     // (see getAggregations).
-    // 1. Construct the aggregation specs for latest value of uniqueUserCount, totalSqlQueries & topSqlQueries.
+    // 1. Construct the aggregation specs for latest value of uniqueUserCount, totalSqlQueries &
+    // topSqlQueries.
     AggregationSpec uniqueUserCountAgg =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("uniqueUserCount");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("uniqueUserCount");
     AggregationSpec totalSqlQueriesAgg =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("totalSqlQueries");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("totalSqlQueries");
     AggregationSpec topSqlQueriesAgg =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("topSqlQueries");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("topSqlQueries");
     AggregationSpec[] aggregationSpecs =
-        new AggregationSpec[]{uniqueUserCountAgg, totalSqlQueriesAgg, topSqlQueriesAgg};
+        new AggregationSpec[] {uniqueUserCountAgg, totalSqlQueriesAgg, topSqlQueriesAgg};
 
     // 2. Construct the Grouping buckets with just the ts bucket.
     GroupingBucket timestampBucket = new GroupingBucket();
-    timestampBucket.setKey(ES_FIELD_TIMESTAMP)
+    timestampBucket
+        .setKey(ES_FIELD_TIMESTAMP)
         .setType(GroupingBucketType.DATE_GROUPING_BUCKET)
         .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(windowToInterval(duration)));
-    GroupingBucket[] groupingBuckets = new GroupingBucket[]{timestampBucket};
+    GroupingBucket[] groupingBuckets = new GroupingBucket[] {timestampBucket};
 
     // 3. Query
     GenericTable result =
-        _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs,
-            filter, groupingBuckets);
+        _timeseriesAspectService.getAggregatedStats(
+            USAGE_STATS_ENTITY_NAME,
+            USAGE_STATS_ASPECT_NAME,
+            aggregationSpecs,
+            filter,
+            groupingBuckets);
     // 4. Populate buckets from the result.
     UsageAggregationArray buckets = new UsageAggregationArray();
@@ -211,9 +236,11 @@ private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String
       }
       if (!row.get(3).equals(ES_NULL_VALUE)) {
         try {
-          usageAggregationMetrics.setTopSqlQueries(OBJECT_MAPPER.readValue(row.get(3), StringArray.class));
+          usageAggregationMetrics.setTopSqlQueries(
+              OBJECT_MAPPER.readValue(row.get(3), StringArray.class));
         } catch (JsonProcessingException e) {
-          throw new IllegalArgumentException("Failed to convert topSqlQueries from ES to object", e);
+          throw new IllegalArgumentException(
+              "Failed to convert topSqlQueries from ES to object", e);
         }
       }
       usageAggregation.setMetrics(usageAggregationMetrics);
@@ -226,20 +253,31 @@ private UsageAggregationArray getBuckets(@Nonnull Filter filter, @Nonnull String
   private List<UserUsageCounts> getUserUsageCounts(Filter filter) {
     // Sum aggregation on userCounts.count
     AggregationSpec sumUserCountsCountAggSpec =
-        new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.count");
+        new AggregationSpec()
+            .setAggregationType(AggregationType.SUM)
+            .setFieldPath("userCounts.count");
     AggregationSpec latestUserEmailAggSpec =
-        new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("userCounts.userEmail");
-    AggregationSpec[] aggregationSpecs = new AggregationSpec[]{sumUserCountsCountAggSpec, latestUserEmailAggSpec};
+        new AggregationSpec()
+            .setAggregationType(AggregationType.LATEST)
+            .setFieldPath("userCounts.userEmail");
+    AggregationSpec[] aggregationSpecs =
+        new AggregationSpec[] {sumUserCountsCountAggSpec, latestUserEmailAggSpec};
 
     // String grouping bucket on userCounts.user
     GroupingBucket userGroupingBucket =
-        new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET);
-    GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket};
+        new GroupingBucket()
+            .setKey("userCounts.user")
+            .setType(GroupingBucketType.STRING_GROUPING_BUCKET);
+    GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket};
 
     // Query backend
     GenericTable result =
-        _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs,
-            filter, groupingBuckets);
+        _timeseriesAspectService.getAggregatedStats(
+            USAGE_STATS_ENTITY_NAME,
+            USAGE_STATS_ASPECT_NAME,
+            aggregationSpecs,
+            filter,
+            groupingBuckets);
     // Process response
     List<UserUsageCounts> userUsageCounts = new ArrayList<>();
     for (StringArray row : result.getRows()) {
@@ -253,7 +291,8 @@ private List<UserUsageCounts> getUserUsageCounts(Filter filter) {
       try {
         userUsageCount.setCount(Integer.valueOf(row.get(1)));
       } catch (NumberFormatException e) {
-        throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e);
+        throw new IllegalArgumentException(
+            "Failed to convert user usage count from ES to int", e);
       }
       }
       if (!row.get(2).equals(ES_NULL_VALUE)) {
@@ -267,18 +306,26 @@ private List<UserUsageCounts> getUserUsageCounts(Filter filter) {
   private List<FieldUsageCounts> getFieldUsageCounts(Filter filter) {
     // Sum aggregation on fieldCounts.count
     AggregationSpec sumFieldCountAggSpec =
-        new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("fieldCounts.count");
-    AggregationSpec[] aggregationSpecs = new AggregationSpec[]{sumFieldCountAggSpec};
+        new AggregationSpec()
+            .setAggregationType(AggregationType.SUM)
+            .setFieldPath("fieldCounts.count");
+    AggregationSpec[] aggregationSpecs = new AggregationSpec[] {sumFieldCountAggSpec};
 
     // String grouping bucket on fieldCounts.fieldName
     GroupingBucket userGroupingBucket =
-        new GroupingBucket().setKey("fieldCounts.fieldPath").setType(GroupingBucketType.STRING_GROUPING_BUCKET);
-    GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket};
+        new GroupingBucket()
+            .setKey("fieldCounts.fieldPath")
+            .setType(GroupingBucketType.STRING_GROUPING_BUCKET);
+    GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket};
 
     // Query backend
     GenericTable result =
-        _timeseriesAspectService.getAggregatedStats(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, aggregationSpecs,
-            filter, groupingBuckets);
+        _timeseriesAspectService.getAggregatedStats(
+            USAGE_STATS_ENTITY_NAME,
+            USAGE_STATS_ASPECT_NAME,
+            aggregationSpecs,
+            filter,
+            groupingBuckets);
 
     // Process response
     List<FieldUsageCounts> fieldUsageCounts = new ArrayList<>();
@@ -289,7 +336,8 @@ private List<FieldUsageCounts> getFieldUsageCounts(Filter filter) {
       try {
         fieldUsageCount.setCount(Integer.valueOf(row.get(1)));
       } catch (NumberFormatException e) {
-        throw new IllegalArgumentException("Failed to convert field usage count from ES to int", e);
+        throw new IllegalArgumentException(
+            "Failed to convert field usage count from ES to int", e);
       }
       }
       fieldUsageCounts.add(fieldUsageCount);
@@ -312,80 +360,100 @@ private UsageQueryResultAggregations getAggregations(Filter filter) {
   @Action(name = ACTION_QUERY)
   @Nonnull
   @WithSpan
-  public Task<UsageQueryResult> query(@ActionParam(PARAM_RESOURCE) @Nonnull String resource,
+  public Task<UsageQueryResult> query(
+      @ActionParam(PARAM_RESOURCE) @Nonnull String resource,
       @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration,
-      @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional Long startTime,
+      @ActionParam(PARAM_START_TIME) @com.linkedin.restli.server.annotations.Optional
+          Long startTime,
       @ActionParam(PARAM_END_TIME) @com.linkedin.restli.server.annotations.Optional Long endTime,
-      @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional Integer maxBuckets) {
+      @ActionParam(PARAM_MAX_BUCKETS) @com.linkedin.restli.server.annotations.Optional
+          Integer maxBuckets) {
     log.info("Attempting to query usage stats");
-    return RestliUtil.toTask(() -> {
-      Authentication auth = AuthenticationContext.getAuthentication();
-      Urn resourceUrn = UrnUtils.getUrn(resource);
-      if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-          && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE),
-          new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) {
-        throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-            "User is unauthorized to query usage.");
-      }
-      // 1. Populate the filter. This is common for all queries.
-      Filter filter = new Filter();
-      ArrayList<Criterion> criteria = new ArrayList<>();
-      Criterion hasUrnCriterion = new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource);
-      criteria.add(hasUrnCriterion);
-      if (startTime != null) {
-        Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-            .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
-            .setValue(startTime.toString());
-        criteria.add(startTimeCriterion);
-      }
-      if (endTime != null) {
-        Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP)
-            .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
-            .setValue(endTime.toString());
-        criteria.add(endTimeCriterion);
-      }
-
-      filter.setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))));
-
-      // 2. Get buckets.
-      UsageAggregationArray buckets = getBuckets(filter, resource, duration);
-
-      // 3. Get aggregations.
-      UsageQueryResultAggregations aggregations = getAggregations(filter);
-
-      // 4. Compute totalSqlQuery count from the buckets itself.
-      // We want to avoid issuing an additional query with a sum aggregation.
-      Integer totalQueryCount = null;
-      for (UsageAggregation bucket : buckets) {
-        if (bucket.getMetrics().getTotalSqlQueries() != null) {
-          if (totalQueryCount == null) {
-            totalQueryCount = 0;
+    return RestliUtil.toTask(
+        () -> {
+          Authentication auth = AuthenticationContext.getAuthentication();
+          Urn resourceUrn = UrnUtils.getUrn(resource);
+          if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
+              && !isAuthorized(
+                  auth,
+                  _authorizer,
+                  ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE),
+                  new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) {
+            throw new RestLiServiceException(
+                HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage.");
+          }
+          // 1. Populate the filter. This is common for all queries.
+          Filter filter = new Filter();
+          ArrayList<Criterion> criteria = new ArrayList<>();
+          Criterion hasUrnCriterion =
+              new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource);
+          criteria.add(hasUrnCriterion);
+          if (startTime != null) {
+            Criterion startTimeCriterion =
+                new Criterion()
+                    .setField(ES_FIELD_TIMESTAMP)
+                    .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO)
+                    .setValue(startTime.toString());
+            criteria.add(startTimeCriterion);
+          }
+          if (endTime != null) {
+            Criterion endTimeCriterion =
+                new Criterion()
+                    .setField(ES_FIELD_TIMESTAMP)
+                    .setCondition(Condition.LESS_THAN_OR_EQUAL_TO)
+                    .setValue(endTime.toString());
+            criteria.add(endTimeCriterion);
           }
-          totalQueryCount += bucket.getMetrics().getTotalSqlQueries();
-        }
-      }
 
-      if (totalQueryCount != null) {
-        aggregations.setTotalSqlQueries(totalQueryCount);
-      }
+          filter.setOr(
+              new ConjunctiveCriterionArray(
+                  new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))));
+
+          // 2. Get buckets.
+          UsageAggregationArray buckets = getBuckets(filter, resource, duration);
+
+          // 3. Get aggregations.
+          UsageQueryResultAggregations aggregations = getAggregations(filter);
+
+          // 4. Compute totalSqlQuery count from the buckets itself.
+          // We want to avoid issuing an additional query with a sum aggregation.
+          Integer totalQueryCount = null;
+          for (UsageAggregation bucket : buckets) {
+            if (bucket.getMetrics().getTotalSqlQueries() != null) {
+              if (totalQueryCount == null) {
+                totalQueryCount = 0;
+              }
+              totalQueryCount += bucket.getMetrics().getTotalSqlQueries();
+            }
+          }
 
-      // 5. Populate and return the result.
-      return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations);
-    }, MetricRegistry.name(this.getClass(), "query"));
+          if (totalQueryCount != null) {
+            aggregations.setTotalSqlQueries(totalQueryCount);
+          }
+
+          // 5. Populate and return the result.
+          return new UsageQueryResult().setBuckets(buckets).setAggregations(aggregations);
+        },
+        MetricRegistry.name(this.getClass(), "query"));
   }
 
   @Action(name = ACTION_QUERY_RANGE)
   @Nonnull
   @WithSpan
-  public Task<UsageQueryResult> queryRange(@ActionParam(PARAM_RESOURCE) @Nonnull String resource,
-      @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, @ActionParam(PARAM_RANGE) UsageTimeRange range) {
+  public Task<UsageQueryResult> queryRange(
+      @ActionParam(PARAM_RESOURCE) @Nonnull String resource,
+      @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration,
+      @ActionParam(PARAM_RANGE) UsageTimeRange range) {
     Authentication auth = AuthenticationContext.getAuthentication();
     Urn resourceUrn = UrnUtils.getUrn(resource);
     if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))
-        && !isAuthorized(auth, _authorizer, ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE),
+        && !isAuthorized(
+            auth,
+            _authorizer,
+            ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE),
             new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) {
-      throw new RestLiServiceException(HttpStatus.S_401_UNAUTHORIZED,
-          "User is unauthorized to query usage.");
+      throw new RestLiServiceException(
+          HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage.");
     }
     final long now = Instant.now().toEpochMilli();
     return this.query(resource, duration, convertRangeToStartTime(range, now), now, null);
@@ -418,7 +486,8 @@ private void ingest(@Nonnull UsageAggregation bucket) {
       datasetUsageStatistics.setUserCounts(datasetUserUsageCountsArray);
     }
     if (aggregationMetrics.hasFields()) {
-      DatasetFieldUsageCountsArray datasetFieldUsageCountsArray = new DatasetFieldUsageCountsArray();
+      DatasetFieldUsageCountsArray datasetFieldUsageCountsArray =
+          new DatasetFieldUsageCountsArray();
       for (FieldUsageCounts f : aggregationMetrics.getFields()) {
         DatasetFieldUsageCounts datasetFieldUsageCounts = new DatasetFieldUsageCounts();
         datasetFieldUsageCounts.setFieldPath(f.getFieldName());
@@ -431,17 +500,23 @@ private void ingest(@Nonnull UsageAggregation bucket) {
     Map<String, JsonNode> documents;
     try {
       documents =
-          TimeseriesAspectTransformer.transform(bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(),
-              null);
+          TimeseriesAspectTransformer.transform(
+              bucket.getResource(), datasetUsageStatistics, getUsageStatsAspectSpec(), null);
     } catch (JsonProcessingException e) {
       log.error("Failed to generate timeseries document from aspect: {}", e.toString());
       return;
     }
     // 3. Upsert the exploded documents to timeseries aspect service.
-    documents.entrySet().forEach(document -> {
-      _timeseriesAspectService.upsertDocument(USAGE_STATS_ENTITY_NAME, USAGE_STATS_ASPECT_NAME, document.getKey(),
-          document.getValue());
-    });
+    documents
+        .entrySet()
+        .forEach(
+            document -> {
+              _timeseriesAspectService.upsertDocument(
+                  USAGE_STATS_ENTITY_NAME,
+                  USAGE_STATS_ASPECT_NAME,
+                  document.getKey(),
+                  document.getValue());
+            });
   }
 
   @Nonnull
diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java
index 351a3d8f24e36..d6eeb1a01ac15 100644
--- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java
+++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java
@@ -1,5 +1,8 @@
 package com.linkedin.metadata.resources.entity;
 
+import static com.linkedin.metadata.Constants.*;
+import static org.mockito.Mockito.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -15,9 +18,9 @@
 import com.linkedin.metadata.config.PreProcessHooks;
 import com.linkedin.metadata.entity.AspectDao;
 import com.linkedin.metadata.entity.EntityService;
+import com.linkedin.metadata.entity.EntityServiceImpl;
 import com.linkedin.metadata.entity.UpdateAspectResult;
 import com.linkedin.metadata.entity.ebean.transactions.UpsertBatchItem;
-import com.linkedin.metadata.entity.EntityServiceImpl;
 import com.linkedin.metadata.event.EventProducer;
 import com.linkedin.metadata.models.AspectSpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
@@ -27,15 +30,10 @@
 import com.linkedin.mxe.MetadataChangeProposal;
 import java.net.URISyntaxException;
 import java.util.List;
-
 import mock.MockEntityRegistry;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-import static org.mockito.Mockito.*;
-
-
 public class AspectResourceTest {
   private AspectResource _aspectResource;
   private EntityService _entityService;
@@ -54,7 +52,9 @@ public void setup() {
     _entityRegistry = new MockEntityRegistry();
     _updateIndicesService = mock(UpdateIndicesService.class);
     _preProcessHooks = mock(PreProcessHooks.class);
-    _entityService = new EntityServiceImpl(_aspectDao, _producer, _entityRegistry, false, _updateIndicesService, _preProcessHooks);
+    _entityService =
+        new EntityServiceImpl(
+            _aspectDao, _producer, _entityRegistry, false, _updateIndicesService, _preProcessHooks);
     _authorizer = mock(Authorizer.class);
     _aspectResource.setAuthorizer(_authorizer);
     _aspectResource.setEntityService(_entityService);
@@ -82,36 +82,49 @@ public void testAsyncDefaultAspects() throws URISyntaxException {
 
     reset(_producer, _aspectDao);
 
-    UpsertBatchItem req = UpsertBatchItem.builder()
+    UpsertBatchItem req =
+        UpsertBatchItem.builder()
             .urn(urn)
             .aspectName(mcp.getAspectName())
             .aspect(mcp.getAspect())
             .metadataChangeProposal(mcp)
             .build(_entityRegistry);
     when(_aspectDao.runInTransactionWithRetry(any(), any(), anyInt()))
-        .thenReturn(List.of(
-            UpdateAspectResult.builder().urn(urn)
-                .newValue(new DatasetProperties().setName("name1"))
-                .auditStamp(new AuditStamp())
-                .request(req).build(),
-            UpdateAspectResult.builder().urn(urn)
-                .newValue(new DatasetProperties().setName("name2"))
-                .auditStamp(new AuditStamp())
-                .request(req).build(),
-            UpdateAspectResult.builder().urn(urn)
-                .newValue(new DatasetProperties().setName("name3"))
-                .auditStamp(new AuditStamp())
-                .request(req).build(),
-            UpdateAspectResult.builder().urn(urn)
-                .newValue(new DatasetProperties().setName("name4"))
-                .auditStamp(new AuditStamp())
-                .request(req).build(),
-            UpdateAspectResult.builder().urn(urn)
-                .newValue(new DatasetProperties().setName("name5"))
-                .auditStamp(new AuditStamp())
-                .request(req).build()));
+        .thenReturn(
+            List.of(
+                UpdateAspectResult.builder()
+                    .urn(urn)
+                    .newValue(new DatasetProperties().setName("name1"))
+                    .auditStamp(new AuditStamp())
+                    .request(req)
+                    .build(),
+                UpdateAspectResult.builder()
+                    .urn(urn)
+                    .newValue(new DatasetProperties().setName("name2"))
+                    .auditStamp(new AuditStamp())
+                    .request(req)
+                    .build(),
+                UpdateAspectResult.builder()
+                    .urn(urn)
+                    .newValue(new DatasetProperties().setName("name3"))
+                    .auditStamp(new AuditStamp())
+                    .request(req)
+                    .build(),
+                UpdateAspectResult.builder()
+                    .urn(urn)
+                    .newValue(new DatasetProperties().setName("name4"))
+                    .auditStamp(new AuditStamp())
+                    .request(req)
+                    .build(),
+                UpdateAspectResult.builder()
+                    .urn(urn)
+                    .newValue(new DatasetProperties().setName("name5"))
+                    .auditStamp(new AuditStamp())
+                    .request(req)
+                    .build()));
     _aspectResource.ingestProposal(mcp, "false");
-    verify(_producer, times(5)).produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class));
+    verify(_producer, times(5))
+        .produceMetadataChangeLog(eq(urn), any(AspectSpec.class), any(MetadataChangeLog.class));
     verifyNoMoreInteractions(_producer);
   }
 }
diff --git a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java
index 470c6e87040ec..bdfe906f42af9 100644
--- a/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java
+++ b/metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/operations/OperationsResourceTest.java
@@ -1,18 +1,16 @@
 package com.linkedin.metadata.resources.operations;
 
+import static org.testng.AssertJUnit.*;
+
 import com.linkedin.metadata.timeseries.TimeseriesAspectService;
 import com.linkedin.util.Pair;
 import java.util.List;
 import mock.MockTimeseriesAspectService;
 import org.testng.annotations.Test;
 
-import static org.testng.AssertJUnit.*;
-
-
 public class OperationsResourceTest {
   private static final String TASK_ID = "taskId123";
 
-
   @Test
   public void testDryRun() {
     TimeseriesAspectService mockTimeseriesAspectService = new MockTimeseriesAspectService();
@@ -20,11 +18,13 @@ public void testDryRun() {
     String aspectName = "datasetusagestatistics";
     long endTimeMillis = 3000;
     OperationsResource testResource = new OperationsResource(mockTimeseriesAspectService);
-    String output = testResource.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true, null,
-        null, null, null);
+    String output =
+        testResource.executeTruncateTimeseriesAspect(
+            entityType, aspectName, endTimeMillis, true, null, null, null, null);
     assertTrue(output.contains("This was a dry run"));
-    output = testResource.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, false, null,
-        null, null, null);
+    output =
+        testResource.executeTruncateTimeseriesAspect(
+            entityType, aspectName, endTimeMillis, false, null, null, null, null);
     assertEquals(TASK_ID, output);
   }
 
@@ -42,59 +42,113 @@ public void testIsTaskIdValid() {
   @Test
   public void testForceFlags() {
     final String reindexTaskId = "REINDEX_TASK_ID";
-    TimeseriesAspectService mockTimeseriesAspectServiceWouldDeleteByQuery = new MockTimeseriesAspectService();
-    TimeseriesAspectService mockTimeseriesAspectServiceWouldReindex = new MockTimeseriesAspectService(30, 20, reindexTaskId);
+    TimeseriesAspectService mockTimeseriesAspectServiceWouldDeleteByQuery =
+        new MockTimeseriesAspectService();
+    TimeseriesAspectService mockTimeseriesAspectServiceWouldReindex =
+        new MockTimeseriesAspectService(30, 20, reindexTaskId);
     String entityType = "dataset";
     String aspectName = "datasetusagestatistics";
     long endTimeMillis = 3000;
-    OperationsResource testResourceWouldReindex = new OperationsResource(mockTimeseriesAspectServiceWouldReindex);
-    OperationsResource testResourceWouldDeleteByQuery = new OperationsResource(mockTimeseriesAspectServiceWouldDeleteByQuery);
+    OperationsResource testResourceWouldReindex =
+        new OperationsResource(mockTimeseriesAspectServiceWouldReindex);
+    OperationsResource testResourceWouldDeleteByQuery =
+        new OperationsResource(mockTimeseriesAspectServiceWouldDeleteByQuery);
 
-    String result = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-        null, null, true, true);
+    String result =
+        testResourceWouldReindex.executeTruncateTimeseriesAspect(
+            entityType, aspectName, endTimeMillis, true, null, null, true, true);
     String errorIfFlagsAreIncompatable = "please only set forceReindex OR forceDeleteByQuery flags";
     assertEquals(errorIfFlagsAreIncompatable, result);
-
-    result = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-        null, null, false, false);
+    result =
+        testResourceWouldReindex.executeTruncateTimeseriesAspect(
+            entityType, aspectName, endTimeMillis, true, null, null, false, false);
     assertEquals(errorIfFlagsAreIncompatable, result);
-
-    List<Pair<Boolean, Boolean>>
-        validOptionsNothingForced = List.of(Pair.of(null, null), Pair.of(null, false), Pair.of(false, null));
+    List<Pair<Boolean, Boolean>> validOptionsNothingForced =
+        List.of(Pair.of(null, null), Pair.of(null, false), Pair.of(false, null));
     for (Pair<Boolean, Boolean> values : validOptionsNothingForced) {
-      String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-          null, null, values.getFirst(), values.getSecond());
+      String reindexResult =
+          testResourceWouldReindex.executeTruncateTimeseriesAspect(
+              entityType,
+              aspectName,
+              endTimeMillis,
+              true,
+              null,
+              null,
+              values.getFirst(),
+              values.getSecond());
       assertNotSame(errorIfFlagsAreIncompatable, reindexResult);
       assertTrue(reindexResult.contains("Reindexing the aspect without the deleted records"));
-      String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-          null, null, values.getFirst(), values.getSecond());
+      String deleteResult =
+          testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(
+              entityType,
+              aspectName,
+              endTimeMillis,
+              true,
+              null,
+              null,
+              values.getFirst(),
+              values.getSecond());
       assertNotSame(errorIfFlagsAreIncompatable, deleteResult);
       assertTrue(deleteResult.contains("Issuing a delete by query request. "));
     }
-    List<Pair<Boolean, Boolean>> validOptionsForceDeleteByQuery = List.of(Pair.of(true, null), Pair.of(true, false));
+    List<Pair<Boolean, Boolean>> validOptionsForceDeleteByQuery =
+        List.of(Pair.of(true, null), Pair.of(true, false));
     for (Pair<Boolean, Boolean> values : validOptionsForceDeleteByQuery) {
-      String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-          null, null, values.getFirst(), values.getSecond());
-      String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-          null, null, values.getFirst(), values.getSecond());
+      String reindexResult =
+          testResourceWouldReindex.executeTruncateTimeseriesAspect(
+              entityType,
+              aspectName,
+              endTimeMillis,
+              true,
+              null,
+              null,
+              values.getFirst(),
+              values.getSecond());
+      String deleteResult =
+          testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(
+              entityType,
+              aspectName,
+              endTimeMillis,
+              true,
+              null,
+              null,
+              values.getFirst(),
+              values.getSecond());
       for (String res : List.of(reindexResult, deleteResult)) {
         assertNotSame(errorIfFlagsAreIncompatable, res);
         assertTrue(res.contains("Issuing a delete by query request. "));
       }
     }
-    List<Pair<Boolean, Boolean>> validOptionsForceReindex = List.of(Pair.of(null, true), Pair.of(false, true));
+    List<Pair<Boolean, Boolean>> validOptionsForceReindex =
+        List.of(Pair.of(null, true), Pair.of(false, true));
     for (Pair<Boolean, Boolean> values : validOptionsForceReindex) {
-      String reindexResult = testResourceWouldReindex.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-          null, null, values.getFirst(), values.getSecond());
-      String deleteResult = testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(entityType, aspectName, endTimeMillis, true,
-          null, null, values.getFirst(), values.getSecond());
+      String reindexResult =
+          testResourceWouldReindex.executeTruncateTimeseriesAspect(
+              entityType,
+              aspectName,
+              endTimeMillis,
+              true,
+              null,
+              null,
+              values.getFirst(),
+              values.getSecond());
+      String deleteResult =
+          testResourceWouldDeleteByQuery.executeTruncateTimeseriesAspect(
+              entityType,
+              aspectName,
+              endTimeMillis,
+              true,
+              null,
+              null,
+              values.getFirst(),
+              values.getSecond());
      for (String res : List.of(reindexResult, deleteResult)) {
        assertNotSame(errorIfFlagsAreIncompatable, res);
        assertTrue(res.contains("Reindexing the aspect without the deleted records"));
      }
    }
  }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java
index 81d2bbd88b3e6..2a12ecf6866bb 100644
--- a/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java
+++ b/metadata-service/restli-servlet-impl/src/test/java/mock/MockTimeseriesAspectService.java
@@ -16,7 +16,6 @@
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 
-
 public class MockTimeseriesAspectService implements TimeseriesAspectService {
 
   public static final long DEFAULT_COUNT = 30;
@@ -32,6 +31,7 @@ public MockTimeseriesAspectService() {
     this._filteredCount = DEFAULT_FILTERED_COUNT;
     this._taskId = DEFAULT_TASK_ID;
   }
+
   public MockTimeseriesAspectService(long count, long filteredCount, String taskId) {
     this._count = count;
     this._filteredCount = filteredCount;
@@ -39,12 +39,11 @@ public MockTimeseriesAspectService(long count, long filteredCount, String taskId
   }
 
   @Override
-  public void configure() {
-
-  }
+  public void configure() {}
 
   @Override
-  public long countByFilter(@Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter) {
+  public long countByFilter(
+      @Nonnull String entityName, @Nonnull String aspectName, @Nullable Filter filter) {
     if (filter != null && !filter.equals(new Filter())) {
       return _filteredCount;
     }
@@ -53,36 +52,51 @@ public long countByFilter(@Nonnull String entityName, @Nonnull String aspectName
 
   @Nonnull
   @Override
-  public List<EnvelopedAspect> getAspectValues(@Nonnull Urn urn, @Nonnull String entityName,
-      @Nonnull String aspectName, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis,
-      @Nullable Integer limit, @Nullable Filter filter, @Nullable SortCriterion sort) {
+  public List<EnvelopedAspect> getAspectValues(
+      @Nonnull Urn urn,
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nullable Long startTimeMillis,
+      @Nullable Long endTimeMillis,
+      @Nullable Integer limit,
+      @Nullable Filter filter,
+      @Nullable SortCriterion sort) {
     return List.of();
   }
 
   @Nonnull
   @Override
-  public GenericTable getAggregatedStats(@Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull AggregationSpec[] aggregationSpecs, @Nullable Filter filter,
+  public GenericTable getAggregatedStats(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull AggregationSpec[] aggregationSpecs,
+      @Nullable Filter filter,
       @Nullable GroupingBucket[] groupingBuckets) {
     return new GenericTable();
   }
 
   @Nonnull
   @Override
-  public DeleteAspectValuesResult deleteAspectValues(@Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull Filter filter) {
+  public DeleteAspectValuesResult deleteAspectValues(
+      @Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter) {
     return new DeleteAspectValuesResult();
   }
 
   @Nonnull
   @Override
-  public String deleteAspectValuesAsync(@Nonnull String entityName, @Nonnull String aspectName,
-      @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options) {
+  public String deleteAspectValuesAsync(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull Filter filter,
+      @Nonnull BatchWriteOperationsOptions options) {
    return _taskId;
  }
 
  @Override
-  public String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter,
+  public String reindexAsync(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull Filter filter,
      @Nonnull BatchWriteOperationsOptions options) {
    return _taskId;
  }
@@ -94,10 +108,11 @@ public DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull String runId)
  }
 
  @Override
-  public void upsertDocument(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull String docId,
-      @Nonnull JsonNode document) {
-
-  }
+  public void upsertDocument(
+      @Nonnull String entityName,
+      @Nonnull String aspectName,
+      @Nonnull String docId,
+      @Nonnull JsonNode document) {}
 
  @Override
  public List<TimeseriesIndexSizeResult> getIndexSizes() {
diff --git a/metadata-service/schema-registry-api/build.gradle b/metadata-service/schema-registry-api/build.gradle
index 290126836eb4a..077d7d4f2d6a4 100644
--- a/metadata-service/schema-registry-api/build.gradle
+++ b/metadata-service/schema-registry-api/build.gradle
@@ -45,5 +45,3 @@ tasks.register('generateOpenApiPojos', GenerateSwaggerCode) {
 
 sourceSets.main.java.srcDirs "${generateOpenApiPojos.outputDir}/src/main/java"
 sourceSets.main.resources.srcDir "${generateOpenApiPojos.outputDir}/src/main/resources"
-checkstyleMain.source = "${projectDir}/src/main/java" diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java index dc7fd5e20d9cd..58058dc3332b0 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/CompatibilityCheckResponse.java @@ -1,24 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Compatibility check response - */ +/** Compatibility check response */ @io.swagger.v3.oas.annotations.media.Schema(description = "Compatibility check response") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class CompatibilityCheckResponse { +public class CompatibilityCheckResponse { @JsonProperty("is_compatible") private Boolean isCompatible = null; @@ -34,11 +32,12 @@ public CompatibilityCheckResponse isCompatible(Boolean isCompatible) { /** * Whether the compared schemas are compatible + * * @return isCompatible - **/ - @io.swagger.v3.oas.annotations.media.Schema(description = "Whether the compared schemas are compatible") - - public Boolean isIsCompatible() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + description = "Whether the compared schemas are compatible") + public Boolean isIsCompatible() { return isCompatible; } @@ -61,11 +60,11 @@ public CompatibilityCheckResponse addMessagesItem(String messagesItem) { /** * Error messages + * * @return messages - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "[]", description = "Error messages") - - public List getMessages() { + public List getMessages() { return messages; } @@ -73,7 +72,6 @@ public void setMessages(List messages) { this.messages = messages; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -83,8 +81,8 @@ public boolean equals(java.lang.Object o) { return false; } CompatibilityCheckResponse compatibilityCheckResponse = (CompatibilityCheckResponse) o; - return Objects.equals(this.isCompatible, compatibilityCheckResponse.isCompatible) && - Objects.equals(this.messages, compatibilityCheckResponse.messages); + return Objects.equals(this.isCompatible, compatibilityCheckResponse.isCompatible) + && Objects.equals(this.messages, compatibilityCheckResponse.messages); } @Override @@ -96,7 +94,7 @@ public int hashCode() { public String toString() 
{ StringBuilder sb = new StringBuilder(); sb.append("class CompatibilityCheckResponse {\n"); - + sb.append(" isCompatible: ").append(toIndentedString(isCompatible)).append("\n"); sb.append(" messages: ").append(toIndentedString(messages)).append("\n"); sb.append("}"); @@ -104,8 +102,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java index 9e338b232e8da..0a223a88cfd33 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Config.java @@ -1,40 +1,36 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Config - */ +/** Config */ @io.swagger.v3.oas.annotations.media.Schema(description = "Config") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Config { +public class Config { - /** - * Compatibility Level - */ + /** Compatibility Level */ public enum CompatibilityLevelEnum { BACKWARD("BACKWARD"), - + BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - + FORWARD("FORWARD"), - + FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - + FULL("FULL"), - + FULL_TRANSITIVE("FULL_TRANSITIVE"), - + NONE("NONE"); private String value; @@ -59,6 +55,7 @@ public static CompatibilityLevelEnum fromValue(String text) { return null; } } + @JsonProperty("compatibilityLevel") private CompatibilityLevelEnum compatibilityLevel = null; @@ -69,11 +66,13 @@ public Config compatibilityLevel(CompatibilityLevelEnum compatibilityLevel) { /** * Compatibility Level + * * @return compatibilityLevel - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "FULL_TRANSITIVE", description = "Compatibility Level") - - public CompatibilityLevelEnum getCompatibilityLevel() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "FULL_TRANSITIVE", + description = "Compatibility Level") + public CompatibilityLevelEnum 
getCompatibilityLevel() { return compatibilityLevel; } @@ -81,7 +80,6 @@ public void setCompatibilityLevel(CompatibilityLevelEnum compatibilityLevel) { this.compatibilityLevel = compatibilityLevel; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -103,15 +101,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Config {\n"); - + sb.append(" compatibilityLevel: ").append(toIndentedString(compatibilityLevel)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -119,4 +116,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java index 5b586e184c6ce..b179149b33d01 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ConfigUpdateRequest.java @@ -1,40 +1,36 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Config update request - */ +/** Config update request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Config update request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ConfigUpdateRequest { +public class ConfigUpdateRequest { - /** - * Compatibility Level - */ + /** Compatibility Level */ public enum CompatibilityEnum { BACKWARD("BACKWARD"), - + BACKWARD_TRANSITIVE("BACKWARD_TRANSITIVE"), - + FORWARD("FORWARD"), - + FORWARD_TRANSITIVE("FORWARD_TRANSITIVE"), - + FULL("FULL"), - + FULL_TRANSITIVE("FULL_TRANSITIVE"), - + NONE("NONE"); private String value; @@ -59,6 +55,7 @@ public static CompatibilityEnum fromValue(String text) { return null; } } + @JsonProperty("compatibility") private CompatibilityEnum compatibility = null; @@ -69,11 +66,13 @@ public ConfigUpdateRequest compatibility(CompatibilityEnum compatibility) { /** * Compatibility Level + * * @return compatibility - **/ - 
@io.swagger.v3.oas.annotations.media.Schema(example = "FULL_TRANSITIVE", description = "Compatibility Level") - - public CompatibilityEnum getCompatibility() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "FULL_TRANSITIVE", + description = "Compatibility Level") + public CompatibilityEnum getCompatibility() { return compatibility; } @@ -81,7 +80,6 @@ public void setCompatibility(CompatibilityEnum compatibility) { this.compatibility = compatibility; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -103,15 +101,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ConfigUpdateRequest {\n"); - + sb.append(" compatibility: ").append(toIndentedString(compatibility)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -119,4 +116,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java index f462d359bdea6..2f20d77b66137 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ErrorMessage.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Error message - */ +/** Error message */ @io.swagger.v3.oas.annotations.media.Schema(description = "Error message") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ErrorMessage { +public class ErrorMessage { @JsonProperty("error_code") private Integer errorCode = null; @@ -31,11 +28,11 @@ public ErrorMessage errorCode(Integer errorCode) { /** * Error code + * * @return errorCode - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Error code") - - public Integer getErrorCode() { + public Integer getErrorCode() { return errorCode; } @@ -50,11 +47,11 @@ public ErrorMessage message(String message) { /** * Detailed error message + * * @return message - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Detailed error message") - - 
public String getMessage() { + public String getMessage() { return message; } @@ -62,7 +59,6 @@ public void setMessage(String message) { this.message = message; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -72,8 +68,8 @@ public boolean equals(java.lang.Object o) { return false; } ErrorMessage errorMessage = (ErrorMessage) o; - return Objects.equals(this.errorCode, errorMessage.errorCode) && - Objects.equals(this.message, errorMessage.message); + return Objects.equals(this.errorCode, errorMessage.errorCode) + && Objects.equals(this.message, errorMessage.message); } @Override @@ -85,7 +81,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ErrorMessage {\n"); - + sb.append(" errorCode: ").append(toIndentedString(errorCode)).append("\n"); sb.append(" message: ").append(toIndentedString(message)).append("\n"); sb.append("}"); @@ -93,8 +89,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -102,4 +97,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java index 2c6642c97c507..5a418401278d3 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Mode.java @@ -1,34 +1,30 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema Registry operating mode - */ +/** Schema Registry operating mode */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema Registry operating mode") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Mode { +public class Mode { - /** - * Schema Registry operating mode - */ + /** Schema Registry operating mode */ public enum ModeEnum { READWRITE("READWRITE"), - + READONLY("READONLY"), - + READONLY_OVERRIDE("READONLY_OVERRIDE"), - + IMPORT("IMPORT"); private String value; @@ -53,6 +49,7 @@ public static ModeEnum fromValue(String text) { 
return null; } } + @JsonProperty("mode") private ModeEnum mode = null; @@ -63,11 +60,13 @@ public Mode mode(ModeEnum mode) { /** * Schema Registry operating mode + * * @return mode - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "READWRITE", description = "Schema Registry operating mode") - - public ModeEnum getMode() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "READWRITE", + description = "Schema Registry operating mode") + public ModeEnum getMode() { return mode; } @@ -75,7 +74,6 @@ public void setMode(ModeEnum mode) { this.mode = mode; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -97,15 +95,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Mode {\n"); - + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java index c2fffea0034f9..2cbbe4d5351d8 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ModeUpdateRequest.java @@ -1,34 +1,30 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; -import com.fasterxml.jackson.annotation.JsonValue; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonValue; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Mode update request - */ +/** Mode update request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Mode update request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ModeUpdateRequest { +public class ModeUpdateRequest { - /** - * Schema Registry operating mode - */ + /** Schema Registry operating mode */ public enum ModeEnum { READWRITE("READWRITE"), - + READONLY("READONLY"), - + READONLY_OVERRIDE("READONLY_OVERRIDE"), - + IMPORT("IMPORT"); private String value; @@ -53,6 +49,7 @@ public static ModeEnum fromValue(String text) { return null; } } + 
@JsonProperty("mode") private ModeEnum mode = null; @@ -63,11 +60,13 @@ public ModeUpdateRequest mode(ModeEnum mode) { /** * Schema Registry operating mode + * * @return mode - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "READWRITE", description = "Schema Registry operating mode") - - public ModeEnum getMode() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "READWRITE", + description = "Schema Registry operating mode") + public ModeEnum getMode() { return mode; } @@ -75,7 +74,6 @@ public void setMode(ModeEnum mode) { this.mode = mode; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -97,15 +95,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ModeUpdateRequest {\n"); - + sb.append(" mode: ").append(toIndentedString(mode)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +110,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java index 4f535f343f433..d7b2b28123b6f 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaRequest.java @@ -1,25 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.datahubproject.schema_registry.openapi.generated.SchemaReference; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema register request - */ +/** Schema register request */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema register request") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class RegisterSchemaRequest { +public class RegisterSchemaRequest { @JsonProperty("version") private Integer version = null; @@ -44,11 +41,11 @@ public RegisterSchemaRequest version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Version number") - - public 
Integer getVersion() { + public Integer getVersion() { return version; } @@ -63,11 +60,12 @@ public RegisterSchemaRequest id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -82,11 +80,11 @@ public RegisterSchemaRequest schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -109,11 +107,12 @@ public RegisterSchemaRequest addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") - @Valid - public List getReferences() { + @Valid + public List getReferences() { return references; } @@ -128,11 +127,11 @@ public RegisterSchemaRequest schema(String schema) { /** * Schema definition string + * * @return schema - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema definition string") - - public String getSchema() { + public String getSchema() { return schema; } @@ -140,7 +139,6 @@ public void setSchema(String schema) { this.schema = schema; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -150,11 +148,11 @@ public boolean equals(java.lang.Object o) { return false; } RegisterSchemaRequest registerSchemaRequest = (RegisterSchemaRequest) o; - return Objects.equals(this.version, registerSchemaRequest.version) && - Objects.equals(this.id, registerSchemaRequest.id) && - Objects.equals(this.schemaType, registerSchemaRequest.schemaType) && - Objects.equals(this.references, registerSchemaRequest.references) && - Objects.equals(this.schema, registerSchemaRequest.schema); + return Objects.equals(this.version, registerSchemaRequest.version) + && Objects.equals(this.id, registerSchemaRequest.id) + && Objects.equals(this.schemaType, registerSchemaRequest.schemaType) + && Objects.equals(this.references, registerSchemaRequest.references) + && Objects.equals(this.schema, registerSchemaRequest.schema); } @Override @@ -166,7 +164,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RegisterSchemaRequest {\n"); - + sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append(" schemaType: ").append(toIndentedString(schemaType)).append("\n"); @@ -177,8 +175,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -186,4 +183,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java index 7cdcb1093f34b..54e480078233b 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/RegisterSchemaResponse.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema register response - */ +/** Schema register response */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema register response") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class RegisterSchemaResponse { +public class RegisterSchemaResponse { @JsonProperty("id") private Integer id = null; @@ -28,11 +25,13 @@ public RegisterSchemaResponse id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "100001", description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "100001", + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -40,7 +39,6 @@ public void setId(Integer id) { this.id = id; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -62,15 +60,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class RegisterSchemaResponse {\n"); - + sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); return sb.toString(); } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -78,4 +75,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java index b3ca087bdc5f3..cc4d5e7694976 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/Schema.java @@ -8,14 +8,14 @@ import javax.validation.Valid; import org.springframework.validation.annotation.Validated; -/** - * Schema - */ +/** Schema */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class Schema { +public class Schema { @JsonProperty("subject") private String subject = null; @@ -43,11 +43,11 @@ public Schema subject(String subject) { /** * Name of the subject + * * @return subject - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the subject") - - public String getSubject() { + public String getSubject() { return subject; } @@ -62,11 +62,11 @@ public Schema version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -81,11 +81,13 @@ public Schema id(Integer id) { /** * Globally unique identifier of the schema + * * @return id - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "100001", description = "Globally unique identifier of the schema") - - public Integer getId() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "100001", + description = "Globally unique identifier of the schema") + public Integer getId() { return id; } @@ -100,11 +102,11 @@ public Schema schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "AVRO", description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -127,11 +129,12 @@ public Schema addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") - @Valid - public List getReferences() { + @Valid + public List getReferences() { return references; } @@ -146,11 +149,13 @@ public Schema schema(String schema) { /** * Schema definition string + * * @return schema - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "{\"schema\": \"{\"type\": \"string\"}\"}", description = "Schema definition string") - - public String getSchema() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "{\"schema\": \"{\"type\": \"string\"}\"}", + 
description = "Schema definition string") + public String getSchema() { return schema; } @@ -158,7 +163,6 @@ public void setSchema(String schema) { this.schema = schema; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -168,12 +172,12 @@ public boolean equals(java.lang.Object o) { return false; } Schema schema = (Schema) o; - return Objects.equals(this.subject, schema.subject) && - Objects.equals(this.version, schema.version) && - Objects.equals(this.id, schema.id) && - Objects.equals(this.schemaType, schema.schemaType) && - Objects.equals(this.references, schema.references) && - Objects.equals(this.schema, schema.schema); + return Objects.equals(this.subject, schema.subject) + && Objects.equals(this.version, schema.version) + && Objects.equals(this.id, schema.id) + && Objects.equals(this.schemaType, schema.schemaType) + && Objects.equals(this.references, schema.references) + && Objects.equals(this.schema, schema.schema); } @Override @@ -185,7 +189,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class Schema {\n"); - + sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); @@ -197,8 +201,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -206,4 +209,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java index 96fb685dc1bfc..a2dffa59778ed 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaReference.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema reference - */ +/** Schema reference */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema reference") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaReference { +public class 
SchemaReference { @JsonProperty("name") private String name = null; @@ -34,11 +31,13 @@ public SchemaReference name(String name) { /** * Reference name + * * @return name - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "io.confluent.kafka.example.User", description = "Reference name") - - public String getName() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "io.confluent.kafka.example.User", + description = "Reference name") + public String getName() { return name; } @@ -53,11 +52,13 @@ public SchemaReference subject(String subject) { /** * Name of the referenced subject + * * @return subject - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the referenced subject") - - public String getSubject() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "User", + description = "Name of the referenced subject") + public String getSubject() { return subject; } @@ -72,11 +73,13 @@ public SchemaReference version(Integer version) { /** * Version number of the referenced subject + * * @return version - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number of the referenced subject") - - public Integer getVersion() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "1", + description = "Version number of the referenced subject") + public Integer getVersion() { return version; } @@ -84,7 +87,6 @@ public void setVersion(Integer version) { this.version = version; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -94,9 +96,9 @@ public boolean equals(java.lang.Object o) { return false; } SchemaReference schemaReference = (SchemaReference) o; - return Objects.equals(this.name, schemaReference.name) && - Objects.equals(this.subject, schemaReference.subject) && - Objects.equals(this.version, schemaReference.version); + return Objects.equals(this.name, schemaReference.name) + && Objects.equals(this.subject, schemaReference.subject) + && Objects.equals(this.version, schemaReference.version); } @Override @@ -108,7 +110,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaReference {\n"); - + sb.append(" name: ").append(toIndentedString(name)).append("\n"); sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); @@ -117,8 +119,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -126,4 +127,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java index d2832462a10c6..909416e6976b6 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaRegistryServerVersion.java @@ -1,21 +1,18 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * SchemaRegistryServerVersion - */ +/** SchemaRegistryServerVersion */ @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaRegistryServerVersion { +public class SchemaRegistryServerVersion { @JsonProperty("version") private String version = null; @@ -30,11 +27,11 @@ public SchemaRegistryServerVersion version(String version) { /** * Get version + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getVersion() { + public String getVersion() { return version; } @@ -49,11 +46,11 @@ public SchemaRegistryServerVersion commitId(String commitId) { /** * Get commitId + * * @return commitId - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getCommitId() { + public String getCommitId() { return commitId; } @@ -61,7 +58,6 @@ public void setCommitId(String commitId) { this.commitId = commitId; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -71,8 +67,8 @@ public boolean equals(java.lang.Object o) { return false; } SchemaRegistryServerVersion schemaRegistryServerVersion = (SchemaRegistryServerVersion) o; - return Objects.equals(this.version, schemaRegistryServerVersion.version) && - Objects.equals(this.commitId, schemaRegistryServerVersion.commitId); + return Objects.equals(this.version, schemaRegistryServerVersion.version) + && Objects.equals(this.commitId, schemaRegistryServerVersion.commitId); } @Override @@ -84,7 +80,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaRegistryServerVersion {\n"); - + sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append(" commitId: 
").append(toIndentedString(commitId)).append("\n"); sb.append("}"); @@ -92,8 +88,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -101,4 +96,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java index b2ea78e35ce22..977f5d410d667 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SchemaString.java @@ -1,25 +1,22 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.datahubproject.schema_registry.openapi.generated.SchemaReference; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.ArrayList; import java.util.List; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Schema definition - */ +/** Schema definition */ @io.swagger.v3.oas.annotations.media.Schema(description = "Schema definition") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SchemaString { +public class SchemaString { @JsonProperty("schemaType") private String schemaType = null; @@ -41,11 +38,11 @@ public SchemaString schemaType(String schemaType) { /** * Schema type + * * @return schemaType - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "AVRO", description = "Schema type") - - public String getSchemaType() { + public String getSchemaType() { return schemaType; } @@ -60,11 +57,13 @@ public SchemaString schema(String schema) { /** * Schema string identified by the ID + * * @return schema - **/ - @io.swagger.v3.oas.annotations.media.Schema(example = "{\"schema\": \"{\"type\": \"string\"}\"}", description = "Schema string identified by the ID") - - public String getSchema() { + */ + @io.swagger.v3.oas.annotations.media.Schema( + example = "{\"schema\": \"{\"type\": \"string\"}\"}", + description = "Schema string identified by the ID") + public String getSchema() { return schema; } @@ -87,11 +86,12 @@ public SchemaString addReferencesItem(SchemaReference referencesItem) { /** * References to other schemas + * * @return references - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "References to other schemas") 
- @Valid - public List getReferences() { + @Valid + public List getReferences() { return references; } @@ -106,11 +106,11 @@ public SchemaString maxId(Integer maxId) { /** * Maximum ID + * * @return maxId - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Maximum ID") - - public Integer getMaxId() { + public Integer getMaxId() { return maxId; } @@ -118,7 +118,6 @@ public void setMaxId(Integer maxId) { this.maxId = maxId; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -128,10 +127,10 @@ public boolean equals(java.lang.Object o) { return false; } SchemaString schemaString = (SchemaString) o; - return Objects.equals(this.schemaType, schemaString.schemaType) && - Objects.equals(this.schema, schemaString.schema) && - Objects.equals(this.references, schemaString.references) && - Objects.equals(this.maxId, schemaString.maxId); + return Objects.equals(this.schemaType, schemaString.schemaType) + && Objects.equals(this.schema, schemaString.schema) + && Objects.equals(this.references, schemaString.references) + && Objects.equals(this.maxId, schemaString.maxId); } @Override @@ -143,7 +142,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SchemaString {\n"); - + sb.append(" schemaType: ").append(toIndentedString(schemaType)).append("\n"); sb.append(" schema: ").append(toIndentedString(schema)).append("\n"); sb.append(" references: ").append(toIndentedString(references)).append("\n"); @@ -153,8 +152,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -162,4 +160,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java index 2ae476b0c3efc..e215d324f536e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/ServerClusterId.java @@ -1,24 +1,21 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; +import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; import java.util.HashMap; -import java.util.List; import java.util.Map; -import org.springframework.validation.annotation.Validated; +import java.util.Objects; import javax.validation.Valid; -import com.fasterxml.jackson.annotation.JsonInclude; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * ServerClusterId - */ +/** ServerClusterId */ @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class ServerClusterId { +public class ServerClusterId { @JsonProperty("scope") @Valid @@ -42,11 +39,11 @@ public ServerClusterId putScopeItem(String key, Object scopeItem) { /** * Get scope + * * @return scope - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public Map getScope() { + public Map getScope() { return scope; } @@ -61,11 +58,11 @@ public ServerClusterId id(String id) { /** * Get id + * * @return id - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(description = "") - - public String getId() { + public String getId() { return id; } @@ -73,7 +70,6 @@ public void setId(String id) { this.id = id; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -83,8 +79,8 @@ public boolean equals(java.lang.Object o) { return false; } ServerClusterId serverClusterId = (ServerClusterId) o; - return Objects.equals(this.scope, serverClusterId.scope) && - Objects.equals(this.id, serverClusterId.id); + return Objects.equals(this.scope, serverClusterId.scope) + && Objects.equals(this.id, serverClusterId.id); } @Override @@ -96,7 +92,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ServerClusterId {\n"); - + sb.append(" scope: ").append(toIndentedString(scope)).append("\n"); sb.append(" id: ").append(toIndentedString(id)).append("\n"); sb.append("}"); @@ -104,8 +100,7 @@ public String toString() { } /** - * Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). 
*/ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -113,4 +108,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java index 44379af934d5d..32b8979a0b71a 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/datahubproject/schema_registry/openapi/generated/SubjectVersion.java @@ -1,22 +1,19 @@ package io.datahubproject.schema_registry.openapi.generated; -import java.util.Objects; -import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.annotation.JsonCreator; -import io.swagger.v3.oas.annotations.media.Schema; -import org.springframework.validation.annotation.Validated; -import javax.validation.Valid; import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Objects; import javax.validation.constraints.*; +import org.springframework.validation.annotation.Validated; -/** - * Subject version pair - */ +/** Subject version pair */ @io.swagger.v3.oas.annotations.media.Schema(description = "Subject version pair") @Validated -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @JsonInclude(JsonInclude.Include.NON_NULL) -public class SubjectVersion { +public class SubjectVersion { @JsonProperty("subject") private String subject = null; @@ -31,11 +28,11 @@ public SubjectVersion subject(String subject) { /** * Name of the subject + * * @return subject - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "User", description = "Name of the subject") - - public String getSubject() { + public String getSubject() { return subject; } @@ -50,11 +47,11 @@ public SubjectVersion version(Integer version) { /** * Version number + * * @return version - **/ + */ @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "Version number") - - public Integer getVersion() { + public Integer getVersion() { return version; } @@ -62,7 +59,6 @@ public void setVersion(Integer version) { this.version = version; } - @Override public boolean equals(java.lang.Object o) { if (this == o) { @@ -72,8 +68,8 @@ public boolean equals(java.lang.Object o) { return false; } SubjectVersion subjectVersion = (SubjectVersion) o; - return Objects.equals(this.subject, subjectVersion.subject) && - Objects.equals(this.version, subjectVersion.version); + return Objects.equals(this.subject, subjectVersion.subject) + && Objects.equals(this.version, subjectVersion.version); } @Override @@ -85,7 +81,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class SubjectVersion {\n"); - + sb.append(" subject: ").append(toIndentedString(subject)).append("\n"); sb.append(" version: ").append(toIndentedString(version)).append("\n"); sb.append("}"); @@ -93,8 +89,7 @@ public String toString() { } /** - * 
Convert the given object to string with each line indented by 4 spaces - * (except the first line). + * Convert the given object to string with each line indented by 4 spaces (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { @@ -102,4 +97,4 @@ private String toIndentedString(java.lang.Object o) { } return o.toString().replace("\n", "\n "); } -} \ No newline at end of file +} diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java index e30376002ae7b..6049cb96e1e45 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApi.java @@ -1,22 +1,24 @@ /** * NOTE: This class is auto generated by the swagger code generator program (3.0.33). - * https://github.com/swagger-api/swagger-codegen - * Do not edit the class manually. + * https://github.com/swagger-api/swagger-codegen Do not edit the class manually. */ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.CompatibilityCheckResponse; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; import io.datahubproject.schema_registry.openapi.generated.RegisterSchemaRequest; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,96 +26,239 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated 
public interface CompatibilityApi { - Logger log = LoggerFactory.getLogger(CompatibilityApi.class); + Logger log = LoggerFactory.getLogger(CompatibilityApi.class); - default Optional getObjectMapper(){ - return Optional.empty(); - } + default Optional getObjectMapper() { + return Optional.empty(); + } - default Optional getRequest(){ - return Optional.empty(); - } + default Optional getRequest() { + return Optional.empty(); + } - default Optional getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Test schema compatibility against a particular schema subject-version", description = "Test input schema against a particular version of a subject's schema for compatibility. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", tags={ "Compatibility (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Compatibility check result.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = CompatibilityCheckResponse.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/compatibility/subjects/{subject}/versions/{version}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity testCompatibilityBySubjectName(@Parameter(in = ParameterIn.PATH, description = "Subject of the schema version against which compatibility is to be tested", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the subject's schema against which compatibility is to be tested. 
Valid values for versionId are between [1,2^31-1] or the string \"latest\".\"latest\" checks compatibility of the input schema with the last registered schema under the specified subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to return detailed error messages" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "verbose", required = false) Boolean verbose) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", CompatibilityCheckResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); + @Operation( + summary = "Test schema compatibility against a particular schema subject-version", + description = + "Test input schema against a particular version of a subject's schema for compatibility. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", + tags = {"Compatibility (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Compatibility check result.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = CompatibilityCheckResponse.class))), + @ApiResponse( + responseCode = "404", + description = + "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/compatibility/subjects/{subject}/versions/{version}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity testCompatibilityBySubjectName( + @Parameter( + in = ParameterIn.PATH, + description = + "Subject of the schema version against which compatibility is to be tested", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.PATH, + description = + "Version of the subject's schema against which compatibility is to be tested. Valid values for versionId are between [1,2^31-1] or the string \"latest\".\"latest\" checks compatibility of the input schema with the last registered schema under the specified subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("version") + String version, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return detailed error messages", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "verbose", required = false) + Boolean verbose) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", + CompatibilityCheckResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Test schema compatibility against all schemas under a subject", description = "Test input schema against a subject's schemas for compatibility, based on the configured compatibility level of the subject. In other words, it will perform the same compatibility check as register for that subject. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). 
If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", tags={ "Compatibility (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Compatibility check result.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = CompatibilityCheckResponse.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/compatibility/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity testCompatibilityForSubject(@Parameter(in = ParameterIn.PATH, description = "Subject of the schema version against which compatibility is to be tested", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to return detailed error messages" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "verbose", required = false) Boolean verbose) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", CompatibilityCheckResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); + @Operation( + summary = "Test schema compatibility against all schemas under a subject", + description = + "Test input schema against a subject's schemas for compatibility, based on the configured compatibility level of the subject. In other words, it will perform the same compatibility check as register for that subject. The compatibility level applied for the check is the configured compatibility level for the subject (http:get:: /config/(string: subject)). 
If this subject's compatibility level was never changed, then the global compatibility level applies (http:get:: /config).", + tags = {"Compatibility (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Compatibility check result.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = CompatibilityCheckResponse.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42201 indicates an invalid schema or schema type. Error code 42202 indicates an invalid version.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/compatibility/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity testCompatibilityForSubject( + @Parameter( + in = ParameterIn.PATH, + description = + "Subject of the schema version against which compatibility is to be tested", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return detailed error messages", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "verbose", required = false) + Boolean verbose) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"is_compatible\" : true,\n \"messages\" : [ ]\n}", + CompatibilityCheckResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default CompatibilityApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java index c30a01517d7d3..eac2fe8a3a02d 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java +++ 
b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/CompatibilityApiController.java
@@ -1,31 +1,33 @@
 package io.swagger.api;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.stereotype.Controller;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Optional;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Controller
 public class CompatibilityApiController implements CompatibilityApi {
-    private final ObjectMapper objectMapper;
-
-    private final HttpServletRequest request;
+  private final ObjectMapper objectMapper;
-    @org.springframework.beans.factory.annotation.Autowired
-    public CompatibilityApiController(ObjectMapper objectMapper, HttpServletRequest request) {
-        this.objectMapper = objectMapper;
-        this.request = request;
-    }
+  private final HttpServletRequest request;
-    @Override
-    public Optional<ObjectMapper> getObjectMapper() {
-        return Optional.ofNullable(objectMapper);
-    }
+  @org.springframework.beans.factory.annotation.Autowired
+  public CompatibilityApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java
index f041211c6db4d..2e3df2f62fc32 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApi.java
@@ -1,22 +1,24 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
*/ package io.swagger.api; +import com.fasterxml.jackson.databind.ObjectMapper; import io.datahubproject.schema_registry.openapi.generated.Config; import io.datahubproject.schema_registry.openapi.generated.ConfigUpdateRequest; import io.datahubproject.schema_registry.openapi.generated.ErrorMessage; -import com.fasterxml.jackson.databind.ObjectMapper; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.enums.ParameterIn; -import io.swagger.v3.oas.annotations.responses.ApiResponses; -import io.swagger.v3.oas.annotations.responses.ApiResponse; -import io.swagger.v3.oas.annotations.media.ArraySchema; import io.swagger.v3.oas.annotations.media.Content; -import io.swagger.v3.oas.annotations.security.SecurityRequirement; +import io.swagger.v3.oas.annotations.responses.ApiResponse; +import io.swagger.v3.oas.annotations.responses.ApiResponses; +import java.io.IOException; +import java.util.Optional; +import javax.servlet.http.HttpServletRequest; +import javax.validation.Valid; +import javax.validation.constraints.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; @@ -24,200 +26,462 @@ import org.springframework.validation.annotation.Validated; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; -import org.springframework.web.bind.annotation.RequestPart; -import org.springframework.web.multipart.MultipartFile; -import org.springframework.web.bind.annotation.CookieValue; -import javax.servlet.http.HttpServletRequest; -import javax.validation.Valid; -import javax.validation.constraints.*; -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Optional; - -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Validated public interface ConfigApi { - Logger log = LoggerFactory.getLogger(ConfigApi.class); + Logger log = LoggerFactory.getLogger(ConfigApi.class); - default Optional getObjectMapper(){ - return Optional.empty(); - } + default Optional getObjectMapper() { + return Optional.empty(); + } - default Optional getRequest(){ - return Optional.empty(); - } + default Optional getRequest() { + return Optional.empty(); + } - default Optional getAcceptHeader() { - return getRequest().map(r -> r.getHeader("Accept")); - } + default Optional getAcceptHeader() { + return getRequest().map(r -> r.getHeader("Accept")); + } - @Operation(summary = "Delete subject compatibility level", description = "Deletes the specified subject-level compatibility level config and reverts to the global default.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity deleteSubjectConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"NONE\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Delete subject compatibility level", + description = + "Deletes the specified subject-level compatibility level config and reverts to the global default.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns old compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity deleteSubjectConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"NONE\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Delete global compatibility level", description = "Deletes the global compatibility level config and reverts to the default.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old global compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity deleteTopLevelConfig() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"NONE\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Delete global compatibility level", + description = "Deletes the global compatibility level config and reverts to the default.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. 
Returns old global compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity deleteTopLevelConfig() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("\"NONE\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get subject compatibility level", description = "Retrieves compatibility level for a subject.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The subject compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Config.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity getSubjectLevelConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to return the global compatibility level if subject compatibility level not found" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "defaultToGlobal", required = false) Boolean defaultToGlobal) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Get subject compatibility level", + description = "Retrieves compatibility level for a subject.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The subject compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Config.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity getSubjectLevelConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to return the global compatibility level if subject compatibility level not found", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "defaultToGlobal", required = false) + Boolean defaultToGlobal) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get global compatibility level", description = "Retrieves the global compatibility level.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The global compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Config.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity getTopLevelConfig() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Get global compatibility level", + description = "Retrieves the global compatibility level.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The global compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = Config.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity getTopLevelConfig() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"compatibilityLevel\" : \"FULL_TRANSITIVE\"\n}", Config.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update subject compatibility level", description = "Update compatibility level for the specified subject. On success, echoes the original request back to the client.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ConfigUpdateRequest.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity updateSubjectLevelConfig(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Config Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ConfigUpdateRequest body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Update subject compatibility level", + description = + "Update compatibility level for the specified subject. On success, echoes the original request back to the client.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ConfigUpdateRequest.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity updateSubjectLevelConfig( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Config Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ConfigUpdateRequest body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update global compatibility level", description = "Updates the global compatibility level. On success, echoes the original request back to the client.", tags={ "Config (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ConfigUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/config", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity updateTopLevelConfig(@Parameter(in = ParameterIn.DEFAULT, description = "Config Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ConfigUpdateRequest body) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); + @Operation( + summary = "Update global compatibility level", + description = + "Updates the global compatibility level. On success, echoes the original request back to the client.", + tags = {"Config (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ConfigUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42203 indicates invalid compatibility level.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. 
Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/config", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity updateTopLevelConfig( + @Parameter( + in = ParameterIn.DEFAULT, + description = "Config Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ConfigUpdateRequest body) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"compatibility\" : \"FULL_TRANSITIVE\"\n}", ConfigUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ConfigApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java index cd3dc84fb4588..4fd6963797de2 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ConfigApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ConfigApiController implements ConfigApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ConfigApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public ConfigApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional 
<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java
index 9ab0bc2388f7d..01b90a3c98c2d 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApi.java
@@ -1,84 +1,101 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
  */
 package io.swagger.api;
-import io.datahubproject.schema_registry.openapi.generated.ErrorMessage;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import io.datahubproject.schema_registry.openapi.generated.ErrorMessage;
 import io.swagger.v3.oas.annotations.Operation;
-import io.swagger.v3.oas.annotations.Parameter;
-import io.swagger.v3.oas.annotations.enums.ParameterIn;
-import io.swagger.v3.oas.annotations.responses.ApiResponses;
-import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.media.ArraySchema;
 import io.swagger.v3.oas.annotations.media.Content;
-import io.swagger.v3.oas.annotations.security.SecurityRequirement;
+import io.swagger.v3.oas.annotations.responses.ApiResponse;
+import io.swagger.v3.oas.annotations.responses.ApiResponses;
+import java.io.IOException;
+import java.util.List;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.constraints.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
 import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.multipart.MultipartFile;
-import org.springframework.web.bind.annotation.CookieValue;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.Valid;
-import javax.validation.constraints.*;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface ContextsApi {
-    Logger log = LoggerFactory.getLogger(ContextsApi.class);
+  Logger log = LoggerFactory.getLogger(ContextsApi.class);
-    default Optional<ObjectMapper> getObjectMapper(){
-        return Optional.empty();
-    }
+  default Optional<ObjectMapper> getObjectMapper() {
+    return Optional.empty();
+  }
-    default Optional<HttpServletRequest> getRequest(){
-        return Optional.empty();
-    }
+  default Optional<HttpServletRequest> getRequest() {
+    return Optional.empty();
+  }
-    default Optional<String> getAcceptHeader() {
-        return getRequest().map(r -> r.getHeader("Accept"));
-    }
+  default Optional<String> getAcceptHeader() {
+    return getRequest().map(r -> r.getHeader("Accept"));
+  }
-    @Operation(summary = "List contexts", description = "Retrieves a list of contexts.", tags={ "Contexts (v1)" })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "200", description = "The contexts.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))),
-
-        @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
-    @RequestMapping(value = "/contexts",
-        produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
-        method = RequestMethod.GET)
-    default ResponseEntity<List<String>> listContexts() {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-            if (getAcceptHeader().get().contains("application/json")) {
-                try {
-                    return new ResponseEntity<>(getObjectMapper().get().readValue("[ \".\", \".\" ]", List.class), HttpStatus.NOT_IMPLEMENTED);
-                } catch (IOException e) {
-                    log.error("Couldn't serialize response for content type application/json", e);
-                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
-                }
-            }
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default ContextsApi interface so no example is generated");
+  @Operation(
+      summary = "List contexts",
+      description = "Retrieves a list of contexts.",
+      tags = {"Contexts (v1)"})
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "The contexts.",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    array =
+                        @ArraySchema(
+                            schema =
+                                @io.swagger.v3.oas.annotations.media.Schema(
+                                    implementation = String.class)))),
+        @ApiResponse(
+            responseCode = "500",
+            description =
+                "Internal Server Error. Error code 50001 indicates a failure in the backend data store. ",
", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/contexts", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity> listContexts() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \".\", \".\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ContextsApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java index faead1a2b37b0..8b601e8fb0f2e 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ContextsApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ContextsApiController implements ContextsApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ContextsApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired + public ContextsApiController(ObjectMapper objectMapper, HttpServletRequest request) { + this.objectMapper = objectMapper; + this.request = request; + } - @Override - public Optional getRequest() { - return Optional.ofNullable(request); - } + @Override + public Optional getObjectMapper() { + return Optional.ofNullable(objectMapper); + } + @Override + public Optional getRequest() { + return Optional.ofNullable(request); + } } diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java index 1d69c76c86122..711029371d583 100644 --- 
a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApi.java
@@ -1,7 +1,6 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
  */
 package io.swagger.api;
@@ -27,69 +26,122 @@
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface DefaultApi {
-    Logger log = LoggerFactory.getLogger(DefaultApi.class);
+  Logger log = LoggerFactory.getLogger(DefaultApi.class);
-    default Optional<ObjectMapper> getObjectMapper(){
-        return Optional.empty();
-    }
+  default Optional<ObjectMapper> getObjectMapper() {
+    return Optional.empty();
+  }
-    default Optional<HttpServletRequest> getRequest(){
-        return Optional.empty();
-    }
+  default Optional<HttpServletRequest> getRequest() {
+    return Optional.empty();
+  }
-    default Optional<String> getAcceptHeader() {
-        return getRequest().map(r -> r.getHeader("Accept"));
-    }
+  default Optional<String> getAcceptHeader() {
+    return getRequest().map(r -> r.getHeader("Accept"));
+  }
-    @Operation(summary = "Schema Registry Root Resource", description = "The Root resource is a no-op.", tags={ })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "200", description = "", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))) })
-    @RequestMapping(value = "/",
-        produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
-        method = RequestMethod.GET)
-    default ResponseEntity<String> get() {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-            if (getAcceptHeader().get().contains("application/json")) {
-                try {
-                    return new ResponseEntity<>(getObjectMapper().get().readValue("\"\"", String.class), HttpStatus.NOT_IMPLEMENTED);
-                } catch (IOException e) {
-                    log.error("Couldn't serialize response for content type application/json", e);
-                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
-                }
-            }
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated");
+  @Operation(
+      summary = "Schema Registry Root Resource",
+      description = "The Root resource is a no-op.",
+      tags = {})
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    schema =
+                        @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))
+      })
+  @RequestMapping(
+      value = "/",
+      produces = {
+        "application/vnd.schemaregistry.v1+json",
+        "application/vnd.schemaregistry+json; qs=0.9",
+        "application/json; qs=0.5"
+      },
+      method = RequestMethod.GET)
+  default ResponseEntity<String> get() {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+      if (getAcceptHeader().get().contains("application/json")) {
+        try {
+          return new ResponseEntity<>(
+              getObjectMapper().get().readValue("\"\"", String.class), HttpStatus.NOT_IMPLEMENTED);
+        } catch (IOException e) {
+          log.error("Couldn't serialize response for content type application/json", e);
+          return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
 }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+      }
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated");
 }
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
-
-    @Operation(summary = "", description = "", tags={ })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "200", description = "default response", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Map.class)))) })
-    @RequestMapping(value = "/",
-        produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
-        consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" },
-        method = RequestMethod.POST)
-    default ResponseEntity<Map<String, String>> post(@Parameter(in = ParameterIn.DEFAULT, description = "", schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody Map<String, String> body) {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-            if (getAcceptHeader().get().contains("application/json")) {
-                try {
-                    return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"key\" : \"\"\n}", Map.class), HttpStatus.NOT_IMPLEMENTED);
-                } catch (IOException e) {
-                    log.error("Couldn't serialize response for content type application/json", e);
-                    return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
-                }
-            }
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated");
+  @Operation(
+      summary = "",
+      description = "",
+      tags = {})
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "200",
+            description = "default response",
+            content =
+                @Content(
+                    mediaType = "application/vnd.schemaregistry.v1+json",
+                    array =
+                        @ArraySchema(
+                            schema =
+                                @io.swagger.v3.oas.annotations.media.Schema(
+                                    implementation = Map.class))))
+      })
+  @RequestMapping(
+      value = "/",
+      produces = {
+        "application/vnd.schemaregistry.v1+json",
+        "application/vnd.schemaregistry+json; qs=0.9",
+        "application/json; qs=0.5"
+      },
+      consumes = {
+        "application/vnd.schemaregistry.v1+json",
+        "application/vnd.schemaregistry+json",
+        "application/json",
+        "application/octet-stream"
+      },
+      method = RequestMethod.POST)
+  default ResponseEntity<Map<String, String>> post(
+      @Parameter(
+              in = ParameterIn.DEFAULT,
+              description = "",
+              schema = @io.swagger.v3.oas.annotations.media.Schema())
+          @Valid
+          @RequestBody
+          Map<String, String> body) {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+      if (getAcceptHeader().get().contains("application/json")) {
+        try {
+          return new ResponseEntity<>(
+              getObjectMapper().get().readValue("{\n \"key\" : \"\"\n}", Map.class),
+              HttpStatus.NOT_IMPLEMENTED);
+        } catch (IOException e) {
+          log.error("Couldn't serialize response for content type application/json", e);
+          return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
 }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+      }
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default DefaultApi interface so no example is generated");
 }
-
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
 }
-
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java
index 53e64d43d9572..90768b88e2f28 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/DefaultApiController.java
@@ -1,31 +1,33 @@
 package io.swagger.api;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.stereotype.Controller;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Optional;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Controller
 public class DefaultApiController implements DefaultApi {
-    private final ObjectMapper objectMapper;
-
-    private final HttpServletRequest request;
+  private final ObjectMapper objectMapper;
-    @org.springframework.beans.factory.annotation.Autowired
-    public DefaultApiController(ObjectMapper objectMapper, HttpServletRequest request) {
-        this.objectMapper = objectMapper;
-        this.request = request;
-    }
+  private final HttpServletRequest request;
-    @Override
-    public Optional<ObjectMapper> getObjectMapper() {
-        return Optional.ofNullable(objectMapper);
-    }
+  @org.springframework.beans.factory.annotation.Autowired
+  public DefaultApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java
index f2857069d05c8..7fca1cb53cfba 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApi.java
@@ -1,22 +1,24 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
 */
 package io.swagger.api;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import io.datahubproject.schema_registry.openapi.generated.ErrorMessage;
 import io.datahubproject.schema_registry.openapi.generated.Mode;
 import io.datahubproject.schema_registry.openapi.generated.ModeUpdateRequest;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.Parameter;
 import io.swagger.v3.oas.annotations.enums.ParameterIn;
-import io.swagger.v3.oas.annotations.responses.ApiResponses;
-import io.swagger.v3.oas.annotations.responses.ApiResponse;
-import io.swagger.v3.oas.annotations.media.ArraySchema;
 import io.swagger.v3.oas.annotations.media.Content;
-import io.swagger.v3.oas.annotations.security.SecurityRequirement;
+import io.swagger.v3.oas.annotations.responses.ApiResponse;
+import io.swagger.v3.oas.annotations.responses.ApiResponses;
+import java.io.IOException;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.Valid;
+import javax.validation.constraints.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
@@ -24,173 +26,398 @@
 import org.springframework.validation.annotation.Validated;
 import org.springframework.web.bind.annotation.PathVariable;
 import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.multipart.MultipartFile;
-import org.springframework.web.bind.annotation.CookieValue;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.Valid;
-import javax.validation.constraints.*;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface ModeApi {
-    Logger log = LoggerFactory.getLogger(ModeApi.class);
+  Logger log = LoggerFactory.getLogger(ModeApi.class);
-    default Optional<ObjectMapper> getObjectMapper(){
-        return Optional.empty();
-    }
+  default Optional<ObjectMapper> getObjectMapper() {
+    return Optional.empty();
+  }
-    default Optional<HttpServletRequest> getRequest(){
-        return Optional.empty();
-    }
+  default Optional<HttpServletRequest> getRequest() {
+    return Optional.empty();
+  }
-    default Optional<String> getAcceptHeader() {
-        return getRequest().map(r -> r.getHeader("Accept"));
-    }
+  default Optional<String> getAcceptHeader() {
+    return getRequest().map(r -> r.getHeader("Accept"));
+  }
-    @Operation(summary = "Delete subject mode", description = "Deletes the specified subject-level mode and reverts to the global default.", tags={ "Modes (v1)" })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns old mode.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))),
-
-        @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.DELETE) - default ResponseEntity deleteSubjectMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Delete subject mode", + description = "Deletes the specified subject-level mode and reverts to the global default.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Operation succeeded. Returns old mode.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.DELETE) + default ResponseEntity deleteSubjectMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get subject mode", description = "Retrieves the subject mode.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The subject mode.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity getMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to return the global mode if subject mode not found" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "defaultToGlobal", required = false) Boolean defaultToGlobal) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Get subject mode", + description = "Retrieves the subject mode.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The subject mode.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40401 indicates subject not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity getMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return the global mode if subject mode not found", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "defaultToGlobal", required = false) + Boolean defaultToGlobal) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get global mode", description = "Retrieves global mode.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The global mode", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), - - @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store") }) - @RequestMapping(value = "/mode", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity getTopLevelMode() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Get global mode", + description = "Retrieves global mode.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The global mode", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema(implementation = Mode.class))), + @ApiResponse( + responseCode = "500", + description = "Error code 50001 -- Error in the backend data store") + }) + @RequestMapping( + value = 
"/mode", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity getTopLevelMode() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", Mode.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update subject mode", description = "Update mode for the specified subject. On success, echoes the original request back to the client.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ModeUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode/{subject}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity updateMode(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ModeUpdateRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to force update if setting mode to IMPORT and schemas currently exist" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "force", required = false) Boolean force) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Update subject mode", + description = + "Update mode for the specified subject. On success, echoes the original request back to the client.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ModeUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity updateMode( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ModeUpdateRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to force update if setting mode to IMPORT and schemas currently exist", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "force", required = false) + Boolean force) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Update global mode", description = "Update global mode. On success, echoes the original request back to the client.", tags={ "Modes (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "The original request.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ModeUpdateRequest.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/mode", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.PUT) - default ResponseEntity updateTopLevelMode(@Parameter(in = ParameterIn.DEFAULT, description = "Update Request", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody ModeUpdateRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to force update if setting mode to IMPORT and schemas currently exist" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "force", required = false) Boolean force) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); + @Operation( + summary = "Update global mode", + description = + "Update global mode. On success, echoes the original request back to the client.", + tags = {"Modes (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The original request.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ModeUpdateRequest.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable Entity. Error code 42204 indicates an invalid mode. Error code 42205 indicates operation not permitted.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store. Error code 50003 indicates a failure forwarding the request to the primary. 
Error code 50004 indicates unknown leader.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/mode", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.PUT) + default ResponseEntity updateTopLevelMode( + @Parameter( + in = ParameterIn.DEFAULT, + description = "Update Request", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + ModeUpdateRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to force update if setting mode to IMPORT and schemas currently exist", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "force", required = false) + Boolean force) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"mode\" : \"READWRITE\"\n}", ModeUpdateRequest.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default ModeApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java index 97ae54ea6c9a2..28ad6fbdfbc12 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/ModeApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class ModeApiController implements ModeApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public ModeApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = objectMapper; - this.request = request; - } + private final HttpServletRequest request; - @Override - public Optional getObjectMapper() { - return Optional.ofNullable(objectMapper); - } + @org.springframework.beans.factory.annotation.Autowired 
+  public ModeApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java
index cfb0fe183ee88..e01df38fca64a 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApi.java
@@ -1,7 +1,6 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
  */
 package io.swagger.api;
@@ -32,180 +31,526 @@
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface SchemasApi {
-    Logger log = LoggerFactory.getLogger(SchemasApi.class);
+  Logger log = LoggerFactory.getLogger(SchemasApi.class);
-    default Optional<ObjectMapper> getObjectMapper(){
-        return Optional.empty();
-    }
+  default Optional<ObjectMapper> getObjectMapper() {
+    return Optional.empty();
+  }
-    default Optional<HttpServletRequest> getRequest(){
-        return Optional.empty();
-    }
+  default Optional<HttpServletRequest> getRequest() {
+    return Optional.empty();
+  }
-    default Optional<String> getAcceptHeader() {
-        return getRequest().map(r -> r.getHeader("Accept"));
-    }
+  default Optional<String> getAcceptHeader() {
+    return getRequest().map(r -> r.getHeader("Accept"));
+  }
-    @Operation(summary = "Get schema string by ID", description = "Retrieves the schema string identified by the input ID.", tags={ "Schemas (v1)" })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "200", description = "The schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = SchemaString.class))),
-
-        @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
-        @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity getSchema(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Name of the subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Desired output format, dependent on schema type" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "format", required = false) String format, @Parameter(in = ParameterIn.QUERY, description = "Whether to fetch the maximum schema identifier that exists" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "fetchMaxId", required = false, defaultValue="false") Boolean fetchMaxId) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"maxId\" : 1,\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"schemaType\" : \"AVRO\"\n}", SchemaString.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "Get schema string by ID", + description = "Retrieves the schema string identified by the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "The schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = SchemaString.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity getSchema( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Name of the subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Desired output format, dependent on schema type", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "format", required = false) + String format, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to fetch the maximum schema identifier that exists", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "fetchMaxId", required = false, defaultValue = "false") + Boolean fetchMaxId) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"maxId\" : 1,\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"schemaType\" : \"AVRO\"\n}", + SchemaString.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Get schema by ID", description = "Retrieves the schema identified by the input ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Raw schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/ids/{id}/schema", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity getSchemaOnly(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Name of the subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Desired output format, dependent on schema type" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "format", required = false) String format) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "Get schema by ID", + description = "Retrieves the schema identified by the input ID.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Raw schema string.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class))), + @ApiResponse( + responseCode = "404", + description = "Not Found. Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/ids/{id}/schema", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity getSchemaOnly( + @Parameter( + in = ParameterIn.PATH, + description = "Globally unique identifier of the schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("id") + Integer id, + @Parameter( + in = ParameterIn.QUERY, + description = "Name of the subject", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subject", required = false) + String subject, + @Parameter( + in = ParameterIn.QUERY, + description = "Desired output format, dependent on schema type", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "format", required = false) + String format) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List supported schema types", description = "Retrieve the schema types supported by this registry.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of supported schema types.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas/types", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity> getSchemaTypes() { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"AVRO\", \"AVRO\" ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List supported schema types", + description = "Retrieve the schema types supported by this registry.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of supported schema types.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = String.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas/types", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity> getSchemaTypes() { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper().get().readValue("[ \"AVRO\", \"AVRO\" ]", List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List schemas", description = "Get the schemas matching the specified parameters.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of schemas matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class)))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/schemas", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - method = RequestMethod.GET) - default ResponseEntity> getSchemas(@Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject prefix" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subjectPrefix", required = false) String subjectPrefix, @Parameter(in = ParameterIn.QUERY, description = "Whether to return soft deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "deleted", required = false, defaultValue="false") Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return latest schema versions only for each matching subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="false")) @Valid @RequestParam(value = "latestOnly", required = false, defaultValue="false") Boolean latestOnly, @Parameter(in = ParameterIn.QUERY, description = "Pagination offset for results" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="0")) @Valid @RequestParam(value = "offset", required = false, defaultValue="0") Integer offset, @Parameter(in = ParameterIn.QUERY, description = "Pagination size for results. Ignored if negative" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue="-1")) @Valid @RequestParam(value = "limit", required = false, defaultValue="-1") Integer limit) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("[ {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}, {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n} ]", List.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); + @Operation( + summary = "List schemas", + description = "Get the schemas matching the specified parameters.", + tags = {"Schemas (v1)"}) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "List of schemas matching the specified parameters.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + array = + @ArraySchema( + schema = + 
@io.swagger.v3.oas.annotations.media.Schema( + implementation = Schema.class)))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/schemas", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + method = RequestMethod.GET) + default ResponseEntity> getSchemas( + @Parameter( + in = ParameterIn.QUERY, + description = "Filters results by the respective subject prefix", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "subjectPrefix", required = false) + String subjectPrefix, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to return soft deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "deleted", required = false, defaultValue = "false") + Boolean deleted, + @Parameter( + in = ParameterIn.QUERY, + description = + "Whether to return latest schema versions only for each matching subject", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "false")) + @Valid + @RequestParam(value = "latestOnly", required = false, defaultValue = "false") + Boolean latestOnly, + @Parameter( + in = ParameterIn.QUERY, + description = "Pagination offset for results", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "0")) + @Valid + @RequestParam(value = "offset", required = false, defaultValue = "0") + Integer offset, + @Parameter( + in = ParameterIn.QUERY, + description = "Pagination size for results. 
Ignored if negative", + schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = "-1")) + @Valid + @RequestParam(value = "limit", required = false, defaultValue = "-1") + Integer limit) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "[ {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}, {\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n} ]", + List.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "List subjects associated to schema ID", description = "Retrieves all the subjects associated with a particular schema ID.", tags={ "Schemas (v1)" }) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "List of subjects matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))), - - @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/schemas/ids/{id}/subjects",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<List<String>> getSubjects(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include subjects where the schema was deleted" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated");
+ @Operation(
+ summary = "List subjects associated to schema ID",
+ description = "Retrieves all the subjects associated with a particular schema ID.",
+ tags = {"Schemas (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "List of subjects matching the specified parameters.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ array =
+ @ArraySchema(
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = String.class)))),
+ @ApiResponse(
+ responseCode = "404",
+ description = "Not Found. Error code 40403 indicates schema not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/schemas/ids/{id}/subjects",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<List<String>> getSubjects(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Globally unique identifier of the schema",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("id")
+ Integer id,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Filters results by the respective subject",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "subject", required = false)
+ String subject,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to include subjects where the schema was deleted",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deleted", required = false)
+ Boolean deleted) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "List subject-versions associated to schema ID", description = "Get all the subject-version pairs associated with the input ID.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "List of subject versions matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = SubjectVersion.class)))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/schemas/ids/{id}/versions",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<List<SubjectVersion>> getVersions(@Parameter(in = ParameterIn.PATH, description = "Globally unique identifier of the schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("id") Integer id, @Parameter(in = ParameterIn.QUERY, description = "Filters results by the respective subject" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "subject", required = false) String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include subject versions where the schema was deleted" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("[ {\n \"subject\" : \"User\",\n \"version\" : 1\n}, {\n \"subject\" : \"User\",\n \"version\" : 1\n} ]", List.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated");
+ @Operation(
+ summary = "List subject-versions associated to schema ID",
+ description = "Get all the subject-version pairs associated with the input ID.",
+ tags = {"Schemas (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "List of subject versions matching the specified parameters.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ array =
+ @ArraySchema(
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = SubjectVersion.class)))),
+ @ApiResponse(
+ responseCode = "404",
+ description = "Not Found. Error code 40403 indicates schema not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/schemas/ids/{id}/versions",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<List<SubjectVersion>> getVersions(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Globally unique identifier of the schema",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("id")
+ Integer id,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Filters results by the respective subject",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "subject", required = false)
+ String subject,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to include subject versions where the schema was deleted",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deleted", required = false)
+ Boolean deleted) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper()
+ .get()
+ .readValue(
+ "[ {\n \"subject\" : \"User\",\n \"version\" : 1\n}, {\n \"subject\" : \"User\",\n \"version\" : 1\n} ]",
+ List.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SchemasApi interface so no example is generated");
}
-
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
}
-
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java
index 4131a93695f13..6581f5f39a647 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SchemasApiController.java
@@ -1,31 +1,33 @@
 package io.swagger.api;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.stereotype.Controller;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Optional;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+
+@javax.annotation.Generated(
+ value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+ date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Controller
 public class SchemasApiController implements SchemasApi {
- private final ObjectMapper objectMapper;
-
- private final HttpServletRequest request;
+ private final ObjectMapper objectMapper;
- @org.springframework.beans.factory.annotation.Autowired
- public SchemasApiController(ObjectMapper objectMapper, HttpServletRequest request) {
- this.objectMapper = objectMapper;
- this.request = request; 
- }
+ private final HttpServletRequest request;
- @Override
- public Optional<ObjectMapper> getObjectMapper() {
- return Optional.ofNullable(objectMapper);
- }
+ @org.springframework.beans.factory.annotation.Autowired
+ public SchemasApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+ this.objectMapper = objectMapper;
+ this.request = request;
+ }
- @Override
- public Optional<HttpServletRequest> getRequest() {
- return Optional.ofNullable(request);
- }
+ @Override
+ public Optional<ObjectMapper> getObjectMapper() {
+ return Optional.ofNullable(objectMapper);
+ }
+ @Override
+ public Optional<HttpServletRequest> getRequest() {
+ return Optional.ofNullable(request);
+ }
}
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java
index 8977fc06387c2..544a1aff8008e 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApi.java
@@ -1,7 +1,6 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
  */
 package io.swagger.api;
@@ -33,275 +32,847 @@
 import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RequestParam;
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+ value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+ date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface SubjectsApi {
- Logger log = LoggerFactory.getLogger(SubjectsApi.class);
+ Logger log = LoggerFactory.getLogger(SubjectsApi.class);
- default Optional<ObjectMapper> getObjectMapper(){
- return Optional.empty();
- }
+ default Optional<ObjectMapper> getObjectMapper() {
+ return Optional.empty();
+ }
- default Optional<HttpServletRequest> getRequest(){
- return Optional.empty();
- }
+ default Optional<HttpServletRequest> getRequest() {
+ return Optional.empty();
+ }
- default Optional<String> getAcceptHeader() {
- return getRequest().map(r -> r.getHeader("Accept"));
- }
+ default Optional<String> getAcceptHeader() {
+ return getRequest().map(r -> r.getHeader("Accept"));
+ }
- @Operation(summary = "Delete schema version", description = "Deletes a specific version of the schema registered under this subject. This only deletes the version and the schema ID remains intact making it still possible to decode data using the schema ID. This API is recommended to be used only in development environments or under extreme circumstances where-in, its required to delete a previously registered schema for compatibility purposes or re-register previously registered schema.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns the schema version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}/versions/{version}",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.DELETE)
- default ResponseEntity<Integer> deleteSchemaVersion(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to perform a permanent delete" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "permanent", required = false) Boolean permanent) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("1", Integer.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "Delete schema version",
+ description =
+ "Deletes a specific version of the schema registered under this subject. This only deletes the version and the schema ID remains intact making it still possible to decode data using the schema ID. This API is recommended to be used only in development environments or under extreme circumstances where-in, its required to delete a previously registered schema for compatibility purposes or re-register previously registered schema.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "Operation succeeded. Returns the schema version.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = Integer.class))),
+ @ApiResponse(
+ responseCode = "404",
+ description =
+ "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "422",
+ description = "Unprocessable Entity. Error code 42202 indicates an invalid version.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects/{subject}/versions/{version}",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.DELETE)
+ default ResponseEntity<Integer> deleteSchemaVersion(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Name of the subject",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("subject")
+ String subject,
+ @Parameter(
+ in = ParameterIn.PATH,
+ description =
+ "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("version")
+ String version,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to perform a permanent delete",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "permanent", required = false)
+ Boolean permanent) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper().get().readValue("1", Integer.class), HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "Delete subject", description = "Deletes the specified subject and its associated compatibility level if registered. It is recommended to use this API only when a topic needs to be recycled or in development environment.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "Operation succeeded. Returns list of schema versions deleted", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. 
Error code 40401 indicates subject not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.DELETE)
- default ResponseEntity<List<Integer>> deleteSubject(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to perform a permanent delete" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "permanent", required = false) Boolean permanent) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("[ 1, 1 ]", List.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "Delete subject",
+ description =
+ "Deletes the specified subject and its associated compatibility level if registered. It is recommended to use this API only when a topic needs to be recycled or in development environment.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "Operation succeeded. Returns list of schema versions deleted",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ array =
+ @ArraySchema(
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = Integer.class)))),
+ @ApiResponse(
+ responseCode = "404",
+ description = "Not Found. Error code 40401 indicates subject not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects/{subject}",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.DELETE)
+ default ResponseEntity<List<Integer>> deleteSubject(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Name of the subject",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("subject")
+ String subject,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to perform a permanent delete",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "permanent", required = false)
+ Boolean permanent) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper().get().readValue("[ 1, 1 ]", List.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "List schemas referencing a schema", description = "Retrieves the IDs of schemas that reference the specified schema.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "List of IDs for schemas that reference the specified schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}/versions/{version}/referencedby",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<List<Integer>> getReferencedBy(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("[ 100001, 100001 ]", List.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "List schemas referencing a schema",
+ description = "Retrieves the IDs of schemas that reference the specified schema.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "List of IDs for schemas that reference the specified schema.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ array =
+ @ArraySchema(
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = Integer.class)))),
+ @ApiResponse(
+ responseCode = "404",
+ description =
+ "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "422",
+ description = "Unprocessable Entity. Error code 42202 indicates an invalid version.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects/{subject}/versions/{version}/referencedby",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<List<Integer>> getReferencedBy(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Name of the subject",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("subject")
+ String subject,
+ @Parameter(
+ in = ParameterIn.PATH,
+ description =
+ "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("version")
+ String version) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper().get().readValue("[ 100001, 100001 ]", List.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "Get schema by version", description = "Retrieves a specific version of the schema registered under this subject.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "The schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}/versions/{version}",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<Schema> getSchemaByVersion(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", Schema.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "Get schema by version",
+ description = "Retrieves a specific version of the schema registered under this subject.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "The schema.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = Schema.class))),
+ @ApiResponse(
+ responseCode = "404",
+ description =
+ "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "422",
+ description = "Unprocessable Entity. Error code 42202 indicates an invalid version.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects/{subject}/versions/{version}",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<Schema> getSchemaByVersion(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Name of the subject",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("subject")
+ String subject,
+ @Parameter(
+ in = ParameterIn.PATH,
+ description =
+ "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("version")
+ String version,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to include deleted schema",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deleted", required = false)
+ Boolean deleted) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper()
+ .get()
+ .readValue(
+ "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}",
+ Schema.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "Get schema string by version", description = "Retrieves the schema for the specified version of this subject. Only the unescaped schema string is returned.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "The schema string.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "422", description = "Unprocessable Entity. Error code 42202 indicates an invalid version.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}/versions/{version}/schema",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<String> getSchemaOnly2(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.PATH, description = "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("version") String version, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "Get schema string by version",
+ description =
+ "Retrieves the schema for the specified version of this subject. Only the unescaped schema string is returned.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "The schema string.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = String.class))),
+ @ApiResponse(
+ responseCode = "404",
+ description =
+ "Not Found. Error code 40401 indicates subject not found. Error code 40402 indicates version not found.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "422",
+ description = "Unprocessable Entity. Error code 42202 indicates an invalid version.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects/{subject}/versions/{version}/schema",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<String> getSchemaOnly2(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Name of the subject",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("subject")
+ String subject,
+ @Parameter(
+ in = ParameterIn.PATH,
+ description =
+ "Version of the schema to be returned. Valid values for versionId are between [1,2^31-1] or the string \"latest\". \"latest\" returns the last registered schema under the specified subject. Note that there may be a new latest schema that gets registered right after this request is served.",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("version")
+ String version,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to include deleted schema",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deleted", required = false)
+ Boolean deleted) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper()
+ .get()
+ .readValue("\"{\"schema\": \"{\"type\": \"string\"}\"}\"", String.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "List subjects", description = "Retrieves a list of registered subjects matching specified parameters.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "List of subjects matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = String.class)))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. 
Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<List<String>> list(@Parameter(in = ParameterIn.QUERY, description = "Subject name prefix" ,schema=@io.swagger.v3.oas.annotations.media.Schema( defaultValue=":*:")) @Valid @RequestParam(value = "subjectPrefix", required = false, defaultValue=":*:") String subjectPrefix, @Parameter(in = ParameterIn.QUERY, description = "Whether to look up deleted subjects" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return deleted subjects only" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deletedOnly", required = false) Boolean deletedOnly) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "List subjects",
+ description = "Retrieves a list of registered subjects matching specified parameters.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "List of subjects matching the specified parameters.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ array =
+ @ArraySchema(
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = String.class)))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<List<String>> list(
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Subject name prefix",
+ schema = @io.swagger.v3.oas.annotations.media.Schema(defaultValue = ":*:"))
+ @Valid
+ @RequestParam(value = "subjectPrefix", required = false, defaultValue = ":*:")
+ String subjectPrefix,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to look up deleted subjects",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deleted", required = false)
+ Boolean deleted,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to return deleted subjects only",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deletedOnly", required = false)
+ Boolean deletedOnly) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper().get().readValue("[ \"User\", \"User\" ]", List.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "List versions under subject", description = "Retrieves a list of versions registered under the specified subject.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "List of version numbers matching the specified parameters.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", array = @ArraySchema(schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Integer.class)))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}/versions",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- method = RequestMethod.GET)
- default ResponseEntity<List<Integer>> listVersions(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.QUERY, description = "Whether to include deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted, @Parameter(in = ParameterIn.QUERY, description = "Whether to return deleted schemas only" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deletedOnly", required = false) Boolean deletedOnly) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("[ 1, 1 ]", List.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "List versions under subject",
+ description = "Retrieves a list of versions registered under the specified subject.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "List of version numbers matching the specified parameters.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ array =
+ @ArraySchema(
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = Integer.class)))),
+ @ApiResponse(
+ responseCode = "404",
+ description = "Not Found. Error code 40401 indicates subject not found. ",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class))),
+ @ApiResponse(
+ responseCode = "500",
+ description =
+ "Internal Server Error. Error code 50001 indicates a failure in the backend data store.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = ErrorMessage.class)))
+ })
+ @RequestMapping(
+ value = "/subjects/{subject}/versions",
+ produces = {
+ "application/vnd.schemaregistry.v1+json",
+ "application/vnd.schemaregistry+json; qs=0.9",
+ "application/json; qs=0.5"
+ },
+ method = RequestMethod.GET)
+ default ResponseEntity<List<Integer>> listVersions(
+ @Parameter(
+ in = ParameterIn.PATH,
+ description = "Name of the subject",
+ required = true,
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @PathVariable("subject")
+ String subject,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to include deleted schemas",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deleted", required = false)
+ Boolean deleted,
+ @Parameter(
+ in = ParameterIn.QUERY,
+ description = "Whether to return deleted schemas only",
+ schema = @io.swagger.v3.oas.annotations.media.Schema())
+ @Valid
+ @RequestParam(value = "deletedOnly", required = false)
+ Boolean deletedOnly) {
+ if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+ if (getAcceptHeader().get().contains("application/json")) {
+ try {
+ return new ResponseEntity<>(
+ getObjectMapper().get().readValue("[ 1, 1 ]", List.class),
+ HttpStatus.NOT_IMPLEMENTED);
+ } catch (IOException e) {
+ log.error("Couldn't serialize response for content type application/json", e);
+ return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
}
- return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
+ } else {
+ log.warn(
+ "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
}
+ return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+ }
-
- @Operation(summary = "Lookup schema under subject", description = "Check if a schema has already been registered under the specified subject. If so, this returns the schema string along with its globally unique identifier, its version under this subject and the subject name.", tags={ "Subjects (v1)" })
- @ApiResponses(value = {
- @ApiResponse(responseCode = "200", description = "The schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = Schema.class))),
-
- @ApiResponse(responseCode = "404", description = "Not Found. Error code 40401 indicates subject not found. Error code 40403 indicates schema not found.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))),
-
- @ApiResponse(responseCode = "500", description = "Internal Server Error.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) })
- @RequestMapping(value = "/subjects/{subject}",
- produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" },
- consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" },
- method = RequestMethod.POST)
- default ResponseEntity<Schema> lookUpSchemaUnderSubject(@Parameter(in = ParameterIn.PATH, description = "Subject under which the schema will be registered", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to lookup the normalized schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "normalize", required = false) Boolean normalize, @Parameter(in = ParameterIn.QUERY, description = "Whether to lookup deleted schemas" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "deleted", required = false) Boolean deleted) {
- if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
- if (getAcceptHeader().get().contains("application/json")) {
- try {
- return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", Schema.class), HttpStatus.NOT_IMPLEMENTED);
- } catch (IOException e) {
- log.error("Couldn't serialize response for content type application/json", e);
- return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
- }
- }
- } else {
- log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated");
+ @Operation(
+ summary = "Lookup schema under subject",
+ description =
+ "Check if a schema has already been registered under the specified subject. If so, this returns the schema string along with its globally unique identifier, its version under this subject and the subject name.",
+ tags = {"Subjects (v1)"})
+ @ApiResponses(
+ value = {
+ @ApiResponse(
+ responseCode = "200",
+ description = "The schema.",
+ content =
+ @Content(
+ mediaType = "application/vnd.schemaregistry.v1+json",
+ schema =
+ @io.swagger.v3.oas.annotations.media.Schema(
+ implementation = Schema.class))),
+ @ApiResponse(
+ responseCode = "404",
+ description =
+ "Not Found. Error code 40401 indicates subject not found. 
Error code 40403 indicates schema not found.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = "Internal Server Error.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity lookUpSchemaUnderSubject( + @Parameter( + in = ParameterIn.PATH, + description = "Subject under which the schema will be registered", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to lookup the normalized schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "normalize", required = false) + Boolean normalize, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to lookup deleted schemas", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "deleted", required = false) + Boolean deleted) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue( + "{\n \"schema\" : \"{\"schema\": \"{\"type\": \"string\"}\"}\",\n \"references\" : [ {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n }, {\n \"subject\" : \"User\",\n \"name\" : \"io.confluent.kafka.example.User\",\n \"version\" : 1\n } ],\n \"subject\" : \"User\",\n \"schemaType\" : \"AVRO\",\n \"id\" : 100001,\n \"version\" : 1\n}", + Schema.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } - - @Operation(summary = "Register schema under a subject", description = "Register a new schema under the specified subject. If successfully registered, this returns the unique identifier of this schema in the registry. The returned identifier should be used to retrieve this schema from the schemas resource and is different from the schema's version which is associated with the subject. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects. 
A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed.", tags={ "Subjects (v1)"}, hidden = true) - @ApiResponses(value = { - @ApiResponse(responseCode = "200", description = "Schema successfully registered.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = RegisterSchemaResponse.class))), - - @ApiResponse(responseCode = "409", description = "Conflict. Incompatible schema.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "422", description = "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. ", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))), - - @ApiResponse(responseCode = "500", description = "Internal Server Error. Error code 50001 indicates a failure in the backend data store.Error code 50002 indicates operation timed out. Error code 50003 indicates a failure forwarding the request to the primary.", content = @Content(mediaType = "application/vnd.schemaregistry.v1+json", schema = @io.swagger.v3.oas.annotations.media.Schema(implementation = ErrorMessage.class))) }) - @RequestMapping(value = "/subjects/{subject}/versions", - produces = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json; qs=0.9", "application/json; qs=0.5" }, - consumes = { "application/vnd.schemaregistry.v1+json", "application/vnd.schemaregistry+json", "application/json", "application/octet-stream" }, - method = RequestMethod.POST) - default ResponseEntity register(@Parameter(in = ParameterIn.PATH, description = "Name of the subject", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @PathVariable("subject") String subject, @Parameter(in = ParameterIn.DEFAULT, description = "Schema", required=true, schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestBody RegisterSchemaRequest body, @Parameter(in = ParameterIn.QUERY, description = "Whether to register the normalized schema" ,schema=@io.swagger.v3.oas.annotations.media.Schema()) @Valid @RequestParam(value = "normalize", required = false) Boolean normalize) { - if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { - if (getAcceptHeader().get().contains("application/json")) { - try { - return new ResponseEntity<>(getObjectMapper().get().readValue("{\n \"id\" : 100001\n}", RegisterSchemaResponse.class), HttpStatus.NOT_IMPLEMENTED); - } catch (IOException e) { - log.error("Couldn't serialize response for content type application/json", e); - return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); - } - } - } else { - log.warn("ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); + @Operation( + summary = "Register 
schema under a subject", + description = + "Register a new schema under the specified subject. If successfully registered, this returns the unique identifier of this schema in the registry. The returned identifier should be used to retrieve this schema from the schemas resource and is different from the schema's version which is associated with the subject. If the same schema is registered under a different subject, the same identifier will be returned. However, the version of the schema may be different under different subjects. A schema should be compatible with the previously registered schema or schemas (if there are any) as per the configured compatibility level. The configured compatibility level can be obtained by issuing a GET http:get:: /config/(string: subject). If that returns null, then GET http:get:: /config When there are multiple instances of Schema Registry running in the same cluster, the schema registration request will be forwarded to one of the instances designated as the primary. If the primary is not available, the client will get an error code indicating that the forwarding has failed.", + tags = {"Subjects (v1)"}, + hidden = true) + @ApiResponses( + value = { + @ApiResponse( + responseCode = "200", + description = "Schema successfully registered.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = RegisterSchemaResponse.class))), + @ApiResponse( + responseCode = "409", + description = "Conflict. Incompatible schema.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "422", + description = + "Unprocessable entity. Error code 42201 indicates an invalid schema or schema type. ", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))), + @ApiResponse( + responseCode = "500", + description = + "Internal Server Error. Error code 50001 indicates a failure in the backend data store.Error code 50002 indicates operation timed out. 
Error code 50003 indicates a failure forwarding the request to the primary.", + content = + @Content( + mediaType = "application/vnd.schemaregistry.v1+json", + schema = + @io.swagger.v3.oas.annotations.media.Schema( + implementation = ErrorMessage.class))) + }) + @RequestMapping( + value = "/subjects/{subject}/versions", + produces = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json; qs=0.9", + "application/json; qs=0.5" + }, + consumes = { + "application/vnd.schemaregistry.v1+json", + "application/vnd.schemaregistry+json", + "application/json", + "application/octet-stream" + }, + method = RequestMethod.POST) + default ResponseEntity register( + @Parameter( + in = ParameterIn.PATH, + description = "Name of the subject", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @PathVariable("subject") + String subject, + @Parameter( + in = ParameterIn.DEFAULT, + description = "Schema", + required = true, + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestBody + RegisterSchemaRequest body, + @Parameter( + in = ParameterIn.QUERY, + description = "Whether to register the normalized schema", + schema = @io.swagger.v3.oas.annotations.media.Schema()) + @Valid + @RequestParam(value = "normalize", required = false) + Boolean normalize) { + if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) { + if (getAcceptHeader().get().contains("application/json")) { + try { + return new ResponseEntity<>( + getObjectMapper() + .get() + .readValue("{\n \"id\" : 100001\n}", RegisterSchemaResponse.class), + HttpStatus.NOT_IMPLEMENTED); + } catch (IOException e) { + log.error("Couldn't serialize response for content type application/json", e); + return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR); } - return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } + } else { + log.warn( + "ObjectMapper or HttpServletRequest not configured in default SubjectsApi interface so no example is generated"); } - + return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED); + } } - diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java index f9b634add7b2a..779a56d6de540 100644 --- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java +++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/SubjectsApiController.java @@ -1,31 +1,33 @@ package io.swagger.api; import com.fasterxml.jackson.databind.ObjectMapper; -import org.springframework.stereotype.Controller; -import javax.servlet.http.HttpServletRequest; import java.util.Optional; -@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") +import javax.servlet.http.HttpServletRequest; +import org.springframework.stereotype.Controller; + +@javax.annotation.Generated( + value = "io.swagger.codegen.v3.generators.java.SpringCodegen", + date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]") @Controller public class SubjectsApiController implements SubjectsApi { - private final ObjectMapper objectMapper; - - private final HttpServletRequest request; + private final ObjectMapper objectMapper; - @org.springframework.beans.factory.annotation.Autowired - public SubjectsApiController(ObjectMapper objectMapper, HttpServletRequest request) { - this.objectMapper = 
objectMapper;
-        this.request = request;
-    }
+  private final HttpServletRequest request;
 
-    @Override
-    public Optional<ObjectMapper> getObjectMapper() {
-        return Optional.ofNullable(objectMapper);
-    }
+  @org.springframework.beans.factory.annotation.Autowired
+  public SubjectsApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
 
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
 
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java
index 268d50aa3a68a..65961426ec364 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1Api.java
@@ -1,86 +1,82 @@
 /**
  * NOTE: This class is auto generated by the swagger code generator program (3.0.33).
- * https://github.com/swagger-api/swagger-codegen
- * Do not edit the class manually.
+ * https://github.com/swagger-api/swagger-codegen Do not edit the class manually.
  */
 package io.swagger.api;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.swagger.v3.oas.annotations.Operation;
-import io.swagger.v3.oas.annotations.Parameter;
-import io.swagger.v3.oas.annotations.enums.ParameterIn;
-import io.swagger.v3.oas.annotations.responses.ApiResponses;
 import io.swagger.v3.oas.annotations.responses.ApiResponse;
-import io.swagger.v3.oas.annotations.media.ArraySchema;
-import io.swagger.v3.oas.annotations.media.Content;
-import io.swagger.v3.oas.annotations.security.SecurityRequirement;
+import io.swagger.v3.oas.annotations.responses.ApiResponses;
+import java.util.Optional;
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.constraints.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
 import org.springframework.validation.annotation.Validated;
-import org.springframework.web.bind.annotation.PathVariable;
-import org.springframework.web.bind.annotation.RequestBody;
-import org.springframework.web.bind.annotation.RequestHeader;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestMethod;
-import org.springframework.web.bind.annotation.RequestParam;
-import org.springframework.web.bind.annotation.RequestPart;
-import org.springframework.web.multipart.MultipartFile;
-import org.springframework.web.bind.annotation.CookieValue;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.Valid;
-import javax.validation.constraints.*;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Validated
 public interface V1Api {
-    Logger log = LoggerFactory.getLogger(V1Api.class);
+  Logger log = LoggerFactory.getLogger(V1Api.class);
 
-    default Optional<ObjectMapper> getObjectMapper(){
-        return Optional.empty();
-    }
+  default Optional<ObjectMapper> getObjectMapper() {
+    return Optional.empty();
+  }
 
-    default Optional<HttpServletRequest> getRequest(){
-        return Optional.empty();
-    }
+  default Optional<HttpServletRequest> getRequest() {
+    return Optional.empty();
+  }
 
-    default Optional<String> getAcceptHeader() {
-        return getRequest().map(r -> r.getHeader("Accept"));
-    }
+  default Optional<String> getAcceptHeader() {
+    return getRequest().map(r -> r.getHeader("Accept"));
+  }
 
-    @Operation(summary = "Get the server metadata", description = "", tags={ "Server Metadata (v1)" }, hidden = true)
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store ") })
-    @RequestMapping(value = "/v1/metadata/id",
-        method = RequestMethod.GET)
-    default ResponseEntity getClusterId() {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
-        }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  @Operation(
+      summary = "Get the server metadata",
+      description = "",
+      tags = {"Server Metadata (v1)"},
+      hidden = true)
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "500",
+            description = "Error code 50001 -- Error in the backend data store ")
+      })
+  @RequestMapping(value = "/v1/metadata/id", method = RequestMethod.GET)
+  default ResponseEntity getClusterId() {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
     }
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
 
-
-    @Operation(summary = "Get Schema Registry server version", description = "", tags={ "Server Metadata (v1)" })
-    @ApiResponses(value = {
-        @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend data store ") })
-    @RequestMapping(value = "/v1/metadata/version",
-        method = RequestMethod.GET)
-    default ResponseEntity getSchemaRegistryVersion() {
-        if(getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
-        } else {
-            log.warn("ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
-        }
-        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  @Operation(
+      summary = "Get Schema Registry server version",
+      description = "",
+      tags = {"Server Metadata (v1)"})
+  @ApiResponses(
+      value = {
+        @ApiResponse(
+            responseCode = "500",
+            description = "Error code 50001 -- Error in the backend data store ")
+      })
+  @RequestMapping(value = "/v1/metadata/version", method = RequestMethod.GET)
+  default ResponseEntity getSchemaRegistryVersion() {
+    if (getObjectMapper().isPresent() && getAcceptHeader().isPresent()) {
+    } else {
+      log.warn(
+          "ObjectMapper or HttpServletRequest not configured in default V1Api interface so no example is generated");
     }
-
+    return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
+  }
 }
-
diff --git a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java
index d65db3be11231..90e56a914652e 100644
--- a/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java
+++ b/metadata-service/schema-registry-api/generated/src/main/java/io/swagger/api/V1ApiController.java
@@ -1,31 +1,33 @@
 package io.swagger.api;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.springframework.stereotype.Controller;
-import javax.servlet.http.HttpServletRequest;
 import java.util.Optional;
 
-@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
+import javax.servlet.http.HttpServletRequest;
+import org.springframework.stereotype.Controller;
+
+@javax.annotation.Generated(
+    value = "io.swagger.codegen.v3.generators.java.SpringCodegen",
+    date = "2022-12-20T16:52:36.517693Z[Europe/Lisbon]")
 @Controller
 public class V1ApiController implements V1Api {
-    private final ObjectMapper objectMapper;
-
-    private final HttpServletRequest request;
+  private final ObjectMapper objectMapper;
 
-    @org.springframework.beans.factory.annotation.Autowired
-    public V1ApiController(ObjectMapper objectMapper, HttpServletRequest request) {
-        this.objectMapper = objectMapper;
-        this.request = request;
-    }
+  private final HttpServletRequest request;
 
-    @Override
-    public Optional<ObjectMapper> getObjectMapper() {
-        return Optional.ofNullable(objectMapper);
-    }
+  @org.springframework.beans.factory.annotation.Autowired
+  public V1ApiController(ObjectMapper objectMapper, HttpServletRequest request) {
+    this.objectMapper = objectMapper;
+    this.request = request;
+  }
 
-    @Override
-    public Optional<HttpServletRequest> getRequest() {
-        return Optional.ofNullable(request);
-    }
+  @Override
+  public Optional<ObjectMapper> getObjectMapper() {
+    return Optional.ofNullable(objectMapper);
+  }
 
+  @Override
+  public Optional<HttpServletRequest> getRequest() {
+    return Optional.ofNullable(request);
+  }
 }
diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java
index 0cf57361e58f8..3790bbde8e39f 100644
--- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java
+++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/SchemaRegistryController.java
@@ -35,17 +35,23 @@
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 
-
-/**
- * DataHub Rest Controller implementation for Confluent's Schema Registry OpenAPI spec.
- */
+/** DataHub Rest Controller implementation for Confluent's Schema Registry OpenAPI spec. */
 @Slf4j
 @RestController
 @RequestMapping("/api")
 @RequiredArgsConstructor
-@ConditionalOnProperty(name = "kafka.schemaRegistry.type", havingValue = InternalSchemaRegistryFactory.TYPE)
+@ConditionalOnProperty(
+    name = "kafka.schemaRegistry.type",
+    havingValue = InternalSchemaRegistryFactory.TYPE)
 public class SchemaRegistryController
-    implements CompatibilityApi, ConfigApi, ContextsApi, DefaultApi, ModeApi, SchemasApi, SubjectsApi, V1Api {
+    implements CompatibilityApi,
+        ConfigApi,
+        ContextsApi,
+        DefaultApi,
+        ModeApi,
+        SchemasApi,
+        SubjectsApi,
+        V1Api {
 
   private final ObjectMapper objectMapper;
 
@@ -82,7 +88,8 @@ public ResponseEntity getSchemaRegistryVersion() {
   }
 
   @Override
-  public ResponseEntity deleteSchemaVersion(String subject, String version, Boolean permanent) {
+  public ResponseEntity deleteSchemaVersion(
+      String subject, String version, Boolean permanent) {
     log.error("[SubjectsApi] deleteSchemaVersion method not implemented");
     return SubjectsApi.super.deleteSchemaVersion(subject, version, permanent);
  }
@@ -100,7 +107,8 @@ public ResponseEntity<List<Integer>> getReferencedBy(String subject, String vers
   }
 
   @Override
-  public ResponseEntity<Schema> getSchemaByVersion(String subject, String version, Boolean deleted) {
+  public ResponseEntity<Schema> getSchemaByVersion(
+      String subject, String version, Boolean deleted) {
     log.error("[SubjectsApi] getSchemaByVersion method not implemented");
     return SubjectsApi.super.getSchemaByVersion(subject, version, deleted);
   }
@@ -112,20 +120,22 @@ public ResponseEntity getSchemaOnly2(String subject, String version, Boo
   }
 
   @Override
-  public ResponseEntity<List<String>> list(String subjectPrefix, Boolean deleted, Boolean deletedOnly) {
+  public ResponseEntity<List<String>> list(
+      String subjectPrefix, Boolean deleted, Boolean deletedOnly) {
     log.error("[SubjectsApi] list method not implemented");
     return SubjectsApi.super.list(subjectPrefix, deleted, deletedOnly);
   }
 
   @Override
-  public ResponseEntity<List<Integer>> listVersions(String subject, Boolean deleted, Boolean deletedOnly) {
+  public ResponseEntity<List<Integer>> listVersions(
+      String subject, Boolean deleted, Boolean deletedOnly) {
     log.error("[SubjectsApi] listVersions method not implemented");
     return SubjectsApi.super.listVersions(subject, deleted, deletedOnly);
   }
 
   @Override
-  public ResponseEntity<Schema> lookUpSchemaUnderSubject(String subject, RegisterSchemaRequest body, Boolean normalize,
-      Boolean deleted) {
+  public ResponseEntity<Schema> lookUpSchemaUnderSubject(
+      String subject, RegisterSchemaRequest body, Boolean normalize, Boolean deleted) {
     log.error("[SubjectsApi] lookUpSchemaUnderSubject method not implemented");
     return SubjectsApi.super.lookUpSchemaUnderSubject(subject, body, normalize, deleted);
   }
@@ -149,26 +159,33 @@ public ResponseEntity getTopLevelMode() {
   }
 
   @Override
-  public ResponseEntity updateMode(String subject, ModeUpdateRequest body, Boolean force) {
+  public ResponseEntity updateMode(
+      String subject, ModeUpdateRequest body, Boolean force) {
     log.error("[ModeApi] updateMode method not implemented");
     return ModeApi.super.updateMode(subject, body, force);
   }
 
   @Override
-  public ResponseEntity updateTopLevelMode(ModeUpdateRequest body, Boolean force) {
+  public ResponseEntity updateTopLevelMode(
+      ModeUpdateRequest body, Boolean force) {
     log.error("[ModeApi] updateTopLevelMode method not implemented");
     return ModeApi.super.updateTopLevelMode(body, force);
   }
 
   @Override
-  @Operation(summary = "Schema Registry Root Resource", description = "The Root resource is a no-op, only used to "
-      + "validate endpoint is ready.", tags = { "Schema Registry Base" })
+  @Operation(
+      summary = "Schema Registry Root Resource",
+      description = "The Root resource is a no-op, only used to " + "validate endpoint is ready.",
+      tags = {"Schema Registry Base"})
   public ResponseEntity get() {
     return new ResponseEntity<>(HttpStatus.OK);
   }
 
   @Override
-  @Operation(summary = "", description = "", tags = { "Schema Registry Base" })
+  @Operation(
+      summary = "",
+      description = "",
+      tags = {"Schema Registry Base"})
   public ResponseEntity<Map<String, String>> post(Map<String, String> body) {
     log.error("[DefaultApi] post method not implemented");
     return DefaultApi.super.post(body);
@@ -205,7 +222,8 @@ public ResponseEntity getTopLevelConfig() {
   }
 
   @Override
-  public ResponseEntity updateSubjectLevelConfig(String subject, ConfigUpdateRequest body) {
+  public ResponseEntity updateSubjectLevelConfig(
+      String subject, ConfigUpdateRequest body) {
     log.error("[ConfigApi] updateSubjectLevelConfig method not implemented");
     return ConfigApi.super.updateSubjectLevelConfig(subject, body);
   }
@@ -217,44 +235,55 @@ public ResponseEntity updateTopLevelConfig(ConfigUpdateRequ
   }
 
   @Override
-  public ResponseEntity testCompatibilityBySubjectName(String subject, String version,
-      RegisterSchemaRequest body, Boolean verbose) {
+  public ResponseEntity testCompatibilityBySubjectName(
+      String subject, String version, RegisterSchemaRequest body, Boolean verbose) {
     log.error("[CompatibilityApi] testCompatibilityBySubjectName method not implemented");
     return CompatibilityApi.super.testCompatibilityBySubjectName(subject, version, body, verbose);
   }
 
   @Override
-  public ResponseEntity testCompatibilityForSubject(String subject,
-      RegisterSchemaRequest body, Boolean verbose) {
+  public ResponseEntity testCompatibilityForSubject(
+      String subject, RegisterSchemaRequest body, Boolean verbose) {
     log.error("[CompatibilityApi] testCompatibilityForSubject method not implemented");
     return CompatibilityApi.super.testCompatibilityForSubject(subject, body, verbose);
   }
 
   @Override
-  public ResponseEntity<RegisterSchemaResponse> register(String subject, RegisterSchemaRequest body,
-      Boolean normalize) {
+  public ResponseEntity<RegisterSchemaResponse> register(
+      String subject, RegisterSchemaRequest body, Boolean normalize) {
     final String topicName = subject.replaceFirst("-value", "");
-    return _schemaRegistryService.getSchemaIdForTopic(topicName).map(id -> {
-      final RegisterSchemaResponse response = new RegisterSchemaResponse();
-      return new ResponseEntity<>(response.id(id), HttpStatus.OK);
-    }).orElseGet(() -> {
-      log.error("Couldn't find topic with name {}.", topicName);
-      return new ResponseEntity<>(HttpStatus.NOT_FOUND);
-    });
-  }
-
-  @Override
-  public ResponseEntity<SchemaString> getSchema(Integer id, String subject, String format, Boolean fetchMaxId) {
-    return _schemaRegistryService.getSchemaForId(id).map(schema -> {
-      SchemaString result = new SchemaString();
-      result.setMaxId(id);
-      result.setSchemaType("AVRO");
-      result.setSchema(schema.toString());
-      return new ResponseEntity<>(result, HttpStatus.OK);
-    }).orElseGet(() -> {
-      log.error("Couldn't find topic with id {}.", id);
-      return new ResponseEntity<>(HttpStatus.NOT_FOUND);
-    });
+    return _schemaRegistryService
+        .getSchemaIdForTopic(topicName)
+        .map(
+            id -> {
+              final RegisterSchemaResponse response = new RegisterSchemaResponse();
+              return new ResponseEntity<>(response.id(id), HttpStatus.OK);
+            })
+        .orElseGet(
+            () -> {
+              log.error("Couldn't find topic with name {}.", topicName);
+              return new ResponseEntity<>(HttpStatus.NOT_FOUND);
+            });
+  }
+
+  @Override
+  public ResponseEntity<SchemaString> getSchema(
+      Integer id, String subject, String format, Boolean fetchMaxId) {
+    return _schemaRegistryService
+        .getSchemaForId(id)
+        .map(
+            schema -> {
+              SchemaString result = new SchemaString();
+              result.setMaxId(id);
+              result.setSchemaType("AVRO");
+              result.setSchema(schema.toString());
+              return new ResponseEntity<>(result, HttpStatus.OK);
+            })
+        .orElseGet(
+            () -> {
+              log.error("Couldn't find topic with id {}.", id);
+              return new ResponseEntity<>(HttpStatus.NOT_FOUND);
+            });
   }
 
   @Override
@@ -270,8 +299,8 @@ public ResponseEntity<List<String>> getSchemaTypes() {
   }
 
   @Override
-  public ResponseEntity<List<Schema>> getSchemas(String subjectPrefix, Boolean deleted, Boolean latestOnly,
-      Integer offset, Integer limit) {
+  public ResponseEntity<List<Schema>> getSchemas(
+      String subjectPrefix, Boolean deleted, Boolean latestOnly, Integer offset, Integer limit) {
     log.error("[SchemasApi] getSchemas method not implemented");
     return SchemasApi.super.getSchemas(subjectPrefix, deleted, latestOnly, offset, limit);
   }
@@ -283,7 +312,8 @@ public ResponseEntity<List<String>> getSubjects(Integer id, String subject, Bool
   }
 
   @Override
-  public ResponseEntity<List<SubjectVersion>> getVersions(Integer id, String subject, Boolean deleted) {
+  public ResponseEntity<List<SubjectVersion>> getVersions(
+      Integer id, String subject, Boolean deleted) {
     log.error("[SchemasApi] getVersions method not implemented");
     return SchemasApi.super.getVersions(id, subject, deleted);
   }
diff --git a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java
index d217d501630e3..98163a7d91420 100644
--- a/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java
+++ b/metadata-service/schema-registry-servlet/src/main/java/io/datahubproject/openapi/schema/registry/config/SpringWebSchemaRegistryConfig.java
@@ -13,10 +13,10 @@
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 
-
 @EnableWebMvc
-@OpenAPIDefinition(info = @Info(title = "DataHub OpenAPI", version = "1.0.0"),
-    servers = {@Server(url = "/schema-registry/", description = "Schema Registry Server URL")})
+@OpenAPIDefinition(
+    info = @Info(title = "DataHub OpenAPI", version = "1.0.0"),
+    servers = {@Server(url = "/schema-registry/", description = "Schema Registry Server URL")})
 @Configuration
 public class SpringWebSchemaRegistryConfig implements WebMvcConfigurer {
 
@@ -27,4 +27,4 @@ public void configureMessageConverters(List<HttpMessageConverter<?>> messageConv
     messageConverters.add(new FormHttpMessageConverter());
     messageConverters.add(new MappingJackson2HttpMessageConverter());
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java
index 1d34008ebf1be..4dffe1e633c6c 100644
--- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java
+++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServer.java
@@ -12,5 +12,4 @@ public class OpenAPISpringTestServer {
   public static void main(String[] args) {
     SpringApplication.run(OpenAPISpringTestServer.class, args);
   }
-
 }
diff --git
a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java index 4e31dea6dee1f..1aa0361117c18 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/OpenAPISpringTestServerConfiguration.java @@ -6,9 +6,12 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.web.servlet.DispatcherServlet; - @TestConfiguration -@ComponentScan(basePackages = {"io.datahubproject.openapi.schema.registry", "com.linkedin.metadata.schema.registry"}) +@ComponentScan( + basePackages = { + "io.datahubproject.openapi.schema.registry", + "com.linkedin.metadata.schema.registry" + }) public class OpenAPISpringTestServerConfiguration { @Bean @@ -17,7 +20,8 @@ public DispatcherServlet dispatcherServlet() { } @Bean - public ServletRegistrationBean servletRegistrationBean(DispatcherServlet dispatcherServlet) { + public ServletRegistrationBean servletRegistrationBean( + DispatcherServlet dispatcherServlet) { return new ServletRegistrationBean<>(dispatcherServlet, "/"); } } diff --git a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java index 4e1bb09ab205b..664766f204e46 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTest.java @@ -1,5 +1,8 @@ package io.datahubproject.openapi.test; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.ByteString; @@ -45,25 +48,25 @@ import org.testcontainers.utility.DockerImageName; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - @ActiveProfiles("test") @ContextConfiguration @SpringBootTest( webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, - classes = {OpenAPISpringTestServer.class, OpenAPISpringTestServerConfiguration.class, - SchemaRegistryControllerTestConfiguration.class}) + classes = { + OpenAPISpringTestServer.class, + OpenAPISpringTestServerConfiguration.class, + SchemaRegistryControllerTestConfiguration.class + }) @EnableKafka public class SchemaRegistryControllerTest extends AbstractTestNGSpringContextTests { private static final String CONFLUENT_PLATFORM_VERSION = "7.2.2"; - static KafkaContainer kafka = new KafkaContainer( - DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION)) - .withReuse(true) - .withStartupAttempts(5) - .withStartupTimeout(Duration.of(30, ChronoUnit.SECONDS)); + static KafkaContainer kafka = + new KafkaContainer( + DockerImageName.parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION)) + .withReuse(true) + .withStartupAttempts(5) + .withStartupTimeout(Duration.of(30, ChronoUnit.SECONDS)); @DynamicPropertySource static void kafkaProperties(DynamicPropertyRegistry registry) { @@ -73,8 +76,7 @@ static void 
kafkaProperties(DynamicPropertyRegistry registry) { registry.add("kafka.schemaRegistry.url", () -> "http://localhost:53222/api/"); } - @Autowired - EventProducer _producer; + @Autowired EventProducer _producer; private final CountDownLatch mcpLatch = new CountDownLatch(1); @@ -89,7 +91,8 @@ static void kafkaProperties(DynamicPropertyRegistry registry) { private final AtomicReference peRef = new AtomicReference<>(); @Test - public void testMCPConsumption() throws IOException, InterruptedException, ExecutionException, TimeoutException { + public void testMCPConsumption() + throws IOException, InterruptedException, ExecutionException, TimeoutException { final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); final DatasetProperties datasetProperties = new DatasetProperties(); datasetProperties.setName("Foo Bar"); @@ -101,7 +104,8 @@ public void testMCPConsumption() throws IOException, InterruptedException, Execu gmce.setAspectName("datasetProperties"); final JacksonDataTemplateCodec dataTemplateCodec = new JacksonDataTemplateCodec(); - final byte[] datasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + final byte[] datasetPropertiesSerialized = + dataTemplateCodec.dataTemplateToBytes(datasetProperties); final GenericAspect genericAspect = new GenericAspect(); genericAspect.setValue(ByteString.unsafeWrap(datasetPropertiesSerialized)); genericAspect.setContentType("application/json"); @@ -115,7 +119,8 @@ public void testMCPConsumption() throws IOException, InterruptedException, Execu } @Test - public void testMCLConsumption() throws IOException, InterruptedException, ExecutionException, TimeoutException { + public void testMCLConsumption() + throws IOException, InterruptedException, ExecutionException, TimeoutException { final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); final DatasetProperties datasetProperties = new DatasetProperties(); datasetProperties.setName("Foo Bar"); @@ -130,7 +135,8 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu // Set old aspect final GenericAspect oldAspect = new GenericAspect(); - final byte[] oldDatasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + final byte[] oldDatasetPropertiesSerialized = + dataTemplateCodec.dataTemplateToBytes(datasetProperties); oldAspect.setValue(ByteString.unsafeWrap(oldDatasetPropertiesSerialized)); oldAspect.setContentType("application/json"); metadataChangeLog.setPreviousAspectValue(GenericRecordUtils.serializeAspect(oldAspect)); @@ -139,16 +145,20 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu // Set new aspect final GenericAspect newAspectValue = new GenericAspect(); datasetProperties.setDescription("Updated data"); - final byte[] newDatasetPropertiesSerialized = dataTemplateCodec.dataTemplateToBytes(datasetProperties); + final byte[] newDatasetPropertiesSerialized = + dataTemplateCodec.dataTemplateToBytes(datasetProperties); newAspectValue.setValue(ByteString.unsafeWrap(newDatasetPropertiesSerialized)); newAspectValue.setContentType("application/json"); metadataChangeLog.setAspect(GenericRecordUtils.serializeAspect(newAspectValue)); metadataChangeLog.setSystemMetadata(SystemMetadataUtils.createDefaultSystemMetadata()); final MockEntitySpec entitySpec = new MockEntitySpec("dataset"); - final AspectSpec aspectSpec = entitySpec.createAspectSpec(datasetProperties, DATASET_PROPERTIES_ASPECT_NAME); + final AspectSpec 
aspectSpec = + entitySpec.createAspectSpec(datasetProperties, DATASET_PROPERTIES_ASPECT_NAME); - _producer.produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog).get(10, TimeUnit.SECONDS); + _producer + .produceMetadataChangeLog(entityUrn, aspectSpec, metadataChangeLog) + .get(10, TimeUnit.SECONDS); final boolean messageConsumed = mclLatch.await(10, TimeUnit.SECONDS); assertTrue(messageConsumed); assertEquals(mclLatch.getCount(), 0); @@ -156,7 +166,8 @@ public void testMCLConsumption() throws IOException, InterruptedException, Execu } @Test - public void testPEConsumption() throws InterruptedException, ExecutionException, TimeoutException { + public void testPEConsumption() + throws InterruptedException, ExecutionException, TimeoutException { final Urn entityUrn = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"); final EntityChangeEvent changeEvent = new EntityChangeEvent(); @@ -172,11 +183,11 @@ public void testPEConsumption() throws InterruptedException, ExecutionException, final PlatformEvent platformEvent = new PlatformEvent(); platformEvent.setName(CHANGE_EVENT_PLATFORM_EVENT_NAME); - platformEvent.setHeader( - new PlatformEventHeader().setTimestampMillis(123L)); + platformEvent.setHeader(new PlatformEventHeader().setTimestampMillis(123L)); platformEvent.setPayload(GenericRecordUtils.serializePayload(changeEvent)); - _producer.producePlatformEvent(CHANGE_EVENT_PLATFORM_EVENT_NAME, "Some key", platformEvent) + _producer + .producePlatformEvent(CHANGE_EVENT_PLATFORM_EVENT_NAME, "Some key", platformEvent) .get(10, TimeUnit.SECONDS); final boolean messageConsumed = peLatch.await(10, TimeUnit.SECONDS); @@ -185,8 +196,11 @@ public void testPEConsumption() throws InterruptedException, ExecutionException, assertEquals(peRef.get(), platformEvent); } - @KafkaListener(id = "test-mcp-consumer", topics = Topics.METADATA_CHANGE_PROPOSAL, - containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"}) + @KafkaListener( + id = "test-mcp-consumer", + topics = Topics.METADATA_CHANGE_PROPOSAL, + containerFactory = "kafkaEventConsumer", + properties = {"auto.offset.reset:earliest"}) public void receiveMCP(ConsumerRecord consumerRecord) { final GenericRecord value = consumerRecord.value(); @@ -199,8 +213,11 @@ public void receiveMCP(ConsumerRecord consumerRecord) { } } - @KafkaListener(id = "test-mcl-consumer", topics = Topics.METADATA_CHANGE_LOG_VERSIONED, - containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"}) + @KafkaListener( + id = "test-mcl-consumer", + topics = Topics.METADATA_CHANGE_LOG_VERSIONED, + containerFactory = "kafkaEventConsumer", + properties = {"auto.offset.reset:earliest"}) public void receiveMCL(ConsumerRecord consumerRecord) { final GenericRecord value = consumerRecord.value(); @@ -212,8 +229,11 @@ public void receiveMCL(ConsumerRecord consumerRecord) { } } - @KafkaListener(id = "test-pe-consumer", topics = Topics.PLATFORM_EVENT, - containerFactory = "kafkaEventConsumer", properties = {"auto.offset.reset:earliest"}) + @KafkaListener( + id = "test-pe-consumer", + topics = Topics.PLATFORM_EVENT, + containerFactory = "kafkaEventConsumer", + properties = {"auto.offset.reset:earliest"}) public void receivePE(ConsumerRecord consumerRecord) { final GenericRecord value = consumerRecord.value(); @@ -224,4 +244,4 @@ public void receivePE(ConsumerRecord consumerRecord) { throw new RuntimeException(e); } } -} \ No newline at end of file +} diff --git 
a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java index e9fb5887e29cc..ff5b951092070 100644 --- a/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java +++ b/metadata-service/schema-registry-servlet/src/test/java/io/datahubproject/openapi/test/SchemaRegistryControllerTestConfiguration.java @@ -4,10 +4,7 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.test.context.TestPropertySource; - @TestConfiguration @TestPropertySource(value = "classpath:/application.properties") @ComponentScan(basePackages = {"com.linkedin.gms.factory.kafka", "com.linkedin.gms.factory.config"}) -public class SchemaRegistryControllerTestConfiguration { - -} +public class SchemaRegistryControllerTestConfiguration {} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java index b622fc5bb6af2..caebc6a334e72 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventConstants.java @@ -1,8 +1,7 @@ package com.linkedin.metadata.datahubusage; public class DataHubUsageEventConstants { - private DataHubUsageEventConstants() { - } + private DataHubUsageEventConstants() {} // Common fields public static final String TYPE = "type"; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java index c1018e2031b17..518b5f28a5b99 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java @@ -2,7 +2,6 @@ import lombok.Getter; - @Getter public enum DataHubUsageEventType { PAGE_VIEW_EVENT("PageViewEvent"), diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java index 40a5e3a07ae6d..eab482c7bab27 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/AspectUtils.java @@ -25,57 +25,67 @@ import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTimeUtils; - @Slf4j public class AspectUtils { - private AspectUtils() { - } + private AspectUtils() {} - public static final Set SUPPORTED_TYPES = Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); + public static final Set SUPPORTED_TYPES = + Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); public static List getAdditionalChanges( - @Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService, - boolean onPrimaryKeyInsertOnly) { + @Nonnull MetadataChangeProposal metadataChangeProposal, + @Nonnull EntityService entityService, + boolean onPrimaryKeyInsertOnly) { // No additional changes for unsupported operations if 
(!SUPPORTED_TYPES.contains(metadataChangeProposal.getChangeType())) { return Collections.emptyList(); } - final Urn urn = EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, + final Urn urn = + EntityKeyUtils.getUrnFromProposal( + metadataChangeProposal, entityService.getKeyAspectSpec(metadataChangeProposal.getEntityType())); final Map includedAspects; if (metadataChangeProposal.getChangeType() != ChangeType.PATCH) { - RecordTemplate aspectRecord = GenericRecordUtils.deserializeAspect(metadataChangeProposal.getAspect().getValue(), - metadataChangeProposal.getAspect().getContentType(), entityService.getEntityRegistry() - .getEntitySpec(urn.getEntityType()).getAspectSpec(metadataChangeProposal.getAspectName())); + RecordTemplate aspectRecord = + GenericRecordUtils.deserializeAspect( + metadataChangeProposal.getAspect().getValue(), + metadataChangeProposal.getAspect().getContentType(), + entityService + .getEntityRegistry() + .getEntitySpec(urn.getEntityType()) + .getAspectSpec(metadataChangeProposal.getAspectName())); includedAspects = ImmutableMap.of(metadataChangeProposal.getAspectName(), aspectRecord); } else { includedAspects = ImmutableMap.of(); } if (onPrimaryKeyInsertOnly) { - return entityService.generateDefaultAspectsOnFirstWrite(urn, includedAspects) - .getValue() - .stream() - .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + return entityService + .generateDefaultAspectsOnFirstWrite(urn, includedAspects) + .getValue() + .stream() + .map( + entry -> + getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } else { - return entityService.generateDefaultAspectsIfMissing(urn, includedAspects) - .stream() - .map(entry -> getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) - .filter(Objects::nonNull) - .collect(Collectors.toList()); + return entityService.generateDefaultAspectsIfMissing(urn, includedAspects).stream() + .map( + entry -> + getProposalFromAspect(entry.getKey(), entry.getValue(), metadataChangeProposal)) + .filter(Objects::nonNull) + .collect(Collectors.toList()); } } public static List getAdditionalChanges( - @Nonnull MetadataChangeProposal metadataChangeProposal, - @Nonnull EntityService entityService) { + @Nonnull MetadataChangeProposal metadataChangeProposal, + @Nonnull EntityService entityService) { return getAdditionalChanges(metadataChangeProposal, entityService, false); } @@ -85,12 +95,10 @@ public static Map batchGetLatestAspect( Set urns, String aspectName, EntityClient entityClient, - Authentication authentication) throws Exception { - final Map gmsResponse = entityClient.batchGetV2( - entity, - urns, - ImmutableSet.of(aspectName), - authentication); + Authentication authentication) + throws Exception { + final Map gmsResponse = + entityClient.batchGetV2(entity, urns, ImmutableSet.of(aspectName), authentication); final Map finalResult = new HashMap<>(); for (Urn urn : urns) { EntityResponse response = gmsResponse.get(urn); @@ -101,8 +109,8 @@ public static Map batchGetLatestAspect( return finalResult; } - private static MetadataChangeProposal getProposalFromAspect(String aspectName, RecordTemplate aspect, - MetadataChangeProposal original) { + private static MetadataChangeProposal getProposalFromAspect( + String aspectName, RecordTemplate aspect, MetadataChangeProposal original) { MetadataChangeProposal proposal = new 
MetadataChangeProposal(); GenericAspect genericAspect = GenericRecordUtils.serializeAspect(aspect); // Set net new fields @@ -110,7 +118,8 @@ private static MetadataChangeProposal getProposalFromAspect(String aspectName, R proposal.setAspectName(aspectName); // Set fields determined from original - // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an UPSERT + // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an + // UPSERT proposal.setChangeType(original.getChangeType()); if (ChangeType.PATCH.equals(proposal.getChangeType())) { proposal.setChangeType(ChangeType.UPSERT); @@ -128,7 +137,7 @@ private static MetadataChangeProposal getProposalFromAspect(String aspectName, R if (original.getAuditHeader() != null) { proposal.setAuditHeader(original.getAuditHeader()); } - + proposal.setEntityType(original.getEntityType()); return proposal; @@ -145,8 +154,11 @@ public static MetadataChangeProposal buildMetadataChangeProposal( return proposal; } - public static MetadataChangeProposal buildMetadataChangeProposal(@Nonnull String entityType, - @Nonnull RecordTemplate keyAspect, @Nonnull String aspectName, @Nonnull RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposal( + @Nonnull String entityType, + @Nonnull RecordTemplate keyAspect, + @Nonnull String aspectName, + @Nonnull RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityType(entityType); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(keyAspect)); @@ -162,4 +174,4 @@ public static AuditStamp getAuditStamp(Urn actor) { auditStamp.setActor(actor); return auditStamp; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java index 40284efe7ac82..3b71c698e0c9f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity; +import static com.linkedin.metadata.search.utils.QueryUtils.*; + import com.datahub.util.RecordUtils; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,7 +28,6 @@ import com.linkedin.metadata.run.RelatedAspectArray; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; - import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; @@ -36,375 +37,465 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; - import lombok.AllArgsConstructor; import lombok.Data; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.search.utils.QueryUtils.*; - - @Slf4j @RequiredArgsConstructor public class DeleteEntityService { - private final EntityService _entityService; - private final GraphService _graphService; - - private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; - - /** - * Public endpoint that deletes references to a given urn across DataHub's metadata graph. This is the entrypoint for - * addressing dangling pointers whenever a user deletes some entity. 
- * - * @param urn The urn for which to delete references in DataHub's metadata graph. - * @param dryRun Specifies if the delete logic should be executed to conclusion or if the caller simply wants a - * preview of the response. - * @return A {@link DeleteReferencesResponse} instance detailing the response of deleting references to the provided - * urn. - */ - public DeleteReferencesResponse deleteReferencesTo(final Urn urn, final boolean dryRun) { - final DeleteReferencesResponse result = new DeleteReferencesResponse(); - RelatedEntitiesResult relatedEntities = - _graphService.findRelatedEntities(null, newFilter("urn", urn.toString()), null, - EMPTY_FILTER, - ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000); - - final List relatedAspects = relatedEntities.getEntities().stream() - .flatMap(relatedEntity -> getRelatedAspectStream(urn, UrnUtils.getUrn(relatedEntity.getUrn()), + private final EntityService _entityService; + private final GraphService _graphService; + + private static final Integer ELASTIC_BATCH_DELETE_SLEEP_SEC = 5; + + /** + * Public endpoint that deletes references to a given urn across DataHub's metadata graph. This is + * the entrypoint for addressing dangling pointers whenever a user deletes some entity. + * + * @param urn The urn for which to delete references in DataHub's metadata graph. + * @param dryRun Specifies if the delete logic should be executed to conclusion or if the caller + * simply wants a preview of the response. + * @return A {@link DeleteReferencesResponse} instance detailing the response of deleting + * references to the provided urn. + */ + public DeleteReferencesResponse deleteReferencesTo(final Urn urn, final boolean dryRun) { + final DeleteReferencesResponse result = new DeleteReferencesResponse(); + RelatedEntitiesResult relatedEntities = + _graphService.findRelatedEntities( + null, + newFilter("urn", urn.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000); + + final List relatedAspects = + relatedEntities.getEntities().stream() + .flatMap( + relatedEntity -> + getRelatedAspectStream( + urn, + UrnUtils.getUrn(relatedEntity.getUrn()), relatedEntity.getRelationshipType())) - .limit(10) - .collect(Collectors.toList()); - - result.setRelatedAspects(new RelatedAspectArray(relatedAspects)); - result.setTotal(relatedEntities.getTotal()); - - if (dryRun) { - return result; - } - - for (int processedEntities = 0; processedEntities < relatedEntities.getTotal(); processedEntities += relatedEntities.getCount()) { - log.info("Processing batch {} of {} aspects", processedEntities, relatedEntities.getTotal()); - relatedEntities.getEntities().forEach(entity -> deleteReference(urn, entity)); - if (processedEntities + relatedEntities.getEntities().size() < relatedEntities.getTotal()) { - sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); - relatedEntities = _graphService.findRelatedEntities(null, newFilter("urn", urn.toString()), - null, EMPTY_FILTER, ImmutableList.of(), - newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), 0, 10000); - } - } - - return result; - } + .limit(10) + .collect(Collectors.toList()); - /** - * Gets a stream of relatedAspects Pojos (high-level, trimmed information) that relate an entity with urn `urn` to - * another entity of urn `relatedUrn` via a concrete relationship type. Used to give users of this API a summary of - * what aspects are related to a given urn and how. 
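The deletion loop above pages through incoming graph edges 10,000 at a time and sleeps between batches so the graph index can catch up with the writes. For reference, the query it repeats, with each positional argument annotated (a sketch; the filter helpers are the QueryUtils statics imported at the top of this file):

  RelatedEntitiesResult batch =
      _graphService.findRelatedEntities(
          null,                             // source entity type: any
          newFilter("urn", urn.toString()), // filter anchored on the deleted urn
          null,                             // destination entity type: any
          EMPTY_FILTER,                     // no extra destination filter
          ImmutableList.of(),               // relationship types: any
          newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING),
          0,                                // offset
          10000);                           // page size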
- * - * @param urn The identifier of the source entity. - * @param relatedUrn The identifier of the destination entity. - * @param relationshipType The name of the relationship type that links urn to relatedUrn. - * @return A stream of {@link RelatedAspect} instances that have the relationship from urn to relatedUrn. - */ - private Stream getRelatedAspectStream(Urn urn, Urn relatedUrn, String relationshipType) { - return getAspects(urn, relatedUrn, relationshipType).map(enrichedAspect -> { - final RelatedAspect relatedAspect = new RelatedAspect(); - relatedAspect.setEntity(relatedUrn); - relatedAspect.setRelationship(relationshipType); - relatedAspect.setAspect(enrichedAspect.getName()); - return relatedAspect; - }); - } + result.setRelatedAspects(new RelatedAspectArray(relatedAspects)); + result.setTotal(relatedEntities.getTotal()); - /** - * Gets a stream of Enriched Aspect Pojos (Aspect + aspect spec tuple) that relate an entity with urn `urn` to - * another entity of urn `relatedUrn` via a concrete relationship type. - * - * @param urn The identifier of the source entity. - * @param relatedUrn The identifier of the destination entity. - * @param relationshipType The name of the relationship type that links urn to relatedUrn. - * @return A stream of {@link EnrichedAspect} instances that have the relationship from urn to relatedUrn. - */ - private Stream getAspects(Urn urn, Urn relatedUrn, String relationshipType) { - final String relatedEntityName = relatedUrn.getEntityType(); - final EntitySpec relatedEntitySpec = _entityService.getEntityRegistry().getEntitySpec(relatedEntityName); - final Map aspectSpecs = getAspectSpecsReferringTo(urn.getEntityType(), relationshipType, relatedEntitySpec); - - // If we have an empty map it means that we have a graph edge that points to some aspect spec that we can't find in - // the entity registry. It would be a corrupted edge in the graph index or backwards incompatible change in the - // entity registry (I.e: deleting the aspect from the metadata model without being consistent in the graph index). - if (aspectSpecs.isEmpty()) { - log.error("Unable to find any aspect spec that has a {} relationship to {} entities. This means that the entity " - + "registry does not have relationships that the graph index has stored.", - relationshipType, relatedEntityName); - handleError(new DeleteEntityServiceError("Unable to find aspect spec in entity registry", - DeleteEntityServiceErrorReason.ENTITY_REGISTRY_SPEC_NOT_FOUND, - ImmutableMap.of("relatedEntityName", relatedEntityName, "relationshipType", relationshipType, - "relatedEntitySpec", relatedEntitySpec))); - return Stream.empty(); - } - - final List aspectList = getAspectsReferringTo(relatedUrn, aspectSpecs) - .collect(Collectors.toList()); - - // If we have an empty list it means that we have a graph edge that points to some aspect that we can't find in the - // entity service. It would be a corrupted edge in the graph index or corrupted record in the entity DB. - if (aspectList.isEmpty()) { - log.error("Unable to find an aspect instance that relates {} {} via relationship {} in the entity service. 
" - + "This is potentially a lack of consistency between the graph and entity DBs.", - urn, relatedUrn, relationshipType); - handleError(new DeleteEntityServiceError("Unable to find aspect instance in entity service", - DeleteEntityServiceErrorReason.ENTITY_SERVICE_ASPECT_NOT_FOUND, - ImmutableMap.of("urn", urn, "relatedUrn", relatedUrn, "relationship", relationshipType, - "aspectSpecs", aspectSpecs))); - return Stream.empty(); - } - - return aspectList.stream() - .filter(envelopedAspect -> hasRelationshipInstanceTo(envelopedAspect.getValue(), urn.getEntityType(), - relationshipType, aspectSpecs.get(envelopedAspect.getName()))) - .map(envelopedAspect -> new EnrichedAspect( - envelopedAspect.getName(), - envelopedAspect.getValue(), - aspectSpecs.get(envelopedAspect.getName())) - ); + if (dryRun) { + return result; } - /** - * Utility method to sleep the thread. - * - * @param seconds The number of seconds to sleep. - */ - private void sleep(final Integer seconds) { - try { - TimeUnit.SECONDS.sleep(seconds); - } catch (InterruptedException e) { - log.error("Interrupted sleep", e); - } + for (int processedEntities = 0; + processedEntities < relatedEntities.getTotal(); + processedEntities += relatedEntities.getCount()) { + log.info("Processing batch {} of {} aspects", processedEntities, relatedEntities.getTotal()); + relatedEntities.getEntities().forEach(entity -> deleteReference(urn, entity)); + if (processedEntities + relatedEntities.getEntities().size() < relatedEntities.getTotal()) { + sleep(ELASTIC_BATCH_DELETE_SLEEP_SEC); + relatedEntities = + _graphService.findRelatedEntities( + null, + newFilter("urn", urn.toString()), + null, + EMPTY_FILTER, + ImmutableList.of(), + newRelationshipFilter(EMPTY_FILTER, RelationshipDirection.INCOMING), + 0, + 10000); + } } - /** - * Processes an aspect of a given {@link RelatedEntity} instance that references a given {@link Urn}, removes said - * urn from the aspects and submits an MCP with the updated aspects. - * - * @param urn The urn to be found. - * @param relatedEntity The entity to be modified. - */ - private void deleteReference(final Urn urn, final RelatedEntity relatedEntity) { - final Urn relatedUrn = UrnUtils.getUrn(relatedEntity.getUrn()); - final String relationshipType = relatedEntity.getRelationshipType(); - getAspects(urn, relatedUrn, relationshipType) - .forEach(enrichedAspect -> { - final String aspectName = enrichedAspect.getName(); - final Aspect aspect = enrichedAspect.getAspect(); - final AspectSpec aspectSpec = enrichedAspect.getSpec(); - - final AtomicReference updatedAspect; - try { - updatedAspect = new AtomicReference<>(aspect.copy()); - } catch (CloneNotSupportedException e) { - log.error("Failed to clone aspect {}", aspect); - handleError(new DeleteEntityServiceError("Failed to clone aspect", - DeleteEntityServiceErrorReason.CLONE_FAILED, - ImmutableMap.of("aspect", aspect))); - return; - } - - aspectSpec.getRelationshipFieldSpecs().stream() - .filter(relationshipFieldSpec -> relationshipFieldSpec.getRelationshipAnnotation().getName().equals(relationshipType)) - .forEach(relationshipFieldSpec -> { - final PathSpec path = relationshipFieldSpec.getPath(); - updatedAspect.set(DeleteEntityUtils.getAspectWithReferenceRemoved(urn.toString(), - updatedAspect.get(), aspectSpec.getPegasusSchema(), path)); - }); - - // If there has been an update, then we produce an MCE. - if (!aspect.equals(updatedAspect.get())) { - if (updatedAspect.get() == null) { - // Then we should remove the aspect. 
- deleteAspect(relatedUrn, aspectName, aspect); - } else { - // Then we should update the aspect. - updateAspect(relatedUrn, aspectName, aspect, updatedAspect.get()); - } - } - }); + return result; + } + + /** + * Gets a stream of relatedAspects Pojos (high-level, trimmed information) that relate an entity + * with urn `urn` to another entity of urn `relatedUrn` via a concrete relationship type. Used to + * give users of this API a summary of what aspects are related to a given urn and how. + * + * @param urn The identifier of the source entity. + * @param relatedUrn The identifier of the destination entity. + * @param relationshipType The name of the relationship type that links urn to relatedUrn. + * @return A stream of {@link RelatedAspect} instances that have the relationship from urn to + * relatedUrn. + */ + private Stream getRelatedAspectStream( + Urn urn, Urn relatedUrn, String relationshipType) { + return getAspects(urn, relatedUrn, relationshipType) + .map( + enrichedAspect -> { + final RelatedAspect relatedAspect = new RelatedAspect(); + relatedAspect.setEntity(relatedUrn); + relatedAspect.setRelationship(relationshipType); + relatedAspect.setAspect(enrichedAspect.getName()); + return relatedAspect; + }); + } + + /** + * Gets a stream of Enriched Aspect Pojos (Aspect + aspect spec tuple) that relate an entity with + * urn `urn` to another entity of urn `relatedUrn` via a concrete relationship type. + * + * @param urn The identifier of the source entity. + * @param relatedUrn The identifier of the destination entity. + * @param relationshipType The name of the relationship type that links urn to relatedUrn. + * @return A stream of {@link EnrichedAspect} instances that have the relationship from urn to + * relatedUrn. + */ + private Stream getAspects(Urn urn, Urn relatedUrn, String relationshipType) { + final String relatedEntityName = relatedUrn.getEntityType(); + final EntitySpec relatedEntitySpec = + _entityService.getEntityRegistry().getEntitySpec(relatedEntityName); + final Map aspectSpecs = + getAspectSpecsReferringTo(urn.getEntityType(), relationshipType, relatedEntitySpec); + + // If we have an empty map it means that we have a graph edge that points to some aspect spec + // that we can't find in + // the entity registry. It would be a corrupted edge in the graph index or backwards + // incompatible change in the + // entity registry (I.e: deleting the aspect from the metadata model without being consistent in + // the graph index). + if (aspectSpecs.isEmpty()) { + log.error( + "Unable to find any aspect spec that has a {} relationship to {} entities. This means that the entity " + + "registry does not have relationships that the graph index has stored.", + relationshipType, + relatedEntityName); + handleError( + new DeleteEntityServiceError( + "Unable to find aspect spec in entity registry", + DeleteEntityServiceErrorReason.ENTITY_REGISTRY_SPEC_NOT_FOUND, + ImmutableMap.of( + "relatedEntityName", + relatedEntityName, + "relationshipType", + relationshipType, + "relatedEntitySpec", + relatedEntitySpec))); + return Stream.empty(); } - /** - * Delete an existing aspect for an urn. 
- * - * @param urn the urn of the entity to remove the aspect for - * @param aspectName the aspect to remove - * @param prevAspect the old value for the aspect - */ - private void deleteAspect(Urn urn, String aspectName, RecordTemplate prevAspect) { - final RollbackResult rollbackResult = _entityService.deleteAspect(urn.toString(), aspectName, - new HashMap<>(), true); - if (rollbackResult == null || rollbackResult.getNewValue() != null) { - log.error("Failed to delete aspect with references. Before {}, after: null, please check GMS logs" - + " logs for more information", prevAspect); - handleError(new DeleteEntityServiceError("Failed to ingest new aspect", - DeleteEntityServiceErrorReason.ASPECT_DELETE_FAILED, - ImmutableMap.of("urn", urn, "aspectName", aspectName))); - } + final List aspectList = + getAspectsReferringTo(relatedUrn, aspectSpecs).collect(Collectors.toList()); + + // If we have an empty list it means that we have a graph edge that points to some aspect that + // we can't find in the + // entity service. It would be a corrupted edge in the graph index or corrupted record in the + // entity DB. + if (aspectList.isEmpty()) { + log.error( + "Unable to find an aspect instance that relates {} {} via relationship {} in the entity service. " + + "This is potentially a lack of consistency between the graph and entity DBs.", + urn, + relatedUrn, + relationshipType); + handleError( + new DeleteEntityServiceError( + "Unable to find aspect instance in entity service", + DeleteEntityServiceErrorReason.ENTITY_SERVICE_ASPECT_NOT_FOUND, + ImmutableMap.of( + "urn", + urn, + "relatedUrn", + relatedUrn, + "relationship", + relationshipType, + "aspectSpecs", + aspectSpecs))); + return Stream.empty(); } - /** - * Update an aspect for an urn. - * - * @param urn the urn of the entity to remove the aspect for - * @param aspectName the aspect to remove - * @param prevAspect the old value for the aspect - * @param newAspect the new value for the aspect - */ - private void updateAspect(Urn urn, String aspectName, RecordTemplate prevAspect, RecordTemplate newAspect) { - final MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(urn); - proposal.setChangeType(ChangeType.UPSERT); - proposal.setEntityType(urn.getEntityType()); - proposal.setAspectName(aspectName); - proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); - - final AuditStamp auditStamp = new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - final IngestResult ingestProposalResult = _entityService.ingestProposal(proposal, auditStamp, false); - - if (!ingestProposalResult.isSqlCommitted()) { - log.error("Failed to ingest aspect with references removed. Before {}, after: {}, please check MCP processor" - + " logs for more information", prevAspect, newAspect); - handleError(new DeleteEntityServiceError("Failed to ingest new aspect", - DeleteEntityServiceErrorReason.MCP_PROCESSOR_FAILED, - ImmutableMap.of("proposal", proposal))); - } + return aspectList.stream() + .filter( + envelopedAspect -> + hasRelationshipInstanceTo( + envelopedAspect.getValue(), + urn.getEntityType(), + relationshipType, + aspectSpecs.get(envelopedAspect.getName()))) + .map( + envelopedAspect -> + new EnrichedAspect( + envelopedAspect.getName(), + envelopedAspect.getValue(), + aspectSpecs.get(envelopedAspect.getName()))); + } + + /** + * Utility method to sleep the thread. + * + * @param seconds The number of seconds to sleep. 
+ */ + private void sleep(final Integer seconds) { + try { + TimeUnit.SECONDS.sleep(seconds); + } catch (InterruptedException e) { + log.error("Interrupted sleep", e); } - - - /** - * Utility method that attempts to find Aspect information as well as the associated path spec for a given urn that - * has a relationship of type `relationType` to another urn. - * - * @param relatedUrn The urn of the related entity in which we want to find the aspect that has a relationship - * to `urn`. - * @param aspectSpecs The entity spec of the related entity. - * @return A {@link Stream} of {@link EnvelopedAspect} instances that contain relationships between `urn` & `relatedUrn`. - */ - private Stream getAspectsReferringTo(final Urn relatedUrn, - final Map aspectSpecs) { - - // FIXME: Can we not depend on entity service? - final EntityResponse entityResponse; - try { - entityResponse = _entityService.getEntityV2(relatedUrn.getEntityType(), relatedUrn, aspectSpecs.keySet()); - } catch (URISyntaxException e) { - log.error("Unable to retrieve entity data for relatedUrn " + relatedUrn, e); - return Stream.empty(); - } - // Find aspect which contains the relationship with the value we are looking for - return entityResponse - .getAspects() - .values() - .stream() - // Get aspects which contain the relationship field specs found above - .filter(Objects::nonNull) - .filter(aspect -> aspectSpecs.containsKey(aspect.getName())); + } + + /** + * Processes an aspect of a given {@link RelatedEntity} instance that references a given {@link + * Urn}, removes said urn from the aspects and submits an MCP with the updated aspects. + * + * @param urn The urn to be found. + * @param relatedEntity The entity to be modified. + */ + private void deleteReference(final Urn urn, final RelatedEntity relatedEntity) { + final Urn relatedUrn = UrnUtils.getUrn(relatedEntity.getUrn()); + final String relationshipType = relatedEntity.getRelationshipType(); + getAspects(urn, relatedUrn, relationshipType) + .forEach( + enrichedAspect -> { + final String aspectName = enrichedAspect.getName(); + final Aspect aspect = enrichedAspect.getAspect(); + final AspectSpec aspectSpec = enrichedAspect.getSpec(); + + final AtomicReference updatedAspect; + try { + updatedAspect = new AtomicReference<>(aspect.copy()); + } catch (CloneNotSupportedException e) { + log.error("Failed to clone aspect {}", aspect); + handleError( + new DeleteEntityServiceError( + "Failed to clone aspect", + DeleteEntityServiceErrorReason.CLONE_FAILED, + ImmutableMap.of("aspect", aspect))); + return; + } + + aspectSpec.getRelationshipFieldSpecs().stream() + .filter( + relationshipFieldSpec -> + relationshipFieldSpec + .getRelationshipAnnotation() + .getName() + .equals(relationshipType)) + .forEach( + relationshipFieldSpec -> { + final PathSpec path = relationshipFieldSpec.getPath(); + updatedAspect.set( + DeleteEntityUtils.getAspectWithReferenceRemoved( + urn.toString(), + updatedAspect.get(), + aspectSpec.getPegasusSchema(), + path)); + }); + + // If there has been an update, then we produce an MCE. + if (!aspect.equals(updatedAspect.get())) { + if (updatedAspect.get() == null) { + // Then we should remove the aspect. + deleteAspect(relatedUrn, aspectName, aspect); + } else { + // Then we should update the aspect. + updateAspect(relatedUrn, aspectName, aspect, updatedAspect.get()); + } + } + }); + } + + /** + * Delete an existing aspect for an urn. 
+ * + * @param urn the urn of the entity to remove the aspect for + * @param aspectName the aspect to remove + * @param prevAspect the old value for the aspect + */ + private void deleteAspect(Urn urn, String aspectName, RecordTemplate prevAspect) { + final RollbackResult rollbackResult = + _entityService.deleteAspect(urn.toString(), aspectName, new HashMap<>(), true); + if (rollbackResult == null || rollbackResult.getNewValue() != null) { + log.error( + "Failed to delete aspect with references. Before {}, after: null, please check GMS" + + " logs for more information", + prevAspect); + handleError( + new DeleteEntityServiceError( + "Failed to delete aspect", + DeleteEntityServiceErrorReason.ASPECT_DELETE_FAILED, + ImmutableMap.of("urn", urn, "aspectName", aspectName))); } + } + + /** + * Update an aspect for an urn. + * + * @param urn the urn of the entity to remove the aspect for + * @param aspectName the aspect to remove + * @param prevAspect the old value for the aspect + * @param newAspect the new value for the aspect + */ + private void updateAspect( + Urn urn, String aspectName, RecordTemplate prevAspect, RecordTemplate newAspect) { + final MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(urn); + proposal.setChangeType(ChangeType.UPSERT); + proposal.setEntityType(urn.getEntityType()); + proposal.setAspectName(aspectName); + proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect)); + + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + final IngestResult ingestProposalResult = + _entityService.ingestProposal(proposal, auditStamp, false); + + if (!ingestProposalResult.isSqlCommitted()) { + log.error( + "Failed to ingest aspect with references removed.
Before {}, after: {}, please check MCP processor" + + " logs for more information", + prevAspect, + newAspect); + handleError( + new DeleteEntityServiceError( + "Failed to ingest new aspect", + DeleteEntityServiceErrorReason.MCP_PROCESSOR_FAILED, + ImmutableMap.of("proposal", proposal))); } - - /** - * Computes the set of aspect specs of an entity that contain a relationship of a given name to a specific entity type. - * - * @param relatedEntityType The name of the entity. - * @param relationshipType The name of the relationship. - * @param entitySpec The entity spec from which to retrieve the aspect specs, if any. - * @return A filtered dictionary of aspect name to aspect specs containing only aspects that have a relationship of - * name relationshipType to the given relatedEntityType. - */ - private Map getAspectSpecsReferringTo(final String relatedEntityType, final String relationshipType, - final EntitySpec entitySpec) { - return entitySpec - .getAspectSpecMap() - .entrySet() - .stream() - .filter(entry -> findRelationshipFor(entry.getValue(), relationshipType, relatedEntityType).findAny().isPresent()) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - /** - * Utility method to find the relationship specs within an AspectSpec with name relationshipName and which has - * relatedEntity name as a valid destination type. - * - * @param spec The aspect spec from which to extract relationship field specs. - * @param relationshipType The name of the relationship to find. - * @param entityType The name of the entity type (i.e: dataset, chart, usergroup, etc...) which the relationship - * is valid for. - * @return The list of relationship field specs which match the criteria. - */ - private Stream findRelationshipFor(final AspectSpec spec, final String relationshipType, - final String entityType) { - return spec.getRelationshipFieldSpecs().stream() - .filter(relationship -> relationship.getRelationshipName().equals(relationshipType) - && relationship.getValidDestinationTypes().contains(entityType)); - } - - /** - * Entrypoint to handle the various errors that may occur during the execution of the delete entity service. - * @param error The error instance that provides context on what issue occured. - */ - private void handleError(final DeleteEntityServiceError error) { - // NO-OP for now. - } - - @AllArgsConstructor - @Data - private static class DeleteEntityServiceError { - String message; - DeleteEntityServiceErrorReason reason; - Map context; - } - - private enum DeleteEntityServiceErrorReason { - ENTITY_SERVICE_ASPECT_NOT_FOUND, - ENTITY_REGISTRY_SPEC_NOT_FOUND, - MCP_PROCESSOR_FAILED, - ASPECT_DELETE_FAILED, - CLONE_FAILED, - } - - @AllArgsConstructor - @Data - private static class EnrichedAspect { - String name; - Aspect aspect; - AspectSpec spec; + } + + /** + * Utility method that attempts to find Aspect information as well as the associated path spec for + * a given urn that has a relationship of type `relationType` to another urn. + * + * @param relatedUrn The urn of the related entity in which we want to find the aspect that has a + * relationship to `urn`. + * @param aspectSpecs The entity spec of the related entity. + * @return A {@link Stream} of {@link EnvelopedAspect} instances that contain relationships + * between `urn` & `relatedUrn`. + */ + private Stream getAspectsReferringTo( + final Urn relatedUrn, final Map aspectSpecs) { + + // FIXME: Can we not depend on entity service? 
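The updateAspect helper above is the standard recipe for re-ingesting a modified aspect. As a standalone sketch (urn, aspectName, and newAspect are assumed to be in scope; the final false requests synchronous ingestion so the result reflects the database write):

  MetadataChangeProposal proposal = new MetadataChangeProposal();
  proposal.setEntityUrn(urn);
  proposal.setEntityType(urn.getEntityType());
  proposal.setChangeType(ChangeType.UPSERT);
  proposal.setAspectName(aspectName);
  proposal.setAspect(GenericRecordUtils.serializeAspect(newAspect));
  AuditStamp stamp =
      new AuditStamp()
          .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR))
          .setTime(System.currentTimeMillis());
  IngestResult result = _entityService.ingestProposal(proposal, stamp, false);
  if (!result.isSqlCommitted()) {
    // surface the failure, mirroring handleError above
  }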
+ final EntityResponse entityResponse; + try { + entityResponse = + _entityService.getEntityV2(relatedUrn.getEntityType(), relatedUrn, aspectSpecs.keySet()); + } catch (URISyntaxException e) { + log.error("Unable to retrieve entity data for relatedUrn " + relatedUrn, e); + return Stream.empty(); } + // Find aspect which contains the relationship with the value we are looking for + return entityResponse.getAspects().values().stream() + // Get aspects which contain the relationship field specs found above + .filter(Objects::nonNull) + .filter(aspect -> aspectSpecs.containsKey(aspect.getName())); + } + + /** + * Utility method that determines whether a given aspect has an instance of a relationship of type + * relationType to a given entity type. + * + * @param aspect The aspect in which to search for the relationship. + * @param entityType The name of the entity the method checks against. + * @param relationType The name of the relationship to search for. + * @param aspectSpec The aspect spec in which to search for a concrete relationship with + * name=relationType and that targets the entityType passed by parameter. + * @return {@code True} if the aspect has a relationship with the intended conditions, {@code + * False} otherwise. + */ + private boolean hasRelationshipInstanceTo( + final Aspect aspect, + final String entityType, + final String relationType, + final AspectSpec aspectSpec) { + + final RecordTemplate recordTemplate = + RecordUtils.toRecordTemplate(aspectSpec.getDataTemplateClass(), aspect.data()); + + final Map> extractFields = + FieldExtractor.extractFields(recordTemplate, aspectSpec.getRelationshipFieldSpecs()); + + // Is there is any instance of the relationship specs defined in the aspect's spec extracted + // from the + // aspect record instance? + return findRelationshipFor(aspectSpec, relationType, entityType) + .map(extractFields::get) + .filter(Objects::nonNull) + .anyMatch(list -> !list.isEmpty()); + } + + /** + * Computes the set of aspect specs of an entity that contain a relationship of a given name to a + * specific entity type. + * + * @param relatedEntityType The name of the entity. + * @param relationshipType The name of the relationship. + * @param entitySpec The entity spec from which to retrieve the aspect specs, if any. + * @return A filtered dictionary of aspect name to aspect specs containing only aspects that have + * a relationship of name relationshipType to the given relatedEntityType. + */ + private Map getAspectSpecsReferringTo( + final String relatedEntityType, final String relationshipType, final EntitySpec entitySpec) { + return entitySpec.getAspectSpecMap().entrySet().stream() + .filter( + entry -> + findRelationshipFor(entry.getValue(), relationshipType, relatedEntityType) + .findAny() + .isPresent()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + /** + * Utility method to find the relationship specs within an AspectSpec with name relationshipName + * and which has relatedEntity name as a valid destination type. + * + * @param spec The aspect spec from which to extract relationship field specs. + * @param relationshipType The name of the relationship to find. + * @param entityType The name of the entity type (i.e: dataset, chart, usergroup, etc...) which + * the relationship is valid for. + * @return The list of relationship field specs which match the criteria. 
+ */ + private Stream findRelationshipFor( + final AspectSpec spec, final String relationshipType, final String entityType) { + return spec.getRelationshipFieldSpecs().stream() + .filter( + relationship -> + relationship.getRelationshipName().equals(relationshipType) + && relationship.getValidDestinationTypes().contains(entityType)); + } + + /** + * Entrypoint to handle the various errors that may occur during the execution of the delete + * entity service. + * + * @param error The error instance that provides context on what issue occured. + */ + private void handleError(final DeleteEntityServiceError error) { + // NO-OP for now. + } + + @AllArgsConstructor + @Data + private static class DeleteEntityServiceError { + String message; + DeleteEntityServiceErrorReason reason; + Map context; + } + + private enum DeleteEntityServiceErrorReason { + ENTITY_SERVICE_ASPECT_NOT_FOUND, + ENTITY_REGISTRY_SPEC_NOT_FOUND, + MCP_PROCESSOR_FAILED, + ASPECT_DELETE_FAILED, + CLONE_FAILED, + } + + @AllArgsConstructor + @Data + private static class EnrichedAspect { + String name; + Aspect aspect; + AspectSpec spec; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java index 58b5341c4ae0c..0a8b5880e5bce 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityUtils.java @@ -13,43 +13,45 @@ import java.util.ListIterator; import lombok.extern.slf4j.Slf4j; - /** - * Utility class that encapsulates the logic on how to modify a {@link RecordTemplate} in place to remove a single value - * following a concrete set of rules. + * Utility class that encapsulates the logic on how to modify a {@link RecordTemplate} in place to + * remove a single value following a concrete set of rules. * - * It does this by a recursive tree traversal method, based on an aspect path provided for a given aspect. - * This so that we don't have to scan the entire aspect for the value to be removed and then figure out how to apply - * logical rules based on upstream optionality definitions. + *
<p>
It does this by a recursive tree traversal method, based on an aspect path provided for a + * given aspect. This so that we don't have to scan the entire aspect for the value to be removed + * and then figure out how to apply logical rules based on upstream optionality definitions. * - * For more information see {@link #getAspectWithReferenceRemoved(String, RecordTemplate, DataSchema, PathSpec)} + *
<p>
For more information see {@link #getAspectWithReferenceRemoved(String, RecordTemplate, + * DataSchema, PathSpec)} */ @Slf4j public class DeleteEntityUtils { - private DeleteEntityUtils() { } + private DeleteEntityUtils() {} /** - * Utility method that removes fields from a given aspect based on its aspect spec that follows the following logic: - * - * 1. If field is optional and not part of an array → remove the field. - * 2. If is a field that is part of an array (has an `*` in the path spec) - * → go up to the nearest array and remove the element. - * Extra → If array only has 1 element which is being deleted→ optional rules (if optional set null, otherwise delete) - * 3. If field is non-optional and does not belong to an array delete if and only if aspect becomes empty. + * Utility method that removes fields from a given aspect based on its aspect spec that follows + * the following logic: * - * @param value Value to be removed from Aspect. - * @param aspect Aspect in which the value property exists. - * @param schema {@link DataSchema} of the aspect being processed. - * @param aspectPath Path within the aspect to where the value can be found. + *
<p>
1. If field is optional and not part of an array → remove the field. 2. If is a field that + * is part of an array (has an `*` in the path spec) → go up to the nearest array and remove the + * element. Extra → If array only has 1 element which is being deleted→ optional rules (if + * optional set null, otherwise delete) 3. If field is non-optional and does not belong to an + * array delete if and only if aspect becomes empty. * - * @return A deep copy of the aspect. Modified if the value was found and according to the logic specified above. - * Otherwise, a copy of the original aspect is returned. + * @param value Value to be removed from Aspect. + * @param aspect Aspect in which the value property exists. + * @param schema {@link DataSchema} of the aspect being processed. + * @param aspectPath Path within the aspect to where the value can be found. + * @return A deep copy of the aspect. Modified if the value was found and according to the logic + * specified above. Otherwise, a copy of the original aspect is returned. */ - public static Aspect getAspectWithReferenceRemoved(String value, RecordTemplate aspect, DataSchema schema, PathSpec aspectPath) { + public static Aspect getAspectWithReferenceRemoved( + String value, RecordTemplate aspect, DataSchema schema, PathSpec aspectPath) { try { final DataMap copy = aspect.copy().data(); - final DataComplex newValue = removeValueBasedOnPath(value, schema, copy, aspectPath.getPathComponents(), 0); + final DataComplex newValue = + removeValueBasedOnPath(value, schema, copy, aspectPath.getPathComponents(), 0); if (newValue == null) { // If the new value is null, we should remove the aspect. return null; @@ -63,49 +65,54 @@ public static Aspect getAspectWithReferenceRemoved(String value, RecordTemplate /** * This method chooses which sub method to invoke based on the path being iterated on. * - * @param value The value to be removed from the data complex object. Merely propagated down in this method. - * @param schema The schema of the data complex being visited. Merely propagated down in this method. - * @param o The data complex instance with the actual data being visited. - * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value - * to be removed. - * @param index The current index in the list of path components the method is meant to process. - * @return The updated DataComplex object without the value given as input. + * @param value The value to be removed from the data complex object. Merely propagated down in + * this method. + * @param schema The schema of the data complex being visited. Merely propagated down in this + * method. + * @param o The data complex instance with the actual data being visited. + * @param pathComponents The list of sub-strings representing the path from the root of the data + * complex to the value to be removed. + * @param index The current index in the list of path components the method is meant to process. + * @return The updated DataComplex object without the value given as input. 
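To make the rules concrete, a hedged sketch of rule 2 (the aspect instance, its spec, and the path are assumed; "owners/*/owner" is the typical ownership relationship path):

  // Removing one owner urn from an Ownership aspect. The path contains a `*`,
  // so the matching element is dropped from the nearest enclosing array; if it
  // was the only element, the optionality rules decide whether the array or the
  // whole aspect goes away (a null return means: delete the aspect).
  Aspect updated =
      DeleteEntityUtils.getAspectWithReferenceRemoved(
          "urn:li:corpuser:jdoe", // value to remove (hypothetical)
          ownershipAspect, // RecordTemplate, assumed in scope
          ownershipAspectSpec.getPegasusSchema(), // DataSchema of the aspect
          new PathSpec("owners", "*", "owner"));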
*/ - private static DataComplex removeValueBasedOnPath(String value, DataSchema schema, DataComplex o, List pathComponents, - int index) { + private static DataComplex removeValueBasedOnPath( + String value, DataSchema schema, DataComplex o, List pathComponents, int index) { final String subPath = pathComponents.get(index); // Processing an array if (subPath.equals("*")) { // Process each entry - return removeValueFromArray(value, (ArrayDataSchema) schema, (DataList) o, pathComponents, index); + return removeValueFromArray( + value, (ArrayDataSchema) schema, (DataList) o, pathComponents, index); } else { // Processing a map - return removeValueFromMap(value, (RecordDataSchema) schema, (DataMap) o, pathComponents, index); + return removeValueFromMap( + value, (RecordDataSchema) schema, (DataMap) o, pathComponents, index); } } /** - * This method is used to visit and remove values from DataMap instances if they are the leaf nodes of the original - * data complex object. + * This method is used to visit and remove values from DataMap instances if they are the leaf + * nodes of the original data complex object. * - * Note that this method has side effects and mutates the provided DataMap instance. + *
<p>
Note that this method has side effects and mutates the provided DataMap instance. * - * @param value The value to be removed from the data map object. - * @param spec The schema of the data complex being visited. Used to get information of the optionallity of - * the data map being processed. - * @param record The data list instance with the actual data being visited. - * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value - * to be removed. - * @param index The current index in the list of path components the method is meant to process. - * @return The updated DataComplex object without the value given as input. + * @param value The value to be removed from the data map object. + * @param spec The schema of the data complex being visited. Used to get information of the + * optionallity of the data map being processed. + * @param record The data list instance with the actual data being visited. + * @param pathComponents The list of sub-strings representing the path from the root of the data + * complex to the value to be removed. + * @param index The current index in the list of path components the method is meant to process. + * @return The updated DataComplex object without the value given as input. */ - private static DataComplex removeValueFromMap(String value, RecordDataSchema spec, DataMap record, List pathComponents, - int index) { + private static DataComplex removeValueFromMap( + String value, RecordDataSchema spec, DataMap record, List pathComponents, int index) { // If in the last component of the path spec if (index == pathComponents.size() - 1) { boolean canDelete = spec.getField(pathComponents.get(index)).getOptional(); - boolean valueExistsInRecord = record.getOrDefault(pathComponents.get(index), "").equals(value); + boolean valueExistsInRecord = + record.getOrDefault(pathComponents.get(index), "").equals(value); if (valueExistsInRecord) { if (canDelete) { record.remove(pathComponents.get(index)); @@ -114,17 +121,26 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe return null; } } else { - log.error("[Reference removal logic] Unable to find value {} in data map {} at path {}", value, record, - pathComponents.subList(0, index)); + log.error( + "[Reference removal logic] Unable to find value {} in data map {} at path {}", + value, + record, + pathComponents.subList(0, index)); } } else { // else traverse further down the tree. final String key = pathComponents.get(index); final boolean optionalField = spec.getField(key).getOptional(); - // Check if key exists, this may not exist because you are in wrong branch of the tree (i.e: iterating for an array) + // Check if key exists, this may not exist because you are in wrong branch of the tree (i.e: + // iterating for an array) if (record.containsKey(key)) { - final DataComplex result = removeValueBasedOnPath(value, spec.getField(key).getType(), (DataComplex) record.get(key), pathComponents, - index + 1); + final DataComplex result = + removeValueBasedOnPath( + value, + spec.getField(key).getType(), + (DataComplex) record.get(key), + pathComponents, + index + 1); if (result == null) { if (optionalField) { @@ -134,7 +150,9 @@ private static DataComplex removeValueFromMap(String value, RecordDataSchema spe } else { // Not optional and not the only field, then this is a bad delete. Need to throw. throw new UnsupportedOperationException( - String.format("Delete failed! Failed to field with name %s from DataMap. 
The field is required!", key)); + String.format( + "Delete failed! Failed to remove field with name %s from DataMap. The field is required!", + key)); } else { record.put(key, result); @@ -145,32 +163,40 @@ /** - * This method is used to visit and remove values from DataList instances if they are the leaf nodes of the original - * data complex object. + * This method is used to visit and remove values from DataList instances if they are the leaf + * nodes of the original data complex object. * - * Note that this method has side effects and mutates the provided DataMap instance. + *
<p>
Note that this method has side effects and mutates the provided DataMap instance. * - * @param value The value to be removed from the data list object. - * @param record The data list instance with the actual data being visited. - * @param pathComponents The list of sub-strings representing the path from the root of the data complex to the value - * to be removed. - * @param index The current index in the list of path components the method is meant to process. - * @return The updated DataComplex object without the value given as input. + * @param value The value to be removed from the data list object. + * @param record The data list instance with the actual data being visited. + * @param pathComponents The list of sub-strings representing the path from the root of the data + * complex to the value to be removed. + * @param index The current index in the list of path components the method is meant to process. + * @return The updated DataComplex object without the value given as input. */ - private static DataComplex removeValueFromArray(String value, ArrayDataSchema record, DataList aspectList, - List pathComponents, int index) { + private static DataComplex removeValueFromArray( + String value, + ArrayDataSchema record, + DataList aspectList, + List pathComponents, + int index) { // If in the last component of the path spec if (index == pathComponents.size() - 1) { final boolean found = aspectList.remove(value); if (!found) { - log.error(String.format("Unable to find value %s in aspect list %s at path %s", value, aspectList, - pathComponents.subList(0, index))); + log.error( + String.format( + "Unable to find value %s in aspect list %s at path %s", + value, aspectList, pathComponents.subList(0, index))); } } else { // else traverse further down the tree. final ListIterator it = aspectList.listIterator(); while (it.hasNext()) { final Object aspect = it.next(); - final DataComplex result = removeValueBasedOnPath(value, record.getItems(), (DataComplex) aspect, pathComponents, index + 1); + final DataComplex result = + removeValueBasedOnPath( + value, record.getItems(), (DataComplex) aspect, pathComponents, index + 1); if (result == null) { it.remove(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index b7607053df8e3..8654df4435cd6 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -33,11 +33,11 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface EntityService { /** * Just whether the entity/aspect exists + * * @param urn urn for the entity * @param aspectName aspect for the entity * @return exists or not @@ -45,35 +45,37 @@ public interface EntityService { Boolean exists(Urn urn, String aspectName); /** - * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided - * set of aspect names. + * Retrieves the latest aspects corresponding to a batch of {@link Urn}s based on a provided set + * of aspect names. * * @param urns set of urns to fetch aspects for * @param aspectNames aspects to fetch for each urn in urns set * @return a map of provided {@link Urn} to a List containing the requested aspects. 
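For illustration, the batch read this declares (a sketch; the dataset urns are hypothetical, and "ownership" and "status" are ordinary aspect names):

  Set<Urn> urns =
      ImmutableSet.of(
          UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,db.table_a,PROD)"),
          UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,db.table_b,PROD)"));
  Map<Urn, List<RecordTemplate>> latest =
      entityService.getLatestAspects(urns, ImmutableSet.of("ownership", "status"));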
*/ Map> getLatestAspects( - @Nonnull final Set urns, - @Nonnull final Set aspectNames); + @Nonnull final Set urns, @Nonnull final Set aspectNames); - Map getLatestAspectsForUrn(@Nonnull final Urn urn, @Nonnull final Set aspectNames); + Map getLatestAspectsForUrn( + @Nonnull final Urn urn, @Nonnull final Set aspectNames); /** * Retrieves an aspect having a specific {@link Urn}, name, & version. * - * Note that once we drop support for legacy aspect-specific resources, - * we should make this a protected method. Only visible for backwards compatibility. + *
<p>
Note that once we drop support for legacy aspect-specific resources, we should make this a + * protected method. Only visible for backwards compatibility. * * @param urn an urn associated with the requested aspect * @param aspectName name of the aspect requested * @param version specific version of the aspect being requests - * @return the {@link RecordTemplate} representation of the requested aspect object, or null if one cannot be found + * @return the {@link RecordTemplate} representation of the requested aspect object, or null if + * one cannot be found */ - RecordTemplate getAspect(@Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version); + RecordTemplate getAspect( + @Nonnull final Urn urn, @Nonnull final String aspectName, @Nonnull long version); /** - * Retrieves the latest aspects for the given urn as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given urn as dynamic aspect objects (Without having to + * define union objects) * * @param entityName name of the entity to fetch * @param urn urn of entity to fetch @@ -83,11 +85,12 @@ Map> getLatestAspects( EntityResponse getEntityV2( @Nonnull final String entityName, @Nonnull final Urn urn, - @Nonnull final Set aspectNames) throws URISyntaxException; + @Nonnull final Set aspectNames) + throws URISyntaxException; /** - * Retrieves the latest aspects for the given set of urns as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the latest aspects for the given set of urns as dynamic aspect objects (Without + * having to define union objects) * * @param entityName name of the entity to fetch * @param urns set of urns to fetch @@ -97,19 +100,21 @@ EntityResponse getEntityV2( Map getEntitiesV2( @Nonnull final String entityName, @Nonnull final Set urns, - @Nonnull final Set aspectNames) throws URISyntaxException; + @Nonnull final Set aspectNames) + throws URISyntaxException; /** - * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects - * (Without having to define union objects) + * Retrieves the aspects for the given set of urns and versions as dynamic aspect objects (Without + * having to define union objects) * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link Entity} object */ Map getEntitiesVersionedV2( - @Nonnull final Set versionedUrns, - @Nonnull final Set aspectNames) throws URISyntaxException; + @Nonnull final Set versionedUrns, @Nonnull final Set aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects @@ -121,20 +126,20 @@ Map getEntitiesVersionedV2( */ Map> getLatestEnvelopedAspects( // TODO: entityName is unused, can we remove this as a param? 
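The enveloped variants return the aspect value wrapped together with its version and system metadata instead of a bare RecordTemplate. A sketch of the single-aspect form declared further down in this interface (datasetUrn is assumed; the call declares throws Exception):

  EnvelopedAspect enveloped =
      entityService.getLatestEnvelopedAspect("dataset", datasetUrn, "schemaMetadata");
  // enveloped carries the serialized value plus version and audit information.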
- @Nonnull String entityName, - @Nonnull Set urns, - @Nonnull Set aspectNames) throws URISyntaxException; + @Nonnull String entityName, @Nonnull Set urns, @Nonnull Set aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspects for the given set of urns as a list of enveloped aspects * - * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized string + * @param versionedUrns set of urns to fetch with versions of aspects specified in a specialized + * string * @param aspectNames set of aspects to fetch * @return a map of {@link Urn} to {@link EnvelopedAspect} object */ Map> getVersionedEnvelopedAspects( - @Nonnull Set versionedUrns, - @Nonnull Set aspectNames) throws URISyntaxException; + @Nonnull Set versionedUrns, @Nonnull Set aspectNames) + throws URISyntaxException; /** * Retrieves the latest aspect for the given urn as a list of enveloped aspects @@ -145,9 +150,8 @@ Map> getVersionedEnvelopedAspects( * @return {@link EnvelopedAspect} object, or null if one cannot be found */ EnvelopedAspect getLatestEnvelopedAspect( - @Nonnull final String entityName, - @Nonnull final Urn urn, - @Nonnull final String aspectName) throws Exception; + @Nonnull final String entityName, @Nonnull final Urn urn, @Nonnull final String aspectName) + throws Exception; @Deprecated VersionedAspect getVersionedAspect(@Nonnull Urn urn, @Nonnull String aspectName, long version); @@ -158,19 +162,27 @@ ListResult listLatestAspects( final int start, final int count); - List ingestAspects(@Nonnull final Urn urn, @Nonnull List> aspectRecordsToIngest, - @Nonnull final AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata); + List ingestAspects( + @Nonnull final Urn urn, + @Nonnull List> aspectRecordsToIngest, + @Nonnull final AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata); - List ingestAspects(@Nonnull final AspectsBatch aspectsBatch, @Nonnull final AuditStamp auditStamp, - boolean emitMCL, boolean overwrite); + List ingestAspects( + @Nonnull final AspectsBatch aspectsBatch, + @Nonnull final AuditStamp auditStamp, + boolean emitMCL, + boolean overwrite); /** - * Ingests (inserts) a new version of an entity aspect & emits a {@link com.linkedin.mxe.MetadataAuditEvent}. + * Ingests (inserts) a new version of an entity aspect & emits a {@link + * com.linkedin.mxe.MetadataAuditEvent}. * - * This method runs a read -> write atomically in a single transaction, this is to prevent multiple IDs from being created. + *
<p>
This method runs a read -> write atomically in a single transaction, this is to prevent + * multiple IDs from being created. * - * Note that in general, this should not be used externally. It is currently serving upgrade scripts and - * is as such public. + *
<p>
Note that in general, this should not be used externally. It is currently serving upgrade + * scripts and is as such public. * * @param urn an urn associated with the new aspect * @param aspectName name of the aspect being inserted @@ -179,17 +191,22 @@ List ingestAspects(@Nonnull final AspectsBatch aspectsBatch, * @param systemMetadata * @return the {@link RecordTemplate} representation of the written aspect object */ - RecordTemplate ingestAspectIfNotPresent(@Nonnull Urn urn, @Nonnull String aspectName, - @Nonnull RecordTemplate newValue, @Nonnull AuditStamp auditStamp, @Nullable SystemMetadata systemMetadata); + RecordTemplate ingestAspectIfNotPresent( + @Nonnull Urn urn, + @Nonnull String aspectName, + @Nonnull RecordTemplate newValue, + @Nonnull AuditStamp auditStamp, + @Nullable SystemMetadata systemMetadata); // TODO: Why not in RetentionService? - String batchApplyRetention(Integer start, Integer count, Integer attemptWithVersion, String aspectName, - String urn); + String batchApplyRetention( + Integer start, Integer count, Integer attemptWithVersion, String aspectName, String urn); Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLike); // TODO: Extract this to a different service, doesn't need to be here - RestoreIndicesResult restoreIndices(@Nonnull RestoreIndicesArgs args, @Nonnull Consumer logger); + RestoreIndicesResult restoreIndices( + @Nonnull RestoreIndicesArgs args, @Nonnull Consumer logger); ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count); @@ -199,63 +216,76 @@ String batchApplyRetention(Integer start, Integer count, Integer attemptWithVers @Deprecated Map getEntities(@Nonnull final Set urns, @Nonnull Set aspectNames); - Pair, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, AspectSpec aspectSpec, + Pair, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + AspectSpec aspectSpec, @Nonnull final MetadataChangeLog metadataChangeLog); - Pair, Boolean> alwaysProduceMCLAsync(@Nonnull final Urn urn, @Nonnull String entityName, @Nonnull String aspectName, - @Nonnull final AspectSpec aspectSpec, @Nullable final RecordTemplate oldAspectValue, - @Nullable final RecordTemplate newAspectValue, @Nullable final SystemMetadata oldSystemMetadata, - @Nullable final SystemMetadata newSystemMetadata, @Nonnull AuditStamp auditStamp, - @Nonnull final ChangeType changeType); + Pair, Boolean> alwaysProduceMCLAsync( + @Nonnull final Urn urn, + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull final AspectSpec aspectSpec, + @Nullable final RecordTemplate oldAspectValue, + @Nullable final RecordTemplate newAspectValue, + @Nullable final SystemMetadata oldSystemMetadata, + @Nullable final SystemMetadata newSystemMetadata, + @Nonnull AuditStamp auditStamp, + @Nonnull final ChangeType changeType); RecordTemplate getLatestAspect(@Nonnull final Urn urn, @Nonnull final String aspectName); @Deprecated - void ingestEntities(@Nonnull final List entities, @Nonnull final AuditStamp auditStamp, + void ingestEntities( + @Nonnull final List entities, + @Nonnull final AuditStamp auditStamp, @Nonnull final List systemMetadata); @Deprecated SystemMetadata ingestEntity(Entity entity, AuditStamp auditStamp); @Deprecated - void ingestEntity(@Nonnull Entity entity, @Nonnull AuditStamp auditStamp, + void ingestEntity( + @Nonnull Entity entity, + @Nonnull AuditStamp auditStamp, @Nonnull SystemMetadata systemMetadata); void setRetentionService(RetentionService retentionService); AspectSpec getKeyAspectSpec(@Nonnull final 
Urn urn); - Optional getAspectSpec(@Nonnull final String entityName, @Nonnull final String aspectName); + Optional getAspectSpec( + @Nonnull final String entityName, @Nonnull final String aspectName); String getKeyAspectName(@Nonnull final Urn urn); /** * Generate default aspects if not present in the database. + * * @param urn entity urn * @param includedAspects aspects being written * @return additional aspects to be written */ - List> generateDefaultAspectsIfMissing(@Nonnull final Urn urn, - Map includedAspects); + List> generateDefaultAspectsIfMissing( + @Nonnull final Urn urn, Map includedAspects); /** - * Generate default aspects if the entity key aspect is NOT in the database **AND** - * the key aspect is being written, present in `includedAspects`. - * - * Does not automatically create key aspects. - * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration + * Generate default aspects if the entity key aspect is NOT in the database **AND** the key aspect + * is being written, present in `includedAspects`. * - * This version is more efficient in that it only generates additional writes - * when a new entity is being minted for the first time. The drawback is that it will not automatically - * add key aspects, in case the producer is not bothering to ensure that the entity exists - * before writing non-key aspects. + *
<p>
Does not automatically create key aspects. * + * @see EntityService#generateDefaultAspectsIfMissing if key aspects need autogeneration + *
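<p>A minimal calling sketch (names such as {@code entityService}, {@code datasetUrn} and
* {@code includedAspects} are illustrative only, not part of this interface):
*
* <pre>{@code
* Pair<Boolean, List<Pair<String, RecordTemplate>>> defaults =
*     entityService.generateDefaultAspectsOnFirstWrite(datasetUrn, includedAspects);
* boolean keyAspectExistedAlready = defaults.getFirst();
* }</pre>
*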
<p>
This version is more efficient in that it only generates additional writes when a new + * entity is being minted for the first time. The drawback is that it will not automatically + * add key aspects, in case the producer is not bothering to ensure that the entity exists + * before writing non-key aspects. * @param urn entity urn * @param includedAspects aspects being written * @return whether key aspect exists in database and the additional aspects to be written */ - Pair>> generateDefaultAspectsOnFirstWrite(@Nonnull final Urn urn, - Map includedAspects); + Pair>> generateDefaultAspectsOnFirstWrite( + @Nonnull final Urn urn, Map includedAspects); AspectSpec getKeyAspectSpec(@Nonnull final String entityName); @@ -263,24 +293,30 @@ Pair>> generateDefaultAspectsOnFirstW EntityRegistry getEntityRegistry(); - RollbackResult deleteAspect(String urn, String aspectName, @Nonnull Map conditions, boolean hardDelete); + RollbackResult deleteAspect( + String urn, String aspectName, @Nonnull Map conditions, boolean hardDelete); RollbackRunResult deleteUrn(Urn urn); - RollbackRunResult rollbackRun(List aspectRows, String runId, boolean hardDelete); + RollbackRunResult rollbackRun( + List aspectRows, String runId, boolean hardDelete); - RollbackRunResult rollbackWithConditions(List aspectRows, Map conditions, boolean hardDelete); + RollbackRunResult rollbackWithConditions( + List aspectRows, Map conditions, boolean hardDelete); - Set ingestProposal(AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async); + Set ingestProposal( + AspectsBatch aspectsBatch, AuditStamp auditStamp, final boolean async); /** * If you have more than 1 proposal use the {AspectsBatch} method + * * @param proposal the metadata proposal to ingest * @param auditStamp audit information * @param async async ingestion or sync ingestion * @return ingestion result */ - IngestResult ingestProposal(MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); + IngestResult ingestProposal( + MetadataChangeProposal proposal, AuditStamp auditStamp, final boolean async); Boolean exists(Urn urn); @@ -293,15 +329,17 @@ Pair>> generateDefaultAspectsOnFirstW /** * Builds the default browse path V2 aspects for all entities. * - * This method currently supports datasets, charts, dashboards, and data jobs best. Everything else - * will have a basic "Default" folder added to their browsePathV2. + *
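<p>Illustrative call (a sketch; {@code entityService} and {@code datasetUrn} are assumed
* names, and the boolean corresponds to {@code useContainerPaths} in the signature below):
*
* <pre>{@code
* BrowsePathsV2 defaultPaths = entityService.buildDefaultBrowsePathV2(datasetUrn, true);
* }</pre>
*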
<p>
This method currently supports datasets, charts, dashboards, and data jobs best. Everything + * else will have a basic "Default" folder added to their browsePathV2. */ @Nonnull - BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) throws URISyntaxException; + BrowsePathsV2 buildDefaultBrowsePathV2(final @Nonnull Urn urn, boolean useContainerPaths) + throws URISyntaxException; /** - * Allow internal use of the system entity client. Solves recursive dependencies between the EntityService - * and the SystemJavaEntityClient + * Allow internal use of the system entity client. Solves recursive dependencies between the + * EntityService and the SystemJavaEntityClient + * * @param systemEntityClient system entity client */ void setSystemEntityClient(SystemEntityClient systemEntityClient); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java index 27c51e050deff..1ef818559faae 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestProposalResult.java @@ -3,7 +3,6 @@ import com.linkedin.common.urn.Urn; import lombok.Value; - @Value public class IngestProposalResult { Urn urn; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java index 5e4ed6259a7f7..3e72a763fb17c 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/IngestResult.java @@ -8,11 +8,11 @@ @Builder(toBuilder = true) @Value public class IngestResult { - Urn urn; - AbstractBatchItem request; - boolean publishedMCL; - boolean processedMCL; - boolean publishedMCP; - boolean sqlCommitted; - boolean isUpdate; // update else insert + Urn urn; + AbstractBatchItem request; + boolean publishedMCL; + boolean processedMCL; + boolean publishedMCP; + boolean sqlCommitted; + boolean isUpdate; // update else insert } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java index 21b07e59a2bf0..e6bf82b764484 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/ListResult.java @@ -7,7 +7,8 @@ import lombok.Value; /** - * An immutable value class that holds the result of a list operation and other pagination information. + * An immutable value class that holds the result of a list operation and other pagination + * information. 
* * @param the result type */ diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java index 1cdd9965c4bfc..51519f48bd975 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RetentionService.java @@ -25,18 +25,16 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.Builder; import lombok.SneakyThrows; import lombok.Value; - /** * Service coupled with an {@link EntityServiceImpl} to handle aspect record retention. * - * TODO: This class is abstract with storage-specific implementations. It'd be nice to pull storage and retention - * concerns apart, let (into {@link AspectDao}) deal with storage, and merge all retention concerns into a single - * class. + *
<p>
TODO: This class is abstract with storage-specific implementations. It'd be nice to pull + * storage and retention concerns apart, let (into {@link AspectDao}) deal with storage, and merge + * all retention concerns into a single class. */ public abstract class RetentionService { protected static final String ALL = "*"; @@ -44,8 +42,8 @@ public abstract class RetentionService { protected abstract EntityService getEntityService(); /** - * Fetch retention policies given the entityName and aspectName - * Uses the entity service to fetch the latest retention policies set for the input entity and aspect + * Fetch retention policies given the entityName and aspectName Uses the entity service to fetch + * the latest retention policies set for the input entity and aspect * * @param entityName Name of the entity * @param aspectName Name of the aspect @@ -55,19 +53,24 @@ public Retention getRetention(@Nonnull String entityName, @Nonnull String aspect // Prioritized list of retention keys to fetch List retentionUrns = getRetentionKeys(entityName, aspectName); Map> fetchedAspects = - getEntityService().getLatestAspects(new HashSet<>(retentionUrns), ImmutableSet.of(Constants.DATAHUB_RETENTION_ASPECT)); + getEntityService() + .getLatestAspects( + new HashSet<>(retentionUrns), ImmutableSet.of(Constants.DATAHUB_RETENTION_ASPECT)); // Find the first retention info that is set among the prioritized list of retention keys above - Optional retentionInfo = retentionUrns.stream() - .flatMap(urn -> fetchedAspects.getOrDefault(urn, Collections.emptyList()) - .stream() - .filter(aspect -> aspect instanceof DataHubRetentionConfig)) - .map(retention -> (DataHubRetentionConfig) retention) - .findFirst(); + Optional retentionInfo = + retentionUrns.stream() + .flatMap( + urn -> + fetchedAspects.getOrDefault(urn, Collections.emptyList()).stream() + .filter(aspect -> aspect instanceof DataHubRetentionConfig)) + .map(retention -> (DataHubRetentionConfig) retention) + .findFirst(); return retentionInfo.map(DataHubRetentionConfig::getRetention).orElse(new Retention()); } // Get list of datahub retention keys that match the input entity name and aspect name - protected static List getRetentionKeys(@Nonnull String entityName, @Nonnull String aspectName) { + protected static List getRetentionKeys( + @Nonnull String entityName, @Nonnull String aspectName) { return ImmutableList.of( new DataHubRetentionKey().setEntityName(entityName).setAspectName(aspectName), new DataHubRetentionKey().setEntityName(entityName).setAspectName(ALL), @@ -79,22 +82,26 @@ protected static List getRetentionKeys(@Nonnull String entityName, @Nonnull } /** - * Set retention policy for given entity and aspect. If entity or aspect names are null, the policy is set as default + * Set retention policy for given entity and aspect. If entity or aspect names are null, the + * policy is set as default * - * @param entityName Entity name to apply policy to. If null, set as "*", - * meaning it will be the default for any entities without specified policy - * @param aspectName Aspect name to apply policy to. If null, set as "*", - * meaning it will be the default for any aspects without specified policy + * @param entityName Entity name to apply policy to. If null, set as "*", meaning it will be the + * default for any entities without specified policy + * @param aspectName Aspect name to apply policy to. 
If null, set as "*", meaning it will be the + * default for any aspects without specified policy * @param retentionConfig Retention policy */ @SneakyThrows - public boolean setRetention(@Nullable String entityName, @Nullable String aspectName, - @Nonnull DataHubRetentionConfig retentionConfig) { + public boolean setRetention( + @Nullable String entityName, + @Nullable String aspectName, + @Nonnull DataHubRetentionConfig retentionConfig) { validateRetention(retentionConfig.getRetention()); DataHubRetentionKey retentionKey = new DataHubRetentionKey(); retentionKey.setEntityName(entityName != null ? entityName : ALL); retentionKey.setAspectName(aspectName != null ? aspectName : ALL); - Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); + Urn retentionUrn = + EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); MetadataChangeProposal keyProposal = new MetadataChangeProposal(); GenericAspect keyAspect = GenericRecordUtils.serializeAspect(retentionKey); @@ -110,11 +117,13 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect aspectProposal.setAspectName(Constants.DATAHUB_RETENTION_ASPECT); AuditStamp auditStamp = - new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); AspectsBatch batch = buildAspectsBatch(List.of(keyProposal, aspectProposal)); return getEntityService().ingestProposal(batch, auditStamp, false).stream() - .anyMatch(IngestResult::isSqlCommitted); + .anyMatch(IngestResult::isSqlCommitted); } protected abstract AspectsBatch buildAspectsBatch(List mcps); @@ -122,28 +131,31 @@ public boolean setRetention(@Nullable String entityName, @Nullable String aspect /** * Delete the retention policy set for given entity and aspect. * - * @param entityName Entity name to apply policy to. If null, set as "*", - * meaning it will delete the default policy for any entities without specified policy - * @param aspectName Aspect name to apply policy to. If null, set as "*", - * meaning it will delete the default policy for any aspects without specified policy + * @param entityName Entity name to apply policy to. If null, set as "*", meaning it will delete + * the default policy for any entities without specified policy + * @param aspectName Aspect name to apply policy to. If null, set as "*", meaning it will delete + * the default policy for any aspects without specified policy */ public void deleteRetention(@Nullable String entityName, @Nullable String aspectName) { DataHubRetentionKey retentionKey = new DataHubRetentionKey(); retentionKey.setEntityName(entityName != null ? entityName : ALL); retentionKey.setAspectName(aspectName != null ? 
aspectName : ALL); - Urn retentionUrn = EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); + Urn retentionUrn = + EntityKeyUtils.convertEntityKeyToUrn(retentionKey, Constants.DATAHUB_RETENTION_ENTITY); getEntityService().deleteUrn(retentionUrn); } private void validateRetention(Retention retention) { if (retention.hasVersion()) { if (retention.getVersion().getMaxVersions() <= 0) { - throw new IllegalArgumentException("Invalid maxVersions: " + retention.getVersion().getMaxVersions()); + throw new IllegalArgumentException( + "Invalid maxVersions: " + retention.getVersion().getMaxVersions()); } } if (retention.hasTime()) { if (retention.getTime().getMaxAgeInSeconds() <= 0) { - throw new IllegalArgumentException("Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds()); + throw new IllegalArgumentException( + "Invalid maxAgeInSeconds: " + retention.getTime().getMaxAgeInSeconds()); } } } @@ -151,33 +163,39 @@ private void validateRetention(Retention retention) { /** * Apply retention policies given the urn and aspect name * - * @param retentionContexts urn, aspect name, and additional context that could be used to apply retention + * @param retentionContexts urn, aspect name, and additional context that could be used to apply + * retention */ public void applyRetentionWithPolicyDefaults(@Nonnull List retentionContexts) { - List withDefaults = retentionContexts.stream() - .map(context -> { - if (context.getRetentionPolicy().isEmpty()) { - Retention retentionPolicy = getRetention(context.getUrn().getEntityType(), context.getAspectName()); - return context.toBuilder() + List withDefaults = + retentionContexts.stream() + .map( + context -> { + if (context.getRetentionPolicy().isEmpty()) { + Retention retentionPolicy = + getRetention(context.getUrn().getEntityType(), context.getAspectName()); + return context.toBuilder() .retentionPolicy(Optional.of(retentionPolicy)) .build(); - } else { - return context; - } - }) - .filter(context -> context.getRetentionPolicy().isPresent() - && !context.getRetentionPolicy().get().data().isEmpty()) + } else { + return context; + } + }) + .filter( + context -> + context.getRetentionPolicy().isPresent() + && !context.getRetentionPolicy().get().data().isEmpty()) .collect(Collectors.toList()); applyRetention(withDefaults); } /** - * Apply retention policies given the urn and aspect name and policies. This protected - * method assumes that the policy is provided, however we likely need to fetch these - * from system configuration. + * Apply retention policies given the urn and aspect name and policies. This protected method + * assumes that the policy is provided, however we likely need to fetch these from system + * configuration. * - * Users of this should use {@link #applyRetentionWithPolicyDefaults(List)}) + *
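<p>For example, a caller could apply policy defaults to a single aspect like this (a sketch;
* {@code retentionService} and {@code datasetUrn} are illustrative names):
*
* <pre>{@code
* retentionService.applyRetentionWithPolicyDefaults(
*     List.of(
*         RetentionService.RetentionContext.builder()
*             .urn(datasetUrn)
*             .aspectName("datasetProperties")
*             .build()));
* }</pre>
*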
<p>
Users of this should use {@link #applyRetentionWithPolicyDefaults(List)}) * * @param retentionContexts Additional context that could be used to apply retention */ @@ -189,23 +207,19 @@ public void applyRetentionWithPolicyDefaults(@Nonnull List ret * @param entityName Name of the entity to apply retention to. If null, applies to all entities * @param aspectName Name of the aspect to apply retention to. If null, applies to all aspects */ - public abstract void batchApplyRetention(@Nullable String entityName, @Nullable String aspectName); + public abstract void batchApplyRetention( + @Nullable String entityName, @Nullable String aspectName); - /** - * Batch apply retention to all records within the start, end count - */ - public abstract BulkApplyRetentionResult batchApplyRetentionEntities(@Nonnull BulkApplyRetentionArgs args); + /** Batch apply retention to all records within the start, end count */ + public abstract BulkApplyRetentionResult batchApplyRetentionEntities( + @Nonnull BulkApplyRetentionArgs args); @Value @Builder(toBuilder = true) public static class RetentionContext { - @Nonnull - Urn urn; - @Nonnull - String aspectName; - @Builder.Default - Optional retentionPolicy = Optional.empty(); - @Builder.Default - Optional maxVersion = Optional.empty(); + @Nonnull Urn urn; + @Nonnull String aspectName; + @Builder.Default Optional retentionPolicy = Optional.empty(); + @Builder.Default Optional maxVersion = Optional.empty(); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java index 76a12a67b3aaf..9955a58c65339 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackResult.java @@ -1,13 +1,11 @@ package com.linkedin.metadata.entity; import com.linkedin.common.urn.Urn; - import com.linkedin.data.template.RecordTemplate; import com.linkedin.events.metadata.ChangeType; import com.linkedin.mxe.SystemMetadata; import lombok.Value; - @Value public class RollbackResult { public Urn urn; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java index 02776b7de4d09..a8c558df77e57 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/RollbackRunResult.java @@ -4,7 +4,6 @@ import java.util.List; import lombok.Value; - @Value public class RollbackRunResult { public List rowsRolledBack; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java index 06199814d30dd..a10c90bc45320 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/UpdateAspectResult.java @@ -6,12 +6,10 @@ import com.linkedin.metadata.entity.transactions.AbstractBatchItem; import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.SystemMetadata; +import java.util.concurrent.Future; import lombok.Builder; import lombok.Value; -import java.util.concurrent.Future; - - @Builder(toBuilder = true) @Value public class 
UpdateAspectResult { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java index 64511325d96d2..d8fcbe0b7d44d 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesArgs.java @@ -4,51 +4,51 @@ @Data public class RestoreIndicesArgs implements Cloneable { - public int start = 0; - public int batchSize = 10; - public int numThreads = 1; - public long batchDelayMs = 1; - public String aspectName; - public String urn; - public String urnLike; - - @Override - public RestoreIndicesArgs clone() { - try { - RestoreIndicesArgs clone = (RestoreIndicesArgs) super.clone(); - // TODO: copy mutable state here, so the clone can't change the internals of the original - return clone; - } catch (CloneNotSupportedException e) { - throw new AssertionError(); - } + public int start = 0; + public int batchSize = 10; + public int numThreads = 1; + public long batchDelayMs = 1; + public String aspectName; + public String urn; + public String urnLike; + + @Override + public RestoreIndicesArgs clone() { + try { + RestoreIndicesArgs clone = (RestoreIndicesArgs) super.clone(); + // TODO: copy mutable state here, so the clone can't change the internals of the original + return clone; + } catch (CloneNotSupportedException e) { + throw new AssertionError(); } - - public RestoreIndicesArgs setAspectName(String aspectName) { - this.aspectName = aspectName; - return this; - } - - public RestoreIndicesArgs setUrnLike(String urnLike) { - this.urnLike = urnLike; - return this; - } - - public RestoreIndicesArgs setUrn(String urn) { - this.urn = urn; - return this; - } - - public RestoreIndicesArgs setStart(Integer start) { - if (start != null) { - this.start = start; - } - return this; + } + + public RestoreIndicesArgs setAspectName(String aspectName) { + this.aspectName = aspectName; + return this; + } + + public RestoreIndicesArgs setUrnLike(String urnLike) { + this.urnLike = urnLike; + return this; + } + + public RestoreIndicesArgs setUrn(String urn) { + this.urn = urn; + return this; + } + + public RestoreIndicesArgs setStart(Integer start) { + if (start != null) { + this.start = start; } + return this; + } - public RestoreIndicesArgs setBatchSize(Integer batchSize) { - if (batchSize != null) { - this.batchSize = batchSize; - } - return this; + public RestoreIndicesArgs setBatchSize(Integer batchSize) { + if (batchSize != null) { + this.batchSize = batchSize; } + return this; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java index b7917d87f99fc..8479338660db0 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java @@ -4,13 +4,13 @@ @Data public class RestoreIndicesResult { - public int ignored = 0; - public int rowsMigrated = 0; - public long timeSqlQueryMs = 0; - public long timeGetRowMs = 0; - public long timeUrnMs = 0; - public long timeEntityRegistryCheckMs = 0; - public long aspectCheckMs = 0; - public long 
createRecordMs = 0; - public long sendMessageMs = 0; + public int ignored = 0; + public int rowsMigrated = 0; + public long timeSqlQueryMs = 0; + public long timeGetRowMs = 0; + public long timeUrnMs = 0; + public long timeEntityRegistryCheckMs = 0; + public long aspectCheckMs = 0; + public long createRecordMs = 0; + public long sendMessageMs = 0; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java index 0d9126026b9c8..89e337771752f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionArgs.java @@ -4,9 +4,9 @@ @Data public class BulkApplyRetentionArgs { - public Integer start; - public Integer count; - public Integer attemptWithVersion; - public String aspectName; - public String urn; + public Integer start; + public Integer count; + public Integer attemptWithVersion; + public String aspectName; + public String urn; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java index ef032496c8451..c84c7364534fc 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/retention/BulkApplyRetentionResult.java @@ -4,13 +4,13 @@ @Data public class BulkApplyRetentionResult { - public long argStart; - public long argCount; - public long argAttemptWithVersion; - public String argUrn; - public String argAspectName; - public long rowsHandled = 0; - public long timeRetentionPolicyMapMs; - public long timeRowMs; - public long timeApplyRetentionMs = 0; + public long argStart; + public long argCount; + public long argAttemptWithVersion; + public String argUrn; + public String argAspectName; + public long rowsHandled = 0; + public long timeRetentionPolicyMapMs; + public long timeRowMs; + public long timeApplyRetentionMs = 0; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java index 03a2b4e2a7f73..155385c62ecef 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AbstractBatchItem.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.entity.transactions; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.models.AspectSpec; @@ -8,85 +10,85 @@ import com.linkedin.metadata.models.registry.template.AspectTemplateEngine; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; - import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public abstract class AbstractBatchItem { - // urn an urn associated with the new aspect - public abstract Urn getUrn(); + // urn an urn associated with the new aspect + public abstract Urn getUrn(); - // 
aspectName name of the aspect being inserted - public abstract String getAspectName(); + // aspectName name of the aspect being inserted + public abstract String getAspectName(); - public abstract SystemMetadata getSystemMetadata(); + public abstract SystemMetadata getSystemMetadata(); - public abstract ChangeType getChangeType(); + public abstract ChangeType getChangeType(); - public abstract EntitySpec getEntitySpec(); + public abstract EntitySpec getEntitySpec(); - public abstract AspectSpec getAspectSpec(); + public abstract AspectSpec getAspectSpec(); - public abstract MetadataChangeProposal getMetadataChangeProposal(); + public abstract MetadataChangeProposal getMetadataChangeProposal(); - public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn); + public abstract void validateUrn(EntityRegistry entityRegistry, Urn urn); - @Nonnull - protected static SystemMetadata generateSystemMetadataIfEmpty(@Nullable SystemMetadata systemMetadata) { - if (systemMetadata == null) { - systemMetadata = new SystemMetadata(); - systemMetadata.setRunId(DEFAULT_RUN_ID); - systemMetadata.setLastObserved(System.currentTimeMillis()); - } - return systemMetadata; + @Nonnull + protected static SystemMetadata generateSystemMetadataIfEmpty( + @Nullable SystemMetadata systemMetadata) { + if (systemMetadata == null) { + systemMetadata = new SystemMetadata(); + systemMetadata.setRunId(DEFAULT_RUN_ID); + systemMetadata.setLastObserved(System.currentTimeMillis()); } + return systemMetadata; + } - protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) { - if (!mcp.hasAspectName() || !mcp.hasAspect()) { - throw new UnsupportedOperationException("Aspect and aspect name is required for create and update operations"); - } - - AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName()); + protected static AspectSpec validateAspect(MetadataChangeProposal mcp, EntitySpec entitySpec) { + if (!mcp.hasAspectName() || !mcp.hasAspect()) { + throw new UnsupportedOperationException( + "Aspect and aspect name is required for create and update operations"); + } - if (aspectSpec == null) { - throw new RuntimeException( - String.format("Unknown aspect %s for entity %s", mcp.getAspectName(), - mcp.getEntityType())); - } + AspectSpec aspectSpec = entitySpec.getAspectSpec(mcp.getAspectName()); - return aspectSpec; + if (aspectSpec == null) { + throw new RuntimeException( + String.format( + "Unknown aspect %s for entity %s", mcp.getAspectName(), mcp.getEntityType())); } - /** - * Validates that a change type is valid for the given aspect - * @param changeType - * @param aspectSpec - * @return - */ - protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { - if (aspectSpec.isTimeseries()) { - // Timeseries aspects only support UPSERT - return ChangeType.UPSERT.equals(changeType); - } else { - if (ChangeType.PATCH.equals(changeType)) { - return supportsPatch(aspectSpec); - } else { - return ChangeType.UPSERT.equals(changeType); - } - } + return aspectSpec; + } + + /** + * Validates that a change type is valid for the given aspect + * + * @param changeType + * @param aspectSpec + * @return + */ + protected static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { + if (aspectSpec.isTimeseries()) { + // Timeseries aspects only support UPSERT + return ChangeType.UPSERT.equals(changeType); + } else { + if (ChangeType.PATCH.equals(changeType)) { + return supportsPatch(aspectSpec); + } else { + return 
ChangeType.UPSERT.equals(changeType); + } } - - protected static boolean supportsPatch(AspectSpec aspectSpec) { - // Limit initial support to defined templates - if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) { - // Prevent unexpected behavior for aspects that do not currently have 1st class patch support, - // specifically having array based fields that require merging without specifying merge behavior can get into bad states - throw new UnsupportedOperationException("Aspect: " + aspectSpec.getName() + " does not currently support patch " - + "operations."); - } - return true; + } + + protected static boolean supportsPatch(AspectSpec aspectSpec) { + // Limit initial support to defined templates + if (!AspectTemplateEngine.SUPPORTED_TEMPLATES.contains(aspectSpec.getName())) { + // Prevent unexpected behavior for aspects that do not currently have 1st class patch support, + // specifically having array based fields that require merging without specifying merge + // behavior can get into bad states + throw new UnsupportedOperationException( + "Aspect: " + aspectSpec.getName() + " does not currently support patch " + "operations."); } + return true; + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java index 1d3da08130071..4f2cf6073bdac 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/transactions/AspectsBatch.java @@ -5,18 +5,22 @@ import java.util.Set; import java.util.stream.Collectors; - public interface AspectsBatch { - List getItems(); + List getItems(); - default boolean containsDuplicateAspects() { - return getItems().stream().map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode())) - .distinct().count() != getItems().size(); - } + default boolean containsDuplicateAspects() { + return getItems().stream() + .map(i -> String.format("%s_%s", i.getClass().getName(), i.hashCode())) + .distinct() + .count() + != getItems().size(); + } - default Map> getUrnAspectsMap() { - return getItems().stream() - .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName())) - .collect(Collectors.groupingBy(Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet()))); - } + default Map> getUrnAspectsMap() { + return getItems().stream() + .map(aspect -> Map.entry(aspect.getUrn().toString(), aspect.getAspectName())) + .collect( + Collectors.groupingBy( + Map.Entry::getKey, Collectors.mapping(Map.Entry::getValue, Collectors.toSet()))); + } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java index 56bddba5dc0fa..d27b0ed303972 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/Edge.java @@ -1,30 +1,20 @@ package com.linkedin.metadata.graph; import com.linkedin.common.urn.Urn; +import java.util.Map; import lombok.AllArgsConstructor; import lombok.Data; - -import java.util.Map; import lombok.EqualsAndHashCode; - @Data @AllArgsConstructor public class Edge { - @EqualsAndHashCode.Include - private Urn source; - @EqualsAndHashCode.Include - private Urn destination; - @EqualsAndHashCode.Include - private 
String relationshipType; - @EqualsAndHashCode.Exclude - private Long createdOn; - @EqualsAndHashCode.Exclude - private Urn createdActor; - @EqualsAndHashCode.Exclude - private Long updatedOn; - @EqualsAndHashCode.Exclude - private Urn updatedActor; - @EqualsAndHashCode.Exclude - private Map properties; + @EqualsAndHashCode.Include private Urn source; + @EqualsAndHashCode.Include private Urn destination; + @EqualsAndHashCode.Include private String relationshipType; + @EqualsAndHashCode.Exclude private Long createdOn; + @EqualsAndHashCode.Exclude private Urn createdActor; + @EqualsAndHashCode.Exclude private Long updatedOn; + @EqualsAndHashCode.Exclude private Urn updatedActor; + @EqualsAndHashCode.Exclude private Map properties; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java index d47d1e12cceb0..96a711d3875b3 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphClient.java @@ -6,13 +6,12 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface GraphClient { public static final Integer DEFAULT_PAGE_SIZE = 100; /** - * Returns a list of related entities for a given entity, set of edge types, and direction relative to the - * source node + * Returns a list of related entities for a given entity, set of edge types, and direction + * relative to the source node */ @Nonnull EntityRelationships getRelatedEntities( @@ -24,10 +23,15 @@ EntityRelationships getRelatedEntities( String actor); /** - * Returns lineage relationships for given entity in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * Returns lineage relationships for given entity in the DataHub graph. 
Lineage relationship + * denotes whether an entity is directly upstream or downstream of another entity */ @Nonnull - EntityLineageResult getLineageEntities(String rawUrn, LineageDirection direction, @Nullable Integer start, - @Nullable Integer count, int maxHops, String actor); + EntityLineageResult getLineageEntities( + String rawUrn, + LineageDirection direction, + @Nullable Integer start, + @Nullable Integer count, + int maxHops, + String actor); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java index 3b47f244086a9..cb4eadb8824d5 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphFilters.java @@ -5,7 +5,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class GraphFilters { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java index 2bbf2af1437d8..2afe907399745 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphIndexUtils.java @@ -18,10 +18,11 @@ @Slf4j public class GraphIndexUtils { - private GraphIndexUtils() { } + private GraphIndexUtils() {} @Nullable - private static List getActorList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List getActorList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -31,7 +32,8 @@ private static List getActorList(@Nullable final String path, @Nonnull fina } @Nullable - private static List getTimestampList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List getTimestampList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -41,7 +43,8 @@ private static List getTimestampList(@Nullable final String path, @Nonnull } @Nullable - private static List> getPropertiesList(@Nullable final String path, @Nonnull final RecordTemplate aspect) { + private static List> getPropertiesList( + @Nullable final String path, @Nonnull final RecordTemplate aspect) { if (path == null) { return null; } @@ -50,10 +53,9 @@ private static List> getPropertiesList(@Nullable final Strin return (List>) value; } - - @Nullable - private static boolean isValueListValid(@Nullable final List entryList, final int valueListSize) { + private static boolean isValueListValid( + @Nullable final List entryList, final int valueListSize) { if (entryList == null) { return false; } @@ -64,7 +66,8 @@ private static boolean isValueListValid(@Nullable final List entryList, final } @Nullable - private static Long getTimestamp(@Nullable final List timestampList, final int index, final int valueListSize) { + private static Long getTimestamp( + @Nullable final List timestampList, final int index, final int valueListSize) { if (isValueListValid(timestampList, valueListSize)) { return timestampList.get(index); } @@ -72,7 +75,8 @@ private static Long getTimestamp(@Nullable final List timestampList, final } @Nullable - private static Urn getActor(@Nullable final List actorList, final int index, final int valueListSize) { + private static Urn 
getActor( + @Nullable final List actorList, final int index, final int valueListSize) { if (isValueListValid(actorList, valueListSize)) { return actorList.get(index); } @@ -80,7 +84,10 @@ private static Urn getActor(@Nullable final List actorList, final int index } @Nullable - private static Map getProperties(@Nullable final List> propertiesList, final int index, final int valueListSize) { + private static Map getProperties( + @Nullable final List> propertiesList, + final int index, + final int valueListSize) { if (isValueListValid(propertiesList, valueListSize)) { return propertiesList.get(index); } @@ -88,8 +95,8 @@ private static Map getProperties(@Nullable final List extractGraphEdges( @@ -97,14 +104,18 @@ public static List extractGraphEdges( @Nonnull final RecordTemplate aspect, @Nonnull final Urn urn, @Nonnull final MetadataChangeLog event, - @Nonnull final boolean isNewAspectVersion - ) { + @Nonnull final boolean isNewAspectVersion) { final List edgesToAdd = new ArrayList<>(); - final String createdOnPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedOn(); - final String createdActorPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedActor(); - final String updatedOnPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedOn(); - final String updatedActorPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); - final String propertiesPath = extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); + final String createdOnPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedOn(); + final String createdActorPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getCreatedActor(); + final String updatedOnPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedOn(); + final String updatedActorPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getUpdatedActor(); + final String propertiesPath = + extractedFieldsEntry.getKey().getRelationshipAnnotation().getProperties(); final List createdOnList = getTimestampList(createdOnPath, aspect); final List createdActorList = getActorList(createdActorPath, aspect); @@ -114,27 +125,33 @@ public static List extractGraphEdges( int index = 0; for (Object fieldValue : extractedFieldsEntry.getValue()) { - Long createdOn = createdOnList != null - ? getTimestamp(createdOnList, index, extractedFieldsEntry.getValue().size()) - : null; - Urn createdActor = createdActorList != null - ? getActor(createdActorList, index, extractedFieldsEntry.getValue().size()) - : null; - Long updatedOn = updatedOnList != null - ? getTimestamp(updatedOnList, index, extractedFieldsEntry.getValue().size()) - : null; - Urn updatedActor = updatedActorList != null - ? getActor(updatedActorList, index, extractedFieldsEntry.getValue().size()) - : null; - final Map properties = propertiesList != null - ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) - : null; + Long createdOn = + createdOnList != null + ? getTimestamp(createdOnList, index, extractedFieldsEntry.getValue().size()) + : null; + Urn createdActor = + createdActorList != null + ? getActor(createdActorList, index, extractedFieldsEntry.getValue().size()) + : null; + Long updatedOn = + updatedOnList != null + ? getTimestamp(updatedOnList, index, extractedFieldsEntry.getValue().size()) + : null; + Urn updatedActor = + updatedActorList != null + ? 
getActor(updatedActorList, index, extractedFieldsEntry.getValue().size()) + : null; + final Map properties = + propertiesList != null + ? getProperties(propertiesList, index, extractedFieldsEntry.getValue().size()) + : null; SystemMetadata systemMetadata; if (isNewAspectVersion) { systemMetadata = event.hasSystemMetadata() ? event.getSystemMetadata() : null; } else { - systemMetadata = event.hasPreviousSystemMetadata() ? event.getPreviousSystemMetadata() : null; + systemMetadata = + event.hasPreviousSystemMetadata() ? event.getPreviousSystemMetadata() : null; } if ((createdOn == null || createdOn == 0) && systemMetadata != null) { @@ -160,9 +177,7 @@ public static List extractGraphEdges( createdActor, updatedOn, updatedActor, - properties - ) - ); + properties)); } catch (URISyntaxException e) { log.error("Invalid destination urn: {}", fieldValue, e); } @@ -183,7 +198,6 @@ public static Edge mergeEdges(@Nonnull final Edge oldEdge, @Nonnull final Edge n null, newEdge.getUpdatedOn(), newEdge.getUpdatedActor(), - newEdge.getProperties() - ); + newEdge.getProperties()); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java index 6f0ac4bc2f904..b3e713a906d01 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/GraphService.java @@ -18,9 +18,7 @@ import org.apache.commons.collections.CollectionUtils; public interface GraphService { - /** - * Return lineage registry to construct graph index - */ + /** Return lineage registry to construct graph index */ LineageRegistry getLineageRegistry(); /** @@ -29,157 +27,207 @@ public interface GraphService { void addEdge(final Edge edge); /** - * Adds or updates an edge to the graph. This creates the source and destination nodes, if they do not exist. + * Adds or updates an edge to the graph. This creates the source and destination nodes, if they do + * not exist. */ void upsertEdge(final Edge edge); /** * Remove an edge from the graph. + * * @param edge the edge to delete */ void removeEdge(final Edge edge); /** - * Find related entities (nodes) connected to a source entity via edges of given relationship types. Related entities - * can be filtered by source and destination type (use `null` for any type), by source and destination entity filter - * and relationship filter. Pagination of the result is controlled via `offset` and `count`. + * Find related entities (nodes) connected to a source entity via edges of given relationship + * types. Related entities can be filtered by source and destination type (use `null` for any + * type), by source and destination entity filter and relationship filter. Pagination of the + * result is controlled via `offset` and `count`. * - * Starting from a node as the source entity, determined by `sourceType` and `sourceEntityFilter`, - * related entities are found along the direction of edges (`RelationshipDirection.OUTGOING`) or in opposite - * direction of edges (`RelationshipDirection.INCOMING`). The destination entities are further filtered by `destinationType` - * and `destinationEntityFilter`, and then returned as related entities. + *
<p>
Starting from a node as the source entity, determined by `sourceType` and + * `sourceEntityFilter`, related entities are found along the direction of edges + * (`RelationshipDirection.OUTGOING`) or in opposite direction of edges + * (`RelationshipDirection.INCOMING`). The destination entities are further filtered by + * `destinationType` and `destinationEntityFilter`, and then returned as related entities. * - * This does not return duplicate related entities, even if entities are connected to source entities via multiple edges. - * An empty list of relationship types returns an empty result. + *
<p>
This does not return duplicate related entities, even if entities are connected to source + * entities via multiple edges. An empty list of relationship types returns an empty result. * - * In other words, the source and destination entity is not to be understood as the source and destination of the edge, - * but as the source and destination of "finding related entities", where always the destination entities are returned. - * This understanding is important when it comes to `RelationshipDirection.INCOMING`. The origin of the edge becomes - * the destination entity and the source entity is where the edge points to. + *
<p>
In other words, the source and destination entity is not to be understood as the source and + * destination of the edge, but as the source and destination of "finding related entities", where + * always the destination entities are returned. This understanding is important when it comes to + * `RelationshipDirection.INCOMING`. The origin of the edge becomes the destination entity and the + * source entity is where the edge points to. * - * Example I: - * dataset one --DownstreamOf-> dataset two --DownstreamOf-> dataset three + *
<p>
Example I: dataset one --DownstreamOf-> dataset two --DownstreamOf-> dataset three * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset two") - * - RelatedEntity("DownstreamOf", "dataset three") + *
<p>
findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset two") - RelatedEntity("DownstreamOf", "dataset three") * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset one") - * - RelatedEntity("DownstreamOf", "dataset two") + *
<p>
findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset one") - RelatedEntity("DownstreamOf", "dataset two") * - * Example II: - * dataset one --HasOwner-> user one + *
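<p>Example I expressed against this method's Java signature (a sketch; {@code graphService}
* and {@code datasetOneUrn} are illustrative, the QueryUtils helpers are the same ones used by
* {@code getLineage} below):
*
* <pre>{@code
* RelatedEntitiesResult downstream =
*     graphService.findRelatedEntities(
*         null,
*         QueryUtils.newFilter("urn", datasetOneUrn.toString()),
*         null,
*         QueryUtils.EMPTY_FILTER,
*         List.of("DownstreamOf"),
*         QueryUtils.newRelationshipFilter(
*             QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING),
*         0,
*         100);
* }</pre>
*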
<p>
Example II: dataset one --HasOwner-> user one * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - * - RelatedEntity("HasOwner", "user one") + *
<p>
findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], + * RelationshipFilter.setDirection(RelationshipDirection.OUTGOING), 0, 100) - + * RelatedEntity("HasOwner", "user one") * - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - * - RelatedEntity("HasOwner", "dataset one") + *
<p>
findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["HasOwner"], + * RelationshipFilter.setDirection(RelationshipDirection.INCOMING), 0, 100) - + * RelatedEntity("HasOwner", "dataset one") * - * Calling this method with {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` + *
<p>
Calling this method with {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` * is equivalent to the union of `OUTGOING` and `INCOMING` (without duplicates). * - * Example III: - * findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], RelationshipFilter.setDirection(RelationshipDirection.UNDIRECTED), 0, 100) - * - RelatedEntity("DownstreamOf", "dataset one") - * - RelatedEntity("DownstreamOf", "dataset two") - * - RelatedEntity("DownstreamOf", "dataset three") + *
<p>
Example III: findRelatedEntities(null, EMPTY_FILTER, null, EMPTY_FILTER, ["DownstreamOf"], + * RelationshipFilter.setDirection(RelationshipDirection.UNDIRECTED), 0, 100) - + * RelatedEntity("DownstreamOf", "dataset one") - RelatedEntity("DownstreamOf", "dataset two") - + * RelatedEntity("DownstreamOf", "dataset three") */ @Nonnull - RelatedEntitiesResult findRelatedEntities(@Nullable final List sourceTypes, @Nonnull final Filter sourceEntityFilter, - @Nullable final List destinationTypes, @Nonnull final Filter destinationEntityFilter, - @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter, - final int offset, final int count); - + RelatedEntitiesResult findRelatedEntities( + @Nullable final List sourceTypes, + @Nonnull final Filter sourceEntityFilter, + @Nullable final List destinationTypes, + @Nonnull final Filter destinationEntityFilter, + @Nonnull final List relationshipTypes, + @Nonnull final RelationshipFilter relationshipFilter, + final int offset, + final int count); /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + *
<p>
Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops) { return getLineage( entityUrn, direction, - new GraphFilters(new ArrayList(getLineageRegistry().getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), + new GraphFilters( + new ArrayList( + getLineageRegistry() + .getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), offset, count, - maxHops - ); + maxHops); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops Abstracts + * away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + *
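<p>A typical single-hop call through the simplest overload (a sketch; {@code graphService}
* and {@code datasetUrn} are illustrative, and {@code UPSTREAM} assumes the standard
* {@link LineageDirection} values):
*
* <pre>{@code
* EntityLineageResult upstream =
*     graphService.getLineage(datasetUrn, LineageDirection.UPSTREAM, 0, 100, 1);
* }</pre>
*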
<p>
Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, int offset, - int count, int maxHops, @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, + @Nullable Long endTimeMillis) { return getLineage( entityUrn, direction, - new GraphFilters(new ArrayList(getLineageRegistry().getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), + new GraphFilters( + new ArrayList( + getLineageRegistry() + .getEntitiesWithLineageToEntityType(entityUrn.getEntityType()))), offset, count, maxHops, startTimeMillis, - endTimeMillis - ); + endTimeMillis); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If entityTypes is not empty, - * will only return edges to entities that are within the entity types set. - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If + * entityTypes is not empty, will only return edges to entities that are within the entity types + * set. Abstracts away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + *
<p>
Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops) { + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops) { return getLineage(entityUrn, direction, graphFilters, offset, count, maxHops, null, null); } /** - * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If entityTypes is not empty, - * will only return edges to entities that are within the entity types set. - * Abstracts away the concept of relationship types + * Traverse from the entityUrn towards the input direction up to maxHops number of hops. If + * entityTypes is not empty, will only return edges to entities that are within the entity types + * set. Abstracts away the concept of relationship types * - * Unless overridden, it uses the lineage registry to fetch valid edge types and queries for them + *
<p>
Unless overridden, it uses the lineage registry to fetch valid edge types and queries for + * them */ @Nonnull - default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageDirection direction, - GraphFilters graphFilters, int offset, int count, int maxHops, @Nullable Long startTimeMillis, + default EntityLineageResult getLineage( + @Nonnull Urn entityUrn, + @Nonnull LineageDirection direction, + GraphFilters graphFilters, + int offset, + int count, + int maxHops, + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { if (maxHops > 1) { maxHops = 1; } List edgesToFetch = getLineageRegistry().getLineageRelationships(entityUrn.getEntityType(), direction); - Map> edgesByDirection = edgesToFetch.stream() - .collect(Collectors.partitioningBy(edgeInfo -> edgeInfo.getDirection() == RelationshipDirection.OUTGOING)); - EntityLineageResult result = new EntityLineageResult().setStart(offset) - .setCount(count) - .setRelationships(new LineageRelationshipArray()) - .setTotal(0); + Map> edgesByDirection = + edgesToFetch.stream() + .collect( + Collectors.partitioningBy( + edgeInfo -> edgeInfo.getDirection() == RelationshipDirection.OUTGOING)); + EntityLineageResult result = + new EntityLineageResult() + .setStart(offset) + .setCount(count) + .setRelationships(new LineageRelationshipArray()) + .setTotal(0); Set visitedUrns = new HashSet<>(); // Outgoing edges if (!CollectionUtils.isEmpty(edgesByDirection.get(true))) { List relationshipTypes = new ArrayList( - edgesByDirection.get(true).stream().map(LineageRegistry.EdgeInfo::getType).collect(Collectors.toSet())); + edgesByDirection.get(true).stream() + .map(LineageRegistry.EdgeInfo::getType) + .collect(Collectors.toSet())); // Fetch outgoing edges RelatedEntitiesResult outgoingEdges = - findRelatedEntities(null, QueryUtils.newFilter("urn", entityUrn.toString()), graphFilters.getAllowedEntityTypes(), + findRelatedEntities( + null, + QueryUtils.newFilter("urn", entityUrn.toString()), + graphFilters.getAllowedEntityTypes(), QueryUtils.EMPTY_FILTER, - relationshipTypes, QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING), offset, + relationshipTypes, + QueryUtils.newRelationshipFilter( + QueryUtils.EMPTY_FILTER, RelationshipDirection.OUTGOING), + offset, count); // Update offset and count to fetch the correct number of incoming edges below @@ -187,39 +235,59 @@ default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageD count = Math.max(0, count - outgoingEdges.getEntities().size()); result.setTotal(result.getTotal() + outgoingEdges.getTotal()); - outgoingEdges.getEntities().forEach(entity -> { - visitedUrns.add(entity.getUrn()); - try { - result.getRelationships() - .add(new LineageRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType())); - } catch (URISyntaxException ignored) { - } - }); + outgoingEdges + .getEntities() + .forEach( + entity -> { + visitedUrns.add(entity.getUrn()); + try { + result + .getRelationships() + .add( + new LineageRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType())); + } catch (URISyntaxException ignored) { + } + }); } // Incoming edges if (!CollectionUtils.isEmpty(edgesByDirection.get(false))) { List relationshipTypes = - edgesByDirection.get(false).stream().map(LineageRegistry.EdgeInfo::getType).collect(Collectors.toList()); + edgesByDirection.get(false).stream() + .map(LineageRegistry.EdgeInfo::getType) + .collect(Collectors.toList()); 
RelatedEntitiesResult incomingEdges = - findRelatedEntities(null, QueryUtils.newFilter("urn", entityUrn.toString()), graphFilters.getAllowedEntityTypes(), + findRelatedEntities( + null, + QueryUtils.newFilter("urn", entityUrn.toString()), + graphFilters.getAllowedEntityTypes(), QueryUtils.EMPTY_FILTER, - relationshipTypes, QueryUtils.newRelationshipFilter(QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING), offset, + relationshipTypes, + QueryUtils.newRelationshipFilter( + QueryUtils.EMPTY_FILTER, RelationshipDirection.INCOMING), + offset, count); result.setTotal(result.getTotal() + incomingEdges.getTotal()); - incomingEdges.getEntities().forEach(entity -> { - if (visitedUrns.contains(entity.getUrn())) { - return; - } - visitedUrns.add(entity.getUrn()); - try { - result.getRelationships() - .add(new LineageRelationship().setEntity(Urn.createFromString(entity.getUrn())) - .setType(entity.getRelationshipType())); - } catch (URISyntaxException ignored) { - } - }); + incomingEdges + .getEntities() + .forEach( + entity -> { + if (visitedUrns.contains(entity.getUrn())) { + return; + } + visitedUrns.add(entity.getUrn()); + try { + result + .getRelationships() + .add( + new LineageRelationship() + .setEntity(Urn.createFromString(entity.getUrn())) + .setType(entity.getRelationshipType())); + } catch (URISyntaxException ignored) { + } + }); } return result; @@ -231,26 +299,26 @@ default EntityLineageResult getLineage(@Nonnull Urn entityUrn, @Nonnull LineageD void removeNode(@Nonnull final Urn urn); /** - * Removes edges of the given relationship types from the given node after applying the relationship filter. + * Removes edges of the given relationship types from the given node after applying the + * relationship filter. * - * An empty list of relationship types removes nothing from the node. + *
<p>
An empty list of relationship types removes nothing from the node. * - * Calling this method with a {@link RelationshipDirection} `UNDIRECTED` in `relationshipFilter` - * is equivalent to the union of `OUTGOING` and `INCOMING` (without duplicates). + *
<p>
Calling this method with a {@link RelationshipDirection} `UNDIRECTED` in + * `relationshipFilter` is equivalent to the union of `OUTGOING` and `INCOMING` (without + * duplicates). */ - void removeEdgesFromNode(@Nonnull final Urn urn, @Nonnull final List relationshipTypes, + void removeEdgesFromNode( + @Nonnull final Urn urn, + @Nonnull final List relationshipTypes, @Nonnull final RelationshipFilter relationshipFilter); void configure(); - /** - * Removes all edges and nodes from the graph. - */ + /** Removes all edges and nodes from the graph. */ void clear(); - /** - * Whether or not this graph service supports multi-hop - */ + /** Whether or not this graph service supports multi-hop */ default boolean supportsMultiHop() { return false; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java index 2975d100933fd..be1b55655f671 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/graph/RelatedEntity.java @@ -6,13 +6,9 @@ @AllArgsConstructor @Data public class RelatedEntity { - /** - * How the entity is related, along which edge. - */ + /** How the entity is related, along which edge. */ String relationshipType; - /** - * Urn associated with the related entity. - */ + /** Urn associated with the related entity. */ String urn; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java index 27cb7fdec22d3..5676dc9ebac54 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/RecommendationsService.java @@ -12,7 +12,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class RecommendationsService { @@ -28,16 +27,20 @@ public RecommendationsService( } private void validateRecommendationSources(final List candidateSources) { - final Map moduleIdCount = candidateSources.stream() - .collect(Collectors.groupingBy(RecommendationSource::getModuleId, Collectors.counting())); - List moduleIdsWithDuplicates = moduleIdCount.entrySet() - .stream() - .filter(entry -> entry.getValue() > 1) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + final Map moduleIdCount = + candidateSources.stream() + .collect( + Collectors.groupingBy(RecommendationSource::getModuleId, Collectors.counting())); + List moduleIdsWithDuplicates = + moduleIdCount.entrySet().stream() + .filter(entry -> entry.getValue() > 1) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); if (!moduleIdsWithDuplicates.isEmpty()) { throw new IllegalArgumentException( - String.format("Found recommendations candidate sources with duplicate module IDs: %s", moduleIdsWithDuplicates.toString())); + String.format( + "Found recommendations candidate sources with duplicate module IDs: %s", + moduleIdsWithDuplicates.toString())); } } @@ -52,16 +55,23 @@ private void validateRecommendationSources(final List cand @Nonnull @WithSpan public List listRecommendations( - @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext, - int limit) { + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext, int limit) { // Get 
recommendation candidates from sources which are eligible, in parallel - final List candidateModules = ConcurrencyUtils.transformAndCollectAsync(_candidateSources.stream() - .filter(source -> source.isEligible(userUrn, requestContext)) - .collect(Collectors.toList()), source -> source.getRecommendationModule(userUrn, requestContext), (source, exception) -> { - log.error("Error while fetching candidate modules from source {}", source, exception); - return Optional.empty(); - }).stream().filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList()); + final List candidateModules = + ConcurrencyUtils.transformAndCollectAsync( + _candidateSources.stream() + .filter(source -> source.isEligible(userUrn, requestContext)) + .collect(Collectors.toList()), + source -> source.getRecommendationModule(userUrn, requestContext), + (source, exception) -> { + log.error( + "Error while fetching candidate modules from source {}", source, exception); + return Optional.empty(); + }) + .stream() + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); // Rank recommendation modules, which determines their ordering during rendering return _moduleRanker.rank(candidateModules, userUrn, requestContext, limit); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java index 5aa097ccbb497..9392f50b4749e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/DomainsCandidateSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DomainsCandidateSource extends EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } @@ -53,4 +53,3 @@ protected boolean isValueUrn() { return true; } } - diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java index e1ebc6d5e97be..a19909576d25b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationSource.java @@ -27,49 +27,36 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - /** * Base class for search aggregation based candidate source (e.g. 
top platform, top tags, top terms) - * Aggregates entities based on field value in the entity search index and gets the value with the most documents + * Aggregates entities based on field value in the entity search index and gets the value with the + * most documents */ @Slf4j @RequiredArgsConstructor public abstract class EntitySearchAggregationSource implements RecommendationSource { private final EntitySearchService _entitySearchService; - /** - * Field to aggregate on - */ + /** Field to aggregate on */ protected abstract String getSearchFieldName(); - /** - * Max number of contents in module - */ + /** Max number of contents in module */ protected abstract int getMaxContent(); - /** - * Whether the aggregate value is an urn - */ + /** Whether the aggregate value is an urn */ protected abstract boolean isValueUrn(); - /** - * Whether the urn candidate is valid - */ + /** Whether the urn candidate is valid */ protected boolean isValidCandidateUrn(Urn urn) { return true; } - /** - * Whether the string candidate is valid - */ + /** Whether the string candidate is valid */ protected boolean isValidCandidateValue(String candidateValue) { return true; } - /** - * Whether the candidate is valid - * Calls different functions if candidate is an Urn - */ + /** Whether the candidate is valid Calls different functions if candidate is an Urn */ protected boolean isValidCandidate(T candidate) { if (candidate instanceof Urn) { return isValidCandidateUrn((Urn) candidate); @@ -79,10 +66,11 @@ protected boolean isValidCandidate(T candidate) { @Override @WithSpan - public List getRecommendations(@Nonnull Urn userUrn, - @Nullable RecommendationRequestContext requestContext) { + public List getRecommendations( + @Nonnull Urn userUrn, @Nullable RecommendationRequestContext requestContext) { Map aggregationResult = - _entitySearchService.aggregateByValue(getEntityNames(), getSearchFieldName(), null, getMaxContent()); + _entitySearchService.aggregateByValue( + getEntityNames(), getSearchFieldName(), null, getMaxContent()); if (aggregationResult.isEmpty()) { return Collections.emptyList(); @@ -96,15 +84,21 @@ public List getRecommendations(@Nonnull Urn userUrn, } // If the aggregated values are urns, convert key into urns - Map urnCounts = aggregationResult.entrySet().stream().map(entry -> { - try { - Urn tagUrn = Urn.createFromString(entry.getKey()); - return Optional.of(Pair.of(tagUrn, entry.getValue())); - } catch (URISyntaxException e) { - log.error("Invalid tag urn {}", entry.getKey(), e); - return Optional.>empty(); - } - }).filter(Optional::isPresent).map(Optional::get).collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + Map urnCounts = + aggregationResult.entrySet().stream() + .map( + entry -> { + try { + Urn tagUrn = Urn.createFromString(entry.getKey()); + return Optional.of(Pair.of(tagUrn, entry.getValue())); + } catch (URISyntaxException e) { + log.error("Invalid tag urn {}", entry.getKey(), e); + return Optional.>empty(); + } + }) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); if (urnCounts.isEmpty()) { return Collections.emptyList(); @@ -128,13 +122,16 @@ private List> getTopKValues(Map countMap) { for (Map.Entry entry : countMap.entrySet()) { if (queue.size() < getMaxContent() && isValidCandidate(entry.getKey())) { queue.add(entry); - } else if (queue.size() > 0 && queue.peek().getValue() < entry.getValue() && isValidCandidate(entry.getKey())) { + } else if (queue.size() > 0 + && queue.peek().getValue() < entry.getValue() + 
&& isValidCandidate(entry.getKey())) { queue.poll(); queue.add(entry); } } - // Since priority queue polls in reverse order (nature of heaps), need to reverse order before returning + // Since priority queue polls in reverse order (nature of heaps), need to reverse order before + // returning final LinkedList> topK = new LinkedList<>(); while (!queue.isEmpty()) { topK.addFirst(queue.poll()); @@ -149,15 +146,25 @@ private Map mergeAggregation(Map first, Map RecommendationContent buildRecommendationContent(T candidate, long count) { // Set filters for platform - SearchParams searchParams = new SearchParams().setQuery("") - .setFilters(new CriterionArray( - ImmutableList.of(new Criterion().setField(getSearchFieldName()).setValue(candidate.toString())))); + SearchParams searchParams = + new SearchParams() + .setQuery("") + .setFilters( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(getSearchFieldName()) + .setValue(candidate.toString())))); ContentParams contentParams = new ContentParams().setCount(count); RecommendationContent content = new RecommendationContent(); if (candidate instanceof Urn) { content.setEntity((Urn) candidate); } - return content.setValue(candidate.toString()) - .setParams(new RecommendationParams().setSearchParams(searchParams).setContentParams(contentParams)); + return content + .setValue(candidate.toString()) + .setParams( + new RecommendationParams() + .setSearchParams(searchParams) + .setContentParams(contentParams)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java index 357a5df2edd44..e133e3dc75ff3 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecentlySearchedSource.java @@ -33,7 +33,6 @@ import org.opensearch.search.aggregations.bucket.terms.ParsedTerms; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class RecentlySearchedSource implements RecommendationSource { @@ -60,11 +59,16 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { boolean analyticsEnabled = false; try { - analyticsEnabled = _searchClient.indices() - .exists(new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), RequestOptions.DEFAULT); + analyticsEnabled = + _searchClient + .indices() + .exists( + new GetIndexRequest(_indexConvention.getIndexName(DATAHUB_USAGE_INDEX)), + RequestOptions.DEFAULT); } catch (IOException e) { log.error("Failed to check whether DataHub usage index exists"); } @@ -72,15 +76,15 @@ public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestCo } @Override - public List getRecommendations(@Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + public List getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { SearchRequest searchRequest = buildSearchRequest(userUrn); try (Timer.Context ignored = MetricUtils.timer(this.getClass(), "getRecentlySearched").time()) { - final 
SearchResponse searchResponse = _searchClient.search(searchRequest, RequestOptions.DEFAULT); + final SearchResponse searchResponse = + _searchClient.search(searchRequest, RequestOptions.DEFAULT); // extract results ParsedTerms parsedTerms = searchResponse.getAggregations().get(ENTITY_AGG_NAME); - return parsedTerms.getBuckets() - .stream() + return parsedTerms.getBuckets().stream() .map(bucket -> buildContent(bucket.getKeyAsString())) .filter(Optional::isPresent) .map(Optional::get) @@ -97,20 +101,26 @@ private SearchRequest buildSearchRequest(@Nonnull Urn userUrn) { SearchSourceBuilder source = new SearchSourceBuilder(); BoolQueryBuilder query = QueryBuilders.boolQuery(); // Filter for the entity view events of the user requesting recommendation - query.must(QueryBuilders.termQuery(DataHubUsageEventConstants.ACTOR_URN + ".keyword", userUrn.toString())); - query.must(QueryBuilders.termQuery(DataHubUsageEventConstants.TYPE, - DataHubUsageEventType.SEARCH_RESULTS_VIEW_EVENT.getType())); + query.must( + QueryBuilders.termQuery( + DataHubUsageEventConstants.ACTOR_URN + ".keyword", userUrn.toString())); + query.must( + QueryBuilders.termQuery( + DataHubUsageEventConstants.TYPE, + DataHubUsageEventType.SEARCH_RESULTS_VIEW_EVENT.getType())); query.must(QueryBuilders.rangeQuery("total").gt(0)); query.must(QueryBuilders.existsQuery(DataHubUsageEventConstants.QUERY)); source.query(query); // Find the entity with the largest last viewed timestamp String lastSearched = "last_searched"; - AggregationBuilder aggregation = AggregationBuilders.terms(ENTITY_AGG_NAME) - .field(DataHubUsageEventConstants.QUERY + ".keyword") - .size(MAX_CONTENT * 2) // Fetch more than max to account for post-filtering - .order(BucketOrder.aggregation(lastSearched, false)) - .subAggregation(AggregationBuilders.max(lastSearched).field(DataHubUsageEventConstants.TIMESTAMP)); + AggregationBuilder aggregation = + AggregationBuilders.terms(ENTITY_AGG_NAME) + .field(DataHubUsageEventConstants.QUERY + ".keyword") + .size(MAX_CONTENT * 2) // Fetch more than max to account for post-filtering + .order(BucketOrder.aggregation(lastSearched, false)) + .subAggregation( + AggregationBuilders.max(lastSearched).field(DataHubUsageEventConstants.TIMESTAMP)); source.aggregation(aggregation); source.size(0); @@ -127,7 +137,10 @@ private Optional buildContent(@Nonnull String query) { if (isQueryInvalid(query)) { return Optional.empty(); } - return Optional.of(new RecommendationContent().setValue(query) - .setParams(new RecommendationParams().setSearchParams(new SearchParams().setQuery(query)))); + return Optional.of( + new RecommendationContent() + .setValue(query) + .setParams( + new RecommendationParams().setSearchParams(new SearchParams().setQuery(query)))); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java index 7d43e3652b492..788ef728e294f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationSource.java @@ -11,25 +11,16 @@ import java.util.Optional; import javax.annotation.Nonnull; - -/** - * Base interface for defining a candidate source for recommendation module - */ +/** Base interface for defining a candidate source for recommendation module */ 
public interface RecommendationSource { - /** - * Returns the title of the module that is sourced (used in rendering) - */ + /** Returns the title of the module that is sourced (used in rendering) */ String getTitle(); - /** - * Returns a unique module id associated with the module - */ + /** Returns a unique module id associated with the module */ String getModuleId(); - /** - * Returns the template type used for rendering recommendations from this module - */ + /** Returns the template type used for rendering recommendations from this module */ RecommendationRenderType getRenderType(); /** @@ -49,7 +40,8 @@ public interface RecommendationSource { * @return list of recommendation candidates */ @WithSpan - List getRecommendations(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext); + List getRecommendations( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext); /** * Get the full recommendations module itself provided the request context. @@ -59,8 +51,7 @@ public interface RecommendationSource { * @return list of recommendation candidates */ default Optional getRecommendationModule( - @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext) { + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { if (!isEligible(userUrn, requestContext)) { return Optional.empty(); } @@ -70,9 +61,11 @@ default Optional getRecommendationModule( return Optional.empty(); } - return Optional.of(new RecommendationModule().setTitle(getTitle()) - .setModuleId(getModuleId()) - .setRenderType(getRenderType()) - .setContent(new RecommendationContentArray(recommendations))); + return Optional.of( + new RecommendationModule() + .setTitle(getTitle()) + .setModuleId(getModuleId()) + .setRenderType(getRenderType()) + .setContent(new RecommendationContentArray(recommendations))); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java index 3fd2b599b4d39..1fa47d1a13645 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtils.java @@ -4,7 +4,6 @@ import java.util.Set; import javax.annotation.Nonnull; - public class RecommendationUtils { /** @@ -14,10 +13,11 @@ public class RecommendationUtils { * @param entityTypes the set of valid entity types * @return true if the type of the urn is in the set of valid entity types, false otherwise. 
*/ - public static boolean isSupportedEntityType(@Nonnull final Urn urn, @Nonnull final Set entityTypes) { + public static boolean isSupportedEntityType( + @Nonnull final Urn urn, @Nonnull final Set entityTypes) { final String entityType = urn.getEntityType(); return entityTypes.contains(entityType); } - - private RecommendationUtils() { } + + private RecommendationUtils() {} } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java index 9562440889f63..3012e35baa607 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopPlatformsSource.java @@ -14,29 +14,29 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TopPlatformsSource extends EntitySearchAggregationSource { /** - * Set of entities that we want to consider for defining the top platform sources. - * This must match SearchUtils.SEARCHABLE_ENTITY_TYPES + * Set of entities that we want to consider for defining the top platform sources. This must match + * SearchUtils.SEARCHABLE_ENTITY_TYPES */ - private static final List SEARCHABLE_ENTITY_TYPES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_PRIMARY_KEY_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.TAG_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.NOTEBOOK_ENTITY_NAME - ); + private static final List SEARCHABLE_ENTITY_TYPES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME, + Constants.ML_PRIMARY_KEY_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.TAG_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME, + Constants.NOTEBOOK_ENTITY_NAME); + private final EntityService _entityService; private static final String PLATFORM = "platform"; @@ -61,7 +61,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java index 6563ea7dc4f91..317f956e1ca8a 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTagsSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TopTagsSource extends 
EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java index e885208a8b6db..6cdb5fdb65911 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/candidatesource/TopTermsSource.java @@ -8,7 +8,6 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class TopTermsSource extends EntitySearchAggregationSource { @@ -34,7 +33,8 @@ public RecommendationRenderType getRenderType() { } @Override - public boolean isEligible(@Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { + public boolean isEligible( + @Nonnull Urn userUrn, @Nonnull RecommendationRequestContext requestContext) { return requestContext.getScenario() == ScenarioType.HOME || requestContext.getScenario() == ScenarioType.SEARCH_RESULTS; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java index 7eae2e949d028..f09f83fd6ec25 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/RecommendationModuleRanker.java @@ -6,7 +6,6 @@ import java.util.List; import javax.annotation.Nonnull; - public interface RecommendationModuleRanker { /** * Rank and return the final list of modules @@ -17,6 +16,9 @@ public interface RecommendationModuleRanker { * @param limit Max number of modules to return * @return ranked list of modules */ - List rank(@Nonnull List candidates, @Nonnull Urn userUrn, - @Nonnull RecommendationRequestContext requestContext, int limit); + List rank( + @Nonnull List candidates, + @Nonnull Urn userUrn, + @Nonnull RecommendationRequestContext requestContext, + int limit); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java index cefb9aec5ac51..13bc5af91c9e9 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/recommendation/ranker/SimpleRecommendationRanker.java @@ -7,11 +7,13 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class SimpleRecommendationRanker implements RecommendationModuleRanker { @Override - public List rank(@Nonnull List candidates, @Nonnull Urn userUrn, - @Nullable RecommendationRequestContext requestContext, int limit) { + public List rank( + @Nonnull List 
candidates, + @Nonnull Urn userUrn, + @Nullable RecommendationRequestContext requestContext, + int limit) { return candidates.subList(0, Math.min(candidates.size(), limit)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java index 0a0be60969486..9b5630875cd15 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryService.java @@ -4,7 +4,8 @@ import org.apache.avro.Schema; /** - * Internal Service logic to be used to emulate Confluent's Schema Registry component within DataHub. + * Internal Service logic to be used to emulate Confluent's Schema Registry component within + * DataHub. */ public interface SchemaRegistryService { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java index 8f7403c6aa428..6e6671c08242b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/registry/SchemaRegistryServiceImpl.java @@ -18,7 +18,6 @@ import lombok.Getter; import org.apache.avro.Schema; - public class SchemaRegistryServiceImpl implements SchemaRegistryService { @AllArgsConstructor @@ -33,8 +32,7 @@ private enum TopicOrdinal { MAE_TOPIC(MetadataAuditEvent.getClassSchema()), DUHE_TOPIC(DataHubUpgradeHistoryEvent.getClassSchema()); - @Getter - private final Schema schema; + @Getter private final Schema schema; } private final Map _schemaMap; @@ -44,28 +42,45 @@ private enum TopicOrdinal { public SchemaRegistryServiceImpl(final TopicConvention convention) { this._schemaMap = new HashMap<>(); this._subjectToIdMap = HashBiMap.create(); - this._schemaMap.put(convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.ordinal()); - this._schemaMap.put(convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.ordinal()); - this._schemaMap.put(convention.getMetadataChangeLogTimeseriesTopicName(), + this._schemaMap.put( + convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataChangeProposalTopicName(), TopicOrdinal.MCP_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataChangeLogVersionedTopicName(), TopicOrdinal.MCL_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataChangeLogTimeseriesTopicName(), TopicOrdinal.MCL_TIMESERIES_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeLogTimeseriesTopicName(), + this._subjectToIdMap.put( + convention.getMetadataChangeLogTimeseriesTopicName(), TopicOrdinal.MCL_TIMESERIES_TOPIC.ordinal()); - this._schemaMap.put(convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.getSchema()); - 
this._subjectToIdMap.put(convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.ordinal()); + this._schemaMap.put( + convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getFailedMetadataChangeProposalTopicName(), TopicOrdinal.FMCP_TOPIC.ordinal()); this._schemaMap.put(convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.ordinal()); - this._schemaMap.put(convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.ordinal()); + this._subjectToIdMap.put( + convention.getPlatformEventTopicName(), TopicOrdinal.PE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getDataHubUpgradeHistoryTopicName(), TopicOrdinal.DUHE_TOPIC.ordinal()); // Adding legacy topics as they are still produced in the EntityService IngestAspect code path. - this._schemaMap.put(convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.ordinal()); - this._schemaMap.put(convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.ordinal()); - this._schemaMap.put(convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.getSchema()); - this._subjectToIdMap.put(convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataChangeEventTopicName(), TopicOrdinal.MCE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getFailedMetadataChangeEventTopicName(), TopicOrdinal.FMCE_TOPIC.ordinal()); + this._schemaMap.put( + convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.getSchema()); + this._subjectToIdMap.put( + convention.getMetadataAuditEventTopicName(), TopicOrdinal.MAE_TOPIC.ordinal()); } @Override diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java index 9eb67ca25dd8b..e7a115d1a0518 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/ResourceReference.java @@ -4,22 +4,15 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class ResourceReference { - /** - * The urn of an entity - */ + /** The urn of an entity */ Urn urn; - /** - * The type of the SubResource - */ + /** The type of the SubResource */ SubResourceType subResourceType; - /** - * The subresource being targeted - */ + /** The subresource being targeted */ String subResource; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java 
b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java index 6a23158aa1fd9..042c6d1407a13 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/resource/SubResourceType.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.resource; public enum SubResourceType { - /** - * A field in a dataset - */ + /** A field in a dataset */ DATASET_FIELD } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java index 9cd865bd888e2..09a63e769f025 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/EntitySearchService.java @@ -7,20 +7,16 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; - import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface EntitySearchService { void configure(); - /** - * Clear all data within the service - */ + /** Clear all data within the service */ void clear(); /** @@ -30,7 +26,6 @@ public interface EntitySearchService { */ long docCount(@Nonnull String entityName); - /** * Updates or inserts the given search document. * @@ -58,64 +53,90 @@ public interface EntitySearchService { void appendRunId(@Nonnull String entityName, @Nonnull Urn urn, @Nullable String runId); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. * - * Safe for non-structured, user input, queries with an attempt to provide some advanced features - * Impl + *
<p>
Safe for non-structured, user input, queries with an attempt to provide some advanced + * features Impl * * @param entityNames names of the entities * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags flags controlling search options - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - SearchResult search(@Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags); + SearchResult search( + @Nonnull List entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. * - * Safe for non-structured, user input, queries with an attempt to provide some advanced features - * Impl + *
<p>
Safe for non-structured, user input, queries with an attempt to provide some advanced + * features Impl * * @param entityNames names of the entities * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size the number of search hits to return * @param searchFlags flags controlling search options * @param facets list of facets we want aggregations for - * @return a {@link SearchResult} that contains a list of matched documents and related search result metadata + * @return a {@link SearchResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - SearchResult search(@Nonnull List entityNames, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, int from, int size, @Nullable SearchFlags searchFlags, @Nullable List facets); + SearchResult search( + @Nonnull List entityNames, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + int from, + int size, + @Nullable SearchFlags searchFlags, + @Nullable List facets); /** * Gets a list of documents after applying the input filters. * * @param entityName name of the entity - * @param filters the request map with fields and values to be applied as filters to the search query + * @param filters the request map with fields and values to be applied as filters to the search + * query * @param sortCriterion {@link SortCriterion} to be applied to search results * @param from index to start the search from * @param size number of search hits to return - * @return a {@link SearchResult} that contains a list of filtered documents and related search result metadata + * @return a {@link SearchResult} that contains a list of filtered documents and related search + * result metadata */ @Nonnull - SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, @Nullable SortCriterion sortCriterion, - int from, int size); + SearchResult filter( + @Nonnull String entityName, + @Nullable Filter filters, + @Nullable SortCriterion sortCriterion, + int from, + int size); /** * Returns a list of suggestions given type ahead query. * - *
<p>
The advanced auto complete can take filters and provides suggestions based on filtered context. + *
<p>
The advanced auto complete can take filters and provides suggestions based on filtered + * context. * * @param entityName name of the entity * @param query the type ahead query text @@ -125,21 +146,29 @@ SearchResult filter(@Nonnull String entityName, @Nullable Filter filters, @Nulla * @return A list of suggestions as string */ @Nonnull - AutoCompleteResult autoComplete(@Nonnull String entityName, @Nonnull String query, @Nullable String field, - @Nullable Filter requestParams, int limit); + AutoCompleteResult autoComplete( + @Nonnull String entityName, + @Nonnull String query, + @Nullable String field, + @Nullable Filter requestParams, + int limit); /** * Returns number of documents per field value given the field and filters * - * @param entityNames list of name of entities to aggregate across, if empty aggregate over all entities + * @param entityNames list of name of entities to aggregate across, if empty aggregate over all + * entities * @param field the field name for aggregate * @param requestParams filters to apply before aggregating * @param limit the number of aggregations to return * @return */ @Nonnull - Map aggregateByValue(@Nullable List entityNames, @Nonnull String field, - @Nullable Filter requestParams, int limit); + Map aggregateByValue( + @Nullable List entityNames, + @Nonnull String field, + @Nullable Filter requestParams, + int limit); /** * Gets a list of groups/entities that match given browse request. @@ -152,7 +181,11 @@ Map aggregateByValue(@Nullable List entityNames, @Nonnull * @return a {@link BrowseResult} that contains a list of groups/entities */ @Nonnull - BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable Filter requestParams, int from, + BrowseResult browse( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter requestParams, + int from, int size); /** @@ -166,7 +199,13 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable * @param count max number of results requested */ @Nonnull - public BrowseResultV2 browseV2(@Nonnull String entityName, @Nonnull String path, @Nullable Filter filter, @Nonnull String input, int start, int count); + public BrowseResultV2 browseV2( + @Nonnull String entityName, + @Nonnull String path, + @Nullable Filter filter, + @Nonnull String input, + int start, + int count); /** * Gets a list of paths for a given urn. @@ -179,41 +218,57 @@ BrowseResult browse(@Nonnull String entityName, @Nonnull String path, @Nullable List getBrowsePaths(@Nonnull String entityName, @Nonnull Urn urn); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. 
* * @param entities name of the entities to scroll across * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @param searchFlags flags controlling search options - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - ScrollResult fullTextScroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags); + ScrollResult fullTextScroll( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags); /** - * Gets a list of documents that match given search request. The results are aggregated and filters are applied to the - * search hits and not the aggregation results. + * Gets a list of documents that match given search request. The results are aggregated and + * filters are applied to the search hits and not the aggregation results. * * @param entities name of the entities to scroll across * @param input the search input text - * @param postFilters the request map with fields and values as filters to be applied to search hits + * @param postFilters the request map with fields and values as filters to be applied to search + * hits * @param sortCriterion {@link SortCriterion} to be applied to search results * @param scrollId opaque scroll identifier to pass to search service * @param size the number of search hits to return * @param searchFlags flags controlling search options - * @return a {@link ScrollResult} that contains a list of matched documents and related search result metadata + * @return a {@link ScrollResult} that contains a list of matched documents and related search + * result metadata */ @Nonnull - ScrollResult structuredScroll(@Nonnull List entities, @Nonnull String input, @Nullable Filter postFilters, - @Nullable SortCriterion sortCriterion, @Nullable String scrollId, @Nullable String keepAlive, int size, @Nullable SearchFlags searchFlags); + ScrollResult structuredScroll( + @Nonnull List entities, + @Nonnull String input, + @Nullable Filter postFilters, + @Nullable SortCriterion sortCriterion, + @Nullable String scrollId, + @Nullable String keepAlive, + int size, + @Nullable SearchFlags searchFlags); - /** - * Max result size returned by the underlying search backend - */ + /** Max result size returned by the underlying search backend */ int maxResultSize(); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java index 31b94425d6815..842cc51e11777 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/search/utils/QueryUtils.java 
@@ -1,5 +1,7 @@ package com.linkedin.metadata.search.utils; +import static com.linkedin.metadata.Constants.*; + import com.datahub.util.ModelUtils; import com.google.common.collect.ImmutableList; import com.linkedin.data.template.RecordTemplate; @@ -22,15 +24,11 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class QueryUtils { public static final Filter EMPTY_FILTER = new Filter().setOr(new ConjunctiveCriterionArray()); - private QueryUtils() { - } + private QueryUtils() {} // Creates new Criterion with field and value, using EQUAL condition. @Nonnull @@ -40,23 +38,31 @@ public static Criterion newCriterion(@Nonnull String field, @Nonnull String valu // Creates new Criterion with field, value and condition. @Nonnull - public static Criterion newCriterion(@Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { - return new Criterion().setField(field).setValue(value).setValues(new StringArray(ImmutableList.of(value))).setCondition(condition); + public static Criterion newCriterion( + @Nonnull String field, @Nonnull String value, @Nonnull Condition condition) { + return new Criterion() + .setField(field) + .setValue(value) + .setValues(new StringArray(ImmutableList.of(value))) + .setCondition(condition); } - // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL condition (default). + // Creates new Filter from a map of Criteria by removing null-valued Criteria and using EQUAL + // condition (default). @Nonnull public static Filter newFilter(@Nullable Map params) { if (params == null) { return EMPTY_FILTER; } - CriterionArray criteria = params.entrySet() - .stream() - .filter(e -> Objects.nonNull(e.getValue())) - .map(e -> newCriterion(e.getKey(), e.getValue())) - .collect(Collectors.toCollection(CriterionArray::new)); - return new Filter().setOr( - new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); + CriterionArray criteria = + params.entrySet().stream() + .filter(e -> Objects.nonNull(e.getValue())) + .map(e -> newCriterion(e.getKey(), e.getValue())) + .collect(Collectors.toCollection(CriterionArray::new)); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(criteria)))); } // Creates new Filter from a single Criterion with EQUAL condition (default). @@ -68,8 +74,12 @@ public static Filter newFilter(@Nonnull String field, @Nonnull String value) { // Create singleton filter with one criterion @Nonnull public static Filter newFilter(@Nonnull Criterion criterion) { - return new Filter().setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(criterion)))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd(new CriterionArray(ImmutableList.of(criterion)))))); } @Nonnull @@ -78,13 +88,18 @@ public static Filter filterOrDefaultEmptyFilter(@Nullable Filter filter) { } /** - * Converts a set of aspect classes to a set of {@link AspectVersion} with the version all set to latest. + * Converts a set of aspect classes to a set of {@link AspectVersion} with the version all set to + * latest. 
*/ @Nonnull - public static Set latestAspectVersions(@Nonnull Set> aspectClasses) { + public static Set latestAspectVersions( + @Nonnull Set> aspectClasses) { return aspectClasses.stream() - .map(aspectClass -> new AspectVersion().setAspect(ModelUtils.getAspectName(aspectClass)) - .setVersion(LATEST_VERSION)) + .map( + aspectClass -> + new AspectVersion() + .setAspect(ModelUtils.getAspectName(aspectClass)) + .setVersion(LATEST_VERSION)) .collect(Collectors.toSet()); } @@ -97,7 +112,9 @@ public static Set latestAspectVersions(@Nonnull Set criteria) { - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)))); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java index 1995e3c1b80a1..a735374b54858 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java @@ -13,7 +13,6 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - public class SecretService { private static final int LOWERCASE_ASCII_START = 97; private static final int LOWERCASE_ASCII_END = 122; @@ -82,7 +81,8 @@ public String decrypt(String encryptedValue) { } public String generateUrlSafeToken(int length) { - return _secureRandom.ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1) + return _secureRandom + .ints(length, LOWERCASE_ASCII_START, LOWERCASE_ASCII_END + 1) .mapToObj(i -> String.valueOf((char) i)) .collect(Collectors.joining()); } @@ -98,7 +98,8 @@ public byte[] generateSalt(int length) { return randomBytes; } - public String getHashedPassword(@Nonnull byte[] salt, @Nonnull String password) throws IOException { + public String getHashedPassword(@Nonnull byte[] salt, @Nonnull String password) + throws IOException { byte[] saltedPassword = saltPassword(salt, password); byte[] hashedPassword = _messageDigest.digest(saltedPassword); return _encoder.encodeToString(hashedPassword); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java index 7fac2e0124897..ce7473fb29dc4 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/BaseService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -20,15 +22,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - @Slf4j public class BaseService { protected final EntityClient entityClient; protected final Authentication systemAuthentication; - public BaseService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public BaseService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { this.entityClient = Objects.requireNonNull(entityClient); this.systemAuthentication = 
Objects.requireNonNull(systemAuthentication); } @@ -44,13 +45,13 @@ protected Map getTagsAspects( } try { - Map aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.GLOBAL_TAGS_ASPECT_NAME, - this.entityClient, - authentication - ); + Map aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.GLOBAL_TAGS_ASPECT_NAME, + this.entityClient, + authentication); final Map finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -83,13 +84,13 @@ protected Map getEditableSchemaMetadataAspects( } try { - Map aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - this.entityClient, - authentication - ); + Map aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + this.entityClient, + authentication); final Map finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -122,13 +123,13 @@ protected Map getOwnershipAspects( } try { - Map aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.OWNERSHIP_ASPECT_NAME, - this.entityClient, - authentication - ); + Map aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.OWNERSHIP_ASPECT_NAME, + this.entityClient, + authentication); final Map finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -161,13 +162,13 @@ protected Map getGlossaryTermsAspects( } try { - Map aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - this.entityClient, - authentication - ); + Map aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + this.entityClient, + authentication); final Map finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -200,13 +201,13 @@ protected Map getDomainsAspects( } try { - Map aspects = batchGetLatestAspect( - entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. - entityUrns, - Constants.DOMAINS_ASPECT_NAME, - this.entityClient, - authentication - ); + Map aspects = + batchGetLatestAspect( + entityUrns.stream().findFirst().get().getEntityType(), // TODO Improve this. + entityUrns, + Constants.DOMAINS_ASPECT_NAME, + this.entityClient, + authentication); final Map finalResult = new HashMap<>(); for (Urn entity : entityUrns) { @@ -228,7 +229,9 @@ protected Map getDomainsAspects( } } - protected void ingestChangeProposals(@Nonnull List changes, @Nonnull Authentication authentication) throws Exception { + protected void ingestChangeProposals( + @Nonnull List changes, @Nonnull Authentication authentication) + throws Exception { // TODO: Replace this with a batch ingest proposals endpoint. 
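    // Until that batch endpoint exists, each proposal is ingested with its own call, so a
    // failure partway through leaves the earlier proposals already applied.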
for (MetadataChangeProposal change : changes) { this.entityClient.ingestProposal(change, authentication); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java index 87b96e4cef498..10016ee89605b 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DataProductService.java @@ -22,22 +22,20 @@ import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.utils.EntityKeyUtils; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.List; import java.util.Objects; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; /** * This class is used to permit easy CRUD operations on a DataProduct * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. - * + *
<p>
Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class DataProductService { @@ -52,18 +50,15 @@ public DataProductService(@Nonnull EntityClient entityClient, @Nonnull GraphClie /** * Creates a new Data Product. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param name optional name of the DataProduct * @param description optional description of the DataProduct - * * @return the urn of the newly created DataProduct */ public Urn createDataProduct( - @Nullable String name, - @Nullable String description, - @Nonnull Authentication authentication) { + @Nullable String name, @Nullable String description, @Nonnull Authentication authentication) { // 1. Generate a unique id for the new DataProduct. final DataProductKey key = new DataProductKey(); @@ -76,10 +71,14 @@ public Urn createDataProduct( // 3. Write the new dataProduct to GMS, return the new URN. try { - final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_PRODUCT_ENTITY_NAME); - return UrnUtils.getUrn(_entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), authentication, - false)); + final Urn entityUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_PRODUCT_ENTITY_NAME); + return UrnUtils.getUrn( + _entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create DataProduct", e); } @@ -88,8 +87,8 @@ public Urn createDataProduct( /** * Updates an existing DataProduct. If a provided field is null, the previous value will be kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param urn the urn of the DataProduct * @param name optional name of the DataProduct @@ -108,7 +107,9 @@ public Urn updateDataProduct( DataProductProperties properties = getDataProductProperties(urn, authentication); if (properties == null) { - throw new IllegalArgumentException(String.format("Failed to update DataProduct. DataProduct with urn %s does not exist.", urn)); + throw new IllegalArgumentException( + String.format( + "Failed to update DataProduct. DataProduct with urn %s does not exist.", urn)); } // 2. Apply changes to existing DataProduct @@ -121,9 +122,12 @@ public Urn updateDataProduct( // 3. Write changes to GMS try { - return UrnUtils.getUrn(_entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - urn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), authentication, - false)); + return UrnUtils.getUrn( + _entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties), + authentication, + false)); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", urn), e); } @@ -132,16 +136,23 @@ public Urn updateDataProduct( /** * @param dataProductUrn the urn of the DataProduct * @param authentication the authentication to use - * - * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not exist. + * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not + * exist. */ @Nullable - public DataProductProperties getDataProductProperties(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { + public DataProductProperties getDataProductProperties( + @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - return new DataProductProperties(response.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + return new DataProductProperties( + response + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data()); } // No aspect found return null; @@ -150,41 +161,44 @@ public DataProductProperties getDataProductProperties(@Nonnull final Urn dataPro /** * @param dataProductUrn the urn of the DataProduct * @param authentication the authentication to use - * - * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not exist. + * @return an instance of {@link DataProductProperties} for the DataProduct, null if it does not + * exist. 
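+   * <p>Illustrative null-handling by a caller (receiver and variable names assumed):
+   * <pre>{@code
+   * DataProductProperties props =
+   *     dataProductService.getDataProductProperties(dataProductUrn, authentication);
+   * if (props == null) {
+   *   // no DataProductProperties aspect exists for this urn
+   * }
+   * }</pre>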
*/ @Nullable - public Domains getDataProductDomains(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { + public Domains getDataProductDomains( + @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - final EntityResponse response = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), - authentication - ); + final EntityResponse response = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), + authentication); if (response != null && response.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) { - return new Domains(response.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data()); + return new Domains( + response.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); } } /** - * Returns an instance of {@link EntityResponse} for the specified DataProduct urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified DataProduct urn, or null if one + * cannot be found. * * @param dataProductUrn the urn of the DataProduct * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the DataProduct, null if it does not exist. */ @Nullable - public EntityResponse getDataProductEntityResponse(@Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { + public EntityResponse getDataProductEntityResponse( + @Nonnull final Urn dataProductUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -192,79 +206,92 @@ public EntityResponse getDataProductEntityResponse(@Nonnull final Urn dataProduc Constants.DATA_PRODUCT_ENTITY_NAME, dataProductUrn, ImmutableSet.of(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - authentication - ); + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve DataProduct with urn %s", dataProductUrn), e); } } - /** - * Sets a given domain on a given Data Product. - */ - public void setDomain(@Nonnull final Urn dataProductUrn, @Nonnull final Urn domainUrn, @Nonnull final Authentication authentication) { + /** Sets a given domain on a given Data Product. 
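+   * <p>Illustrative usage; the urns below are assumed, and the caller is expected to have
+   * performed authorization checks already:
+   * <pre>{@code
+   * Urn dataProductUrn = UrnUtils.getUrn("urn:li:dataProduct:pet_of_the_week");
+   * Urn domainUrn = UrnUtils.getUrn("urn:li:domain:marketing");
+   * dataProductService.setDomain(dataProductUrn, domainUrn, systemAuthentication);
+   * }</pre>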
*/ + public void setDomain( + @Nonnull final Urn dataProductUrn, + @Nonnull final Urn domainUrn, + @Nonnull final Authentication authentication) { try { Domains domains = new Domains(); - EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), - authentication); + EntityResponse entityResponse = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME), + authentication); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DOMAINS_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(Constants.DOMAINS_ASPECT_NAME).getValue().data(); domains = new Domains(dataMap); } final UrnArray newDomains = new UrnArray(); newDomains.add(domainUrn); domains.setDomains(newDomains); - _entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - dataProductUrn, Constants.DOMAINS_ASPECT_NAME, domains), authentication, false); + _entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + dataProductUrn, Constants.DOMAINS_ASPECT_NAME, domains), + authentication, + false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to set domain for DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to set domain for DataProduct with urn %s", dataProductUrn), e); } } /** * Deletes an existing DataProduct with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * - * If the DataProduct does not exist, no exception will be thrown. + *
<p>
If the DataProduct does not exist, no exception will be thrown. * * @param dataProductUrn the urn of the DataProduct * @param authentication the current authentication */ public void deleteDataProduct( - @Nonnull Urn dataProductUrn, - @Nonnull Authentication authentication) { + @Nonnull Urn dataProductUrn, @Nonnull Authentication authentication) { try { _entityClient.deleteEntity( Objects.requireNonNull(dataProductUrn, "dataProductUrn must not be null"), Objects.requireNonNull(authentication, "authentication must not be null")); // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(dataProductUrn, authentication); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for DataProduct with urn %s", dataProductUrn), e); - } - }); + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(dataProductUrn, authentication); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for DataProduct with urn %s", + dataProductUrn), + e); + } + }); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete DataProduct with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to delete DataProduct with urn %s", dataProductUrn), e); } } /** * Sets a Data Product for a given list of entities. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * * @param dataProductUrn the urn of the Data Product to set - null if removing Data Product * @param resourceUrns the urns of the entities to add the Data Product to @@ -276,9 +303,11 @@ public void batchSetDataProduct( @Nonnull Authentication authentication, @Nonnull Urn actorUrn) { try { - DataProductProperties dataProductProperties = getDataProductProperties(dataProductUrn, authentication); + DataProductProperties dataProductProperties = + getDataProductProperties(dataProductUrn, authentication); if (dataProductProperties == null) { - throw new RuntimeException("Failed to batch set data product as data product does not exist"); + throw new RuntimeException( + "Failed to batch set data product as data product does not exist"); } DataProductAssociationArray dataProductAssociations = new DataProductAssociationArray(); @@ -286,15 +315,23 @@ public void batchSetDataProduct( dataProductAssociations = dataProductProperties.getAssets(); } - List existingResourceUrns = dataProductAssociations.stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList()); - List newResourceUrns = resourceUrns.stream().filter(urn -> !existingResourceUrns.contains(urn)).collect(Collectors.toList()); - - // unset existing data product on resources first as we only allow one data product on an entity at a time + List existingResourceUrns = + dataProductAssociations.stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList()); + List newResourceUrns = + resourceUrns.stream() + .filter(urn -> !existingResourceUrns.contains(urn)) + .collect(Collectors.toList()); + + // unset existing data product on resources first as we only allow one data product on an + // entity at a time for (Urn resourceUrn : resourceUrns) { unsetDataProduct(resourceUrn, authentication, actorUrn); } - AuditStamp nowAuditStamp = new AuditStamp().setTime(System.currentTimeMillis()).setActor(actorUrn); + AuditStamp nowAuditStamp = + new AuditStamp().setTime(System.currentTimeMillis()).setActor(actorUrn); for (Urn resourceUrn : newResourceUrns) { DataProductAssociation association = new DataProductAssociation(); association.setDestinationUrn(resourceUrn); @@ -306,53 +343,59 @@ public void batchSetDataProduct( dataProductProperties.setAssets(dataProductAssociations); _entityClient.ingestProposal( AspectUtils.buildMetadataChangeProposal( - dataProductUrn, - Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - dataProductProperties), + dataProductUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties), authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update assets for %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to update assets for %s", dataProductUrn), e); } } /** * Unsets a Data Product for a given entity. Remove this entity from its data product(s). * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation * * @param resourceUrn the urn of the entity to remove the Data Product from * @param authentication the current authentication */ public void unsetDataProduct( - @Nonnull Urn resourceUrn, - @Nonnull Authentication authentication, - @Nonnull Urn actorUrn) { + @Nonnull Urn resourceUrn, @Nonnull Authentication authentication, @Nonnull Urn actorUrn) { try { List relationshipTypes = ImmutableList.of("DataProductContains"); - EntityRelationships relationships = _graphClient.getRelatedEntities( - resourceUrn.toString(), - relationshipTypes, - RelationshipDirection.INCOMING, - 0, - 10, // should never be more than 1 as long as we only allow one - actorUrn.toString()); + EntityRelationships relationships = + _graphClient.getRelatedEntities( + resourceUrn.toString(), + relationshipTypes, + RelationshipDirection.INCOMING, + 0, + 10, // should never be more than 1 as long as we only allow one + actorUrn.toString()); if (relationships.hasRelationships() && relationships.getRelationships().size() > 0) { - relationships.getRelationships().forEach(relationship -> { - Urn dataProductUrn = relationship.getEntity(); - removeEntityFromDataProduct(dataProductUrn, resourceUrn, authentication); - }); + relationships + .getRelationships() + .forEach( + relationship -> { + Urn dataProductUrn = relationship.getEntity(); + removeEntityFromDataProduct(dataProductUrn, resourceUrn, authentication); + }); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset data product for %s", resourceUrn), e); + throw new RuntimeException( + String.format("Failed to unset data product for %s", resourceUrn), e); } } - private void removeEntityFromDataProduct(@Nonnull Urn dataProductUrn, @Nonnull Urn resourceUrn, @Nonnull Authentication authentication) { + private void removeEntityFromDataProduct( + @Nonnull Urn dataProductUrn, + @Nonnull Urn resourceUrn, + @Nonnull Authentication authentication) { try { - DataProductProperties dataProductProperties = getDataProductProperties(dataProductUrn, authentication); + DataProductProperties dataProductProperties = + getDataProductProperties(dataProductUrn, authentication); if (dataProductProperties == null) { throw new RuntimeException("Failed to unset data product as data product does not exist"); } @@ -373,23 +416,22 @@ private void removeEntityFromDataProduct(@Nonnull Urn dataProductUrn, @Nonnull U dataProductProperties.setAssets(finalAssociations); _entityClient.ingestProposal( AspectUtils.buildMetadataChangeProposal( - dataProductUrn, - Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - dataProductProperties), + dataProductUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties), authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset data product for %s", resourceUrn), e); + throw new RuntimeException( + String.format("Failed to unset data product for %s", resourceUrn), e); } } public boolean verifyEntityExists( - @Nonnull Urn entityUrn, - @Nonnull Authentication authentication) { + @Nonnull Urn entityUrn, @Nonnull Authentication authentication) { try { return _entityClient.exists(entityUrn, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to determine if entity with urn %s exists", entityUrn), e); + throw new RuntimeException( + String.format("Failed to determine if entity with urn %s exists", entityUrn), 
e); } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java index 782a261675add..c18122eb9bb31 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/DomainService.java @@ -1,10 +1,14 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.domain.Domains; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.mxe.MetadataChangeProposal; @@ -14,19 +18,15 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class DomainService extends BaseService { - public DomainService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public DomainService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -47,14 +47,19 @@ public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List resources, @Nonnull Authentication authentication) { + public void batchSetDomain( + @Nonnull Urn domainUrn, + @Nonnull List resources, + @Nonnull Authentication authentication) { log.debug("Batch setting Domain to entities. domain: {}, resources: {}", resources, domainUrn); try { setDomainForResources(domainUrn, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - domainUrn, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + domainUrn, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -65,7 +70,8 @@ public void batchSetDomain(@Nonnull Urn domainUrn, @Nonnull List domainUrns, @Nonnull List resources) { + public void batchAddDomains( + @Nonnull List domainUrns, @Nonnull List resources) { batchAddDomains(domainUrns, resources, this.systemAuthentication); } @@ -76,14 +82,20 @@ public void batchAddDomains(@Nonnull List domainUrns, @Nonnull List domainUrns, @Nonnull List resources, @Nonnull Authentication authentication) { - log.debug("Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); + public void batchAddDomains( + @Nonnull List domainUrns, + @Nonnull List resources, + @Nonnull Authentication authentication) { + log.debug( + "Batch adding Domains to entities. 
domains: {}, resources: {}", resources, domainUrns); try { addDomainsToResources(domainUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Domains %s to resources with urns %s!", - domainUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Domains %s to resources with urns %s!", + domainUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -103,13 +115,16 @@ public void batchUnsetDomain(@Nonnull List resources) { * @param resources references to the resources to change * @param authentication authentication to use when making the change */ - public void batchUnsetDomain(@Nonnull List resources, @Nullable Authentication authentication) { + public void batchUnsetDomain( + @Nonnull List resources, @Nullable Authentication authentication) { log.debug("Batch unsetting Domains to entities. resources: {}", resources); try { unsetDomainForResources(resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to unset add Domain for resources with urns %s!", - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to unset add Domain for resources with urns %s!", + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -120,7 +135,8 @@ public void batchUnsetDomain(@Nonnull List resources, @Nullab * @param domainUrns the urns of domains to remove * @param resources references to the resources to change */ - public void batchRemoveDomains(@Nonnull List domainUrns, @Nonnull List resources) { + public void batchRemoveDomains( + @Nonnull List domainUrns, @Nonnull List resources) { batchRemoveDomains(domainUrns, resources, this.systemAuthentication); } @@ -131,23 +147,29 @@ public void batchRemoveDomains(@Nonnull List domainUrns, @Nonnull List domainUrns, @Nonnull List resources, @Nullable Authentication authentication) { - log.debug("Batch adding Domains to entities. domains: {}, resources: {}", resources, domainUrns); + public void batchRemoveDomains( + @Nonnull List domainUrns, + @Nonnull List resources, + @Nullable Authentication authentication) { + log.debug( + "Batch adding Domains to entities. 
domains: {}, resources: {}", resources, domainUrns); try { removeDomainsFromResources(domainUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Domains %s to resources with urns %s!", - domainUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), - e); + throw new RuntimeException( + String.format( + "Failed to batch add Domains %s to resources with urns %s!", + domainUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + e); } } private void setDomainForResources( com.linkedin.common.urn.Urn domainUrn, List resources, - @Nullable Authentication authentication - ) throws Exception { + @Nullable Authentication authentication) + throws Exception { final List changes = buildSetDomainProposals(domainUrn, resources); ingestChangeProposals(changes, authentication); } @@ -155,40 +177,37 @@ private void setDomainForResources( private void addDomainsToResources( List domainUrns, List resources, - @Nonnull Authentication authentication - ) throws Exception { - final List changes = buildAddDomainsProposals(domainUrns, resources, authentication); + @Nonnull Authentication authentication) + throws Exception { + final List changes = + buildAddDomainsProposals(domainUrns, resources, authentication); ingestChangeProposals(changes, authentication); } private void unsetDomainForResources( - List resources, - @Nonnull Authentication authentication - ) throws Exception { + List resources, @Nonnull Authentication authentication) throws Exception { final List changes = buildUnsetDomainProposals(resources); ingestChangeProposals(changes, authentication); } public void removeDomainsFromResources( - List domains, - List resources, - @Nonnull Authentication authentication - ) throws Exception { - final List changes = buildRemoveDomainsProposals(domains, resources, authentication); + List domains, List resources, @Nonnull Authentication authentication) + throws Exception { + final List changes = + buildRemoveDomainsProposals(domains, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting @Nonnull List buildSetDomainProposals( - com.linkedin.common.urn.Urn domainUrn, - List resources - ) { + com.linkedin.common.urn.Urn domainUrn, List resources) { List changes = new ArrayList<>(); for (ResourceReference resource : resources) { Domains domains = new Domains(); domains.setDomains(new UrnArray(ImmutableList.of(domainUrn))); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + changes.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return changes; } @@ -198,40 +217,40 @@ List buildSetDomainProposals( List buildAddDomainsProposals( List domainUrns, List resources, - @Nonnull Authentication authentication - ) throws URISyntaxException { + @Nonnull Authentication authentication) + throws URISyntaxException { - final Map domainAspects = getDomainsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Domains(), - authentication - ); + final Map domainAspects = + getDomainsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Domains(), + authentication); - final List proposals = new ArrayList<>(); - for (ResourceReference resource : resources) { - Domains domains = domainAspects.get(resource.getUrn()); - if (domains == null) { - continue; - } - if 
(!domains.hasDomains()) { - domains.setDomains(new UrnArray()); - } - addDomainsIfNotExists(domains, domainUrns); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); - } - return proposals; + final List proposals = new ArrayList<>(); + for (ResourceReference resource : resources) { + Domains domains = domainAspects.get(resource.getUrn()); + if (domains == null) { + continue; + } + if (!domains.hasDomains()) { + domains.setDomains(new UrnArray()); + } + addDomainsIfNotExists(domains, domainUrns); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + } + return proposals; } @VisibleForTesting @Nonnull - List buildUnsetDomainProposals( - List resources - ) { + List buildUnsetDomainProposals(List resources) { final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { Domains domains = new Domains(); domains.setDomains(new UrnArray(Collections.emptyList())); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + changes.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return changes; } @@ -241,13 +260,12 @@ List buildUnsetDomainProposals( List buildRemoveDomainsProposals( List domainUrns, List resources, - @Nonnull Authentication authentication - ) { - final Map domainAspects = getDomainsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new Domains(), - authentication - ); + @Nonnull Authentication authentication) { + final Map domainAspects = + getDomainsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new Domains(), + authentication); final List proposals = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -259,7 +277,8 @@ List buildRemoveDomainsProposals( domains.setDomains(new UrnArray()); } removeDomainsIfExists(domains, domainUrns); - proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); + proposals.add( + buildMetadataChangeProposal(resource.getUrn(), Constants.DOMAINS_ASPECT_NAME, domains)); } return proposals; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java index 36f2ba85ec98f..902ad07354d5e 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/GlossaryTermService.java @@ -1,13 +1,17 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlossaryTerms; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; +import com.linkedin.common.GlossaryTerms; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.metadata.resource.SubResourceType; @@ -21,18 +25,14 @@ import java.util.Map; import java.util.Optional; import 
java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class GlossaryTermService extends BaseService { - public GlossaryTermService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public GlossaryTermService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -41,11 +41,9 @@ public GlossaryTermService(@Nonnull EntityClient entityClient, @Nonnull Authenti * * @param glossaryTermUrns the urns of the terms to add * @param resources references to the resources to change - * */ public void batchAddGlossaryTerms( - @Nonnull List glossaryTermUrns, - @Nonnull List resources) { + @Nonnull List glossaryTermUrns, @Nonnull List resources) { batchAddGlossaryTerms(glossaryTermUrns, resources, this.systemAuthentication); } @@ -55,19 +53,23 @@ public void batchAddGlossaryTerms( * @param glossaryTermUrns the urns of the terms to add * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ public void batchAddGlossaryTerms( @Nonnull List glossaryTermUrns, @Nonnull List resources, @Nonnull Authentication authentication) { - log.debug("Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", resources, glossaryTermUrns); + log.debug( + "Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", + resources, + glossaryTermUrns); try { addGlossaryTermsToResources(glossaryTermUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add GlossaryTerms %s to resources with urns %s!", - glossaryTermUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add GlossaryTerms %s to resources with urns %s!", + glossaryTermUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -77,11 +79,9 @@ public void batchAddGlossaryTerms( * * @param glossaryTermUrns the urns of the terms to remove * @param resources references to the resources to change - * */ public void batchRemoveGlossaryTerms( - @Nonnull List glossaryTermUrns, - @Nonnull List resources) { + @Nonnull List glossaryTermUrns, @Nonnull List resources) { batchRemoveGlossaryTerms(glossaryTermUrns, resources, this.systemAuthentication); } @@ -91,59 +91,69 @@ public void batchRemoveGlossaryTerms( * @param glossaryTermUrns the urns of the terms to remove * @param resources references to the resources to change * @param authentication authentication to use when making the change - * */ public void batchRemoveGlossaryTerms( @Nonnull List glossaryTermUrns, @Nonnull List resources, @Nonnull Authentication authentication) { - log.debug("Batch adding GlossaryTerms to entities. glossaryTerms: {}, resources: {}", resources, glossaryTermUrns); + log.debug( + "Batch adding GlossaryTerms to entities. 
glossaryTerms: {}, resources: {}", + resources, + glossaryTermUrns); try { removeGlossaryTermsFromResources(glossaryTermUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add GlossaryTerms %s to resources with urns %s!", - glossaryTermUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add GlossaryTerms %s to resources with urns %s!", + glossaryTermUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } private void addGlossaryTermsToResources( - List glossaryTerms, - List resources, - Authentication authentication - ) throws Exception { - List changes = buildAddGlossaryTermsProposals(glossaryTerms, resources, authentication); + List glossaryTerms, List resources, Authentication authentication) + throws Exception { + List changes = + buildAddGlossaryTermsProposals(glossaryTerms, resources, authentication); ingestChangeProposals(changes, authentication); } private void removeGlossaryTermsFromResources( - List glossaryTerms, - List resources, - Authentication authentication - ) throws Exception { - List changes = buildRemoveGlossaryTermsProposals(glossaryTerms, resources, authentication); + List glossaryTerms, List resources, Authentication authentication) + throws Exception { + List changes = + buildRemoveGlossaryTermsProposals(glossaryTerms, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting List buildAddGlossaryTermsProposals( - List glossaryTermUrns, - List resources, - Authentication authentication - ) throws URISyntaxException { + List glossaryTermUrns, List resources, Authentication authentication) + throws URISyntaxException { final List changes = new ArrayList<>(); - final List entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List entityProposals = buildAddGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); - - final List schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List schemaFieldProposals = buildAddGlossaryTermsToSubResourceProposals(glossaryTermUrns, schemaFieldRefs, authentication); + final List entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List entityProposals = + buildAddGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); + + final List schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List schemaFieldProposals = + buildAddGlossaryTermsToSubResourceProposals( + glossaryTermUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -155,20 +165,29 @@ List buildAddGlossaryTermsProposals( List buildRemoveGlossaryTermsProposals( List glossaryTermUrns, List resources, - Authentication authentication - ) { + Authentication authentication) { final List changes = new ArrayList<>(); - final List entityRefs = resources.stream() - .filter(resource -> 
resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List entityProposals = buildRemoveGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); - - final List schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List schemaFieldProposals = buildRemoveGlossaryTermsToSubResourceProposals(glossaryTermUrns, schemaFieldRefs, authentication); + final List entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List entityProposals = + buildRemoveGlossaryTermsToEntityProposals(glossaryTermUrns, entityRefs, authentication); + + final List schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List schemaFieldProposals = + buildRemoveGlossaryTermsToSubResourceProposals( + glossaryTermUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -180,14 +199,14 @@ List buildRemoveGlossaryTermsProposals( List buildAddGlossaryTermsToEntityProposals( List glossaryTermUrns, List resources, - Authentication authentication - ) throws URISyntaxException { + Authentication authentication) + throws URISyntaxException { - final Map glossaryTermAspects = getGlossaryTermsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlossaryTerms(), - authentication - ); + final Map glossaryTermAspects = + getGlossaryTermsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlossaryTerms(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -199,10 +218,15 @@ List buildAddGlossaryTermsToEntityProposals( if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); - glossaryTerms.setAuditStamp(new AuditStamp().setTime(System.currentTimeMillis()).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + glossaryTerms.setAuditStamp( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } addGlossaryTermsIfNotExists(glossaryTerms, glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms)); } return changes; } @@ -211,31 +235,36 @@ List buildAddGlossaryTermsToEntityProposals( List buildAddGlossaryTermsToSubResourceProposals( final List glossaryTermUrns, final List resources, - final Authentication authentication - ) throws URISyntaxException { - final Map editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Authentication authentication) + throws URISyntaxException { + final Map editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + 
authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } addGlossaryTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -245,14 +274,13 @@ List buildAddGlossaryTermsToSubResourceProposals( List buildRemoveGlossaryTermsToEntityProposals( List glossaryTermUrns, List resources, - Authentication authentication - ) { + Authentication authentication) { - final Map glossaryTermAspects = getGlossaryTermsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlossaryTerms(), - authentication - ); + final Map glossaryTermAspects = + getGlossaryTermsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlossaryTerms(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -262,15 +290,15 @@ List buildRemoveGlossaryTermsToEntityProposals( } if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); - glossaryTerms.setAuditStamp(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + glossaryTerms.setAuditStamp( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); } removeGlossaryTermsIfExists(glossaryTerms, glossaryTermUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, glossaryTerms); changes.add(proposal); } @@ -281,37 +309,42 @@ List buildRemoveGlossaryTermsToEntityProposals( List buildRemoveGlossaryTermsToSubResourceProposals( List glossaryTermUrns, List resources, - Authentication authentication - ) { + Authentication authentication) { - final Map editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Map editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata 
editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeGlossaryTermsIfExists(editableFieldInfo.getGlossaryTerms(), glossaryTermUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; } - private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List glossaryTermUrns) throws URISyntaxException { + private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List glossaryTermUrns) + throws URISyntaxException { if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); } @@ -320,7 +353,8 @@ private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List List glossaryTermsToAdd = new ArrayList<>(); for (Urn glossaryTermUrn : glossaryTermUrns) { - if (glossaryTermAssociationArray.stream().anyMatch(association -> association.getUrn().equals(glossaryTermUrn))) { + if (glossaryTermAssociationArray.stream() + .anyMatch(association -> association.getUrn().equals(glossaryTermUrn))) { continue; } glossaryTermsToAdd.add(glossaryTermUrn); @@ -338,30 +372,30 @@ private void addGlossaryTermsIfNotExists(GlossaryTerms glossaryTerms, List } } - private static GlossaryTermAssociationArray removeGlossaryTermsIfExists(GlossaryTerms glossaryTerms, List glossaryTermUrns) { + private static GlossaryTermAssociationArray removeGlossaryTermsIfExists( + GlossaryTerms glossaryTerms, List glossaryTermUrns) { if (!glossaryTerms.hasTerms()) { glossaryTerms.setTerms(new GlossaryTermAssociationArray()); } GlossaryTermAssociationArray glossaryTermAssociationArray = glossaryTerms.getTerms(); for (Urn glossaryTermUrn : glossaryTermUrns) { - glossaryTermAssociationArray.removeIf(association -> association.getUrn().equals(glossaryTermUrn)); + glossaryTermAssociationArray.removeIf( + association -> association.getUrn().equals(glossaryTermUrn)); } return glossaryTermAssociationArray; } private static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java 
b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java index 5649be0c701ca..cd5202ce75b64 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/LineageService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.chart.ChartDataSourceTypeArray; @@ -24,15 +26,12 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.entity.AspectUtils.*; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -42,77 +41,96 @@ public class LineageService { private final EntityClient _entityClient; /** - * Validates that a given list of urns are all datasets and all exist. Throws error if either condition is false for any urn. + * Validates that a given list of urns are all datasets and all exist. Throws error if either + * condition is false for any urn. */ - public void validateDatasetUrns(@Nonnull final List urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDatasetUrns( + @Nonnull final List urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add lineage edge with non-dataset node when we expect a dataset. Upstream urn: %s", urn)); + throw new IllegalArgumentException( + String.format( + "Tried to add lineage edge with non-dataset node when we expect a dataset. Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } /** - * Validates that a given list of urns are all either datasets or charts and that they exist. Otherwise, throw an error. + * Validates that a given list of urns are all either datasets or charts and that they exist. + * Otherwise, throw an error. */ - public void validateDashboardUpstreamUrns(@Nonnull final List urns, @Nonnull final Authentication authentication) throws Exception { + public void validateDashboardUpstreamUrns( + @Nonnull final List urns, @Nonnull final Authentication authentication) + throws Exception { for (final Urn urn : urns) { - if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) && !urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Tried to add an upstream to a dashboard that isn't a chart or dataset. Upstream urn: %s", urn)); + if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) + && !urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Tried to add an upstream to a dashboard that isn't a chart or dataset. 
Upstream urn: %s", + urn)); } validateUrnExists(urn, authentication); } } - /** - * Validates that a given urn exists using the entityService - */ - public void validateUrnExists(@Nonnull final Urn urn, @Nonnull final Authentication authentication) throws Exception { + /** Validates that a given urn exists using the entityService */ + public void validateUrnExists( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) throws Exception { if (!_entityClient.exists(urn, authentication)) { throw new IllegalArgumentException(String.format("Error: urn does not exist: %s", urn)); } } /** - * Updates dataset lineage by taking in a list of upstreams to add and to remove and updating the existing - * upstreamLineage aspect. + * Updates dataset lineage by taking in a list of upstreams to add and to remove and updating the + * existing upstreamLineage aspect. */ public void updateDatasetLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List upstreamUrnsToAdd, @Nonnull final List upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDatasetUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildDatasetLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildDatasetLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update dataset lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update dataset lineage for urn %s", downstreamUrn), e); } } - /** - * Builds an MCP of UpstreamLineage for dataset entities. - */ + /** Builds an MCP of UpstreamLineage for dataset entities. 
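+   * <p>A sketch of how updateDatasetLineage combines this builder with ingestProposal (urns,
+   * actor, and authentication assumed to be in scope):
+   * <pre>{@code
+   * MetadataChangeProposal proposal =
+   *     lineageService.buildDatasetLineageProposal(
+   *         downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication);
+   * entityClient.ingestProposal(proposal, authentication, false);
+   * }</pre>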
*/ @Nonnull public MetadataChangeProposal buildDatasetLineageProposal( @Nonnull final Urn downstreamUrn, @Nonnull final List upstreamUrnsToAdd, @Nonnull final List upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DATASET_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATASET_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME), + authentication); UpstreamLineage upstreamLineage = new UpstreamLineage(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(Constants.UPSTREAM_LINEAGE_ASPECT_NAME).getValue().data(); upstreamLineage = new UpstreamLineage(dataMap); } @@ -129,7 +147,6 @@ public MetadataChangeProposal buildDatasetLineageProposal( upstreamsToAdd.add(upstreamUrn); } - for (final Urn upstreamUrn : upstreamsToAdd) { final Upstream newUpstream = new Upstream(); newUpstream.setDataset(DatasetUrn.createFromUrn(upstreamUrn)); @@ -147,52 +164,59 @@ public MetadataChangeProposal buildDatasetLineageProposal( upstreamLineage.setUpstreams(upstreams); return buildMetadataChangeProposal( - downstreamUrn, Constants.UPSTREAM_LINEAGE_ASPECT_NAME, upstreamLineage - ); + downstreamUrn, Constants.UPSTREAM_LINEAGE_ASPECT_NAME, upstreamLineage); } - /** - * Updates Chart lineage by building and ingesting an MCP based on inputs. - */ + /** Updates Chart lineage by building and ingesting an MCP based on inputs. */ public void updateChartLineage( @Nonnull final Urn downstreamUrn, @Nonnull final List upstreamUrnsToAdd, @Nonnull final List upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { // ensure all upstream urns are dataset urns and they exist validateDatasetUrns(upstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - MetadataChangeProposal changeProposal = buildChartLineageProposal( - downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); + MetadataChangeProposal changeProposal = + buildChartLineageProposal( + downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication); _entityClient.ingestProposal(changeProposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); + throw new RuntimeException( + String.format("Failed to update chart lineage for urn %s", downstreamUrn), e); } } - /** - * Builds an MCP of ChartInfo for chart entities. - */ + /** Builds an MCP of ChartInfo for chart entities. 
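+   * <p>Illustrative call; upstream urns must be dataset urns (updateChartLineage validates
+   * this before building), and the variable names below are assumed:
+   * <pre>{@code
+   * List<Urn> toAdd = List.of(upstreamDatasetUrn);
+   * MetadataChangeProposal proposal =
+   *     lineageService.buildChartLineageProposal(chartUrn, toAdd, List.of(), actor, authentication);
+   * }</pre>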
*/ @Nonnull public MetadataChangeProposal buildChartLineageProposal( @Nonnull final Urn downstreamUrn, @Nonnull final List upstreamUrnsToAdd, @Nonnull final List upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.CHART_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME), authentication); - - if (entityResponse == null || !entityResponse.getAspects().containsKey(Constants.CHART_INFO_ASPECT_NAME)) { - throw new RuntimeException(String.format("Failed to update chart lineage for urn %s as chart info doesn't exist", downstreamUrn)); - } - - DataMap dataMap = entityResponse.getAspects().get(Constants.CHART_INFO_ASPECT_NAME).getValue().data(); + _entityClient.getV2( + Constants.CHART_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME), + authentication); + + if (entityResponse == null + || !entityResponse.getAspects().containsKey(Constants.CHART_INFO_ASPECT_NAME)) { + throw new RuntimeException( + String.format( + "Failed to update chart lineage for urn %s as chart info doesn't exist", + downstreamUrn)); + } + + DataMap dataMap = + entityResponse.getAspects().get(Constants.CHART_INFO_ASPECT_NAME).getValue().data(); ChartInfo chartInfo = new ChartInfo(dataMap); if (!chartInfo.hasInputEdges()) { chartInfo.setInputEdges(new EdgeArray()); @@ -205,10 +229,9 @@ public MetadataChangeProposal buildChartLineageProposal( final EdgeArray inputEdges = chartInfo.getInputEdges(); final List upstreamsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamUrnsToAdd) { - if ( - inputEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || inputs.stream().anyMatch(input -> input.equals(upstreamUrn)) - ) { + if (inputEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || inputs.stream().anyMatch(input -> input.equals(upstreamUrn))) { continue; } upstreamsToAdd.add(upstreamUrn); @@ -219,7 +242,7 @@ public MetadataChangeProposal buildChartLineageProposal( } inputEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); - inputs.removeIf(input -> upstreamUrnsToRemove.contains(input.getDatasetUrn())); + inputs.removeIf(input -> upstreamUrnsToRemove.contains(input.getDatasetUrn())); chartInfo.setInputEdges(inputEdges); chartInfo.setInputs(inputs); @@ -227,31 +250,33 @@ public MetadataChangeProposal buildChartLineageProposal( return buildMetadataChangeProposal(downstreamUrn, Constants.CHART_INFO_ASPECT_NAME, chartInfo); } - /** - * Updates Dashboard lineage by building and ingesting an MCP based on inputs. - */ + /** Updates Dashboard lineage by building and ingesting an MCP based on inputs. 
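As in the chart builder just shown, every method in this class finishes by wrapping the mutated aspect in a proposal. buildMetadataChangeProposal follows the usual AspectUtils shape, roughly (a sketch, not code from this patch):

MetadataChangeProposal proposal = new MetadataChangeProposal();
proposal.setEntityUrn(downstreamUrn);
proposal.setEntityType(downstreamUrn.getEntityType());
proposal.setAspectName(Constants.CHART_INFO_ASPECT_NAME);
proposal.setAspect(GenericRecordUtils.serializeAspect(chartInfo));
proposal.setChangeType(ChangeType.UPSERT);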
*/
  public void updateDashboardLineage(
      @Nonnull final Urn downstreamUrn,
      @Nonnull final List<Urn> upstreamUrnsToAdd,
      @Nonnull final List<Urn> upstreamUrnsToRemove,
      @Nonnull final Urn actor,
-      @Nonnull final Authentication authentication
-  ) throws Exception {
+      @Nonnull final Authentication authentication)
+      throws Exception {
     validateDashboardUpstreamUrns(upstreamUrnsToAdd, authentication);
-    // TODO: add permissions check here for entity type - or have one overall permissions check above
+    // TODO: add permissions check here for entity type - or have one overall permissions check
+    // above
     try {
-      MetadataChangeProposal changeProposal = buildDashboardLineageProposal(
-          downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication);
+      MetadataChangeProposal changeProposal =
+          buildDashboardLineageProposal(
+              downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication);
       _entityClient.ingestProposal(changeProposal, authentication, false);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to update dashboard lineage for urn %s", downstreamUrn), e);
     }
   }

   /**
-   * Builds an MCP of DashboardInfo for dashboard entities. DashboardInfo has a list of chart urns and dataset urns pointing upstream.
-   * We need to filter out the chart urns and dataset urns separately in upstreamUrnsToAdd to add them to the correct fields.
+   * Builds an MCP of DashboardInfo for dashboard entities. DashboardInfo has a list of chart urns
+   * and dataset urns pointing upstream. We need to filter out the chart urns and dataset urns
+   * separately in upstreamUrnsToAdd to add them to the correct fields.
    */
   @Nonnull
   public MetadataChangeProposal buildDashboardLineageProposal(
@@ -259,41 +284,62 @@ public MetadataChangeProposal buildDashboardLineageProposal(
       @Nonnull final List<Urn> upstreamUrnsToAdd,
       @Nonnull final List<Urn> upstreamUrnsToRemove,
       @Nonnull final Urn actor,
-      @Nonnull final Authentication authentication
-  ) throws Exception {
+      @Nonnull final Authentication authentication)
+      throws Exception {
     EntityResponse entityResponse =
-        _entityClient.getV2(Constants.DASHBOARD_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME), authentication);
-
-    if (entityResponse == null || !entityResponse.getAspects().containsKey(Constants.DASHBOARD_INFO_ASPECT_NAME)) {
-      throw new RuntimeException(String.format("Failed to update dashboard lineage for urn %s as dashboard info doesn't exist", downstreamUrn));
-    }
-
-    DataMap dataMap = entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME).getValue().data();
+        _entityClient.getV2(
+            Constants.DASHBOARD_ENTITY_NAME,
+            downstreamUrn,
+            ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME),
+            authentication);
+
+    if (entityResponse == null
+        || !entityResponse.getAspects().containsKey(Constants.DASHBOARD_INFO_ASPECT_NAME)) {
+      throw new RuntimeException(
+          String.format(
+              "Failed to update dashboard lineage for urn %s as dashboard info doesn't exist",
+              downstreamUrn));
+    }
+
+    DataMap dataMap =
+        entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME).getValue().data();
     DashboardInfo dashboardInfo = new DashboardInfo(dataMap);

     // first, deal with chart edges
-    updateUpstreamCharts(dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor);
+    updateUpstreamCharts(
+        dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor);

     // next, deal with
dataset edges - updateUpstreamDatasets(dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDatasets( + dashboardInfo, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); - return buildMetadataChangeProposal(downstreamUrn, Constants.DASHBOARD_INFO_ASPECT_NAME, dashboardInfo); + return buildMetadataChangeProposal( + downstreamUrn, Constants.DASHBOARD_INFO_ASPECT_NAME, dashboardInfo); } /** - * Updates the charts and chartEdges fields on the DashboardInfo aspect. First, add any new lineage edges not already represented - * in the existing fields to chartEdges. Then, remove all lineage edges from charts and chartEdges fields that are in upstreamUrnsToRemove. - * Then update the DashboardInfo aspect. + * Updates the charts and chartEdges fields on the DashboardInfo aspect. First, add any new + * lineage edges not already represented in the existing fields to chartEdges. Then, remove all + * lineage edges from charts and chartEdges fields that are in upstreamUrnsToRemove. Then update + * the DashboardInfo aspect. */ - private void updateUpstreamCharts(DashboardInfo dashboardInfo, List upstreamUrnsToAdd, List upstreamUrnsToRemove, Urn dashboardUrn, Urn actor) { + private void updateUpstreamCharts( + DashboardInfo dashboardInfo, + List upstreamUrnsToAdd, + List upstreamUrnsToRemove, + Urn dashboardUrn, + Urn actor) { initializeChartEdges(dashboardInfo); final List upstreamChartUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.CHART_ENTITY_NAME)) + .collect(Collectors.toList()); final ChartUrnArray charts = dashboardInfo.getCharts(); final EdgeArray chartEdges = dashboardInfo.getChartEdges(); - final List upstreamsChartsToAdd = getUpstreamChartToAdd(upstreamChartUrnsToAdd, chartEdges, charts); + final List upstreamsChartsToAdd = + getUpstreamChartToAdd(upstreamChartUrnsToAdd, chartEdges, charts); for (final Urn upstreamUrn : upstreamsChartsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, chartEdges); @@ -305,7 +351,6 @@ private void updateUpstreamCharts(DashboardInfo dashboardInfo, List upstrea dashboardInfo.setCharts(charts); } - private void initializeChartEdges(DashboardInfo dashboardInfo) { if (!dashboardInfo.hasChartEdges()) { dashboardInfo.setChartEdges(new EdgeArray()); @@ -316,15 +361,16 @@ private void initializeChartEdges(DashboardInfo dashboardInfo) { } /** - * Need to filter out any existing upstream chart urns in order to get a list of net new chart urns to add to dashboard lineage + * Need to filter out any existing upstream chart urns in order to get a list of net new chart + * urns to add to dashboard lineage */ - private List getUpstreamChartToAdd(List upstreamChartUrnsToAdd, List chartEdges, ChartUrnArray charts) { + private List getUpstreamChartToAdd( + List upstreamChartUrnsToAdd, List chartEdges, ChartUrnArray charts) { final List upstreamsChartsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamChartUrnsToAdd) { - if ( - chartEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || charts.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (chartEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || charts.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamsChartsToAdd.add(upstreamUrn); @@ -332,25 +378,35 @@ private List 
getUpstreamChartToAdd(List upstreamChartUrnsToAdd, List chartEdges, ChartUrnArray charts, List upstreamUrnsToRemove) { + private void removeChartLineageEdges( + List chartEdges, ChartUrnArray charts, List upstreamUrnsToRemove) { chartEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); charts.removeIf(upstreamUrnsToRemove::contains); } /** - * Updates the datasets and datasetEdges fields on the DashboardInfo aspect. First, add any new lineage edges not already represented - * in the existing fields to datasetEdges.Then, remove all lineage edges from datasets and datasetEdges fields that are in upstreamUrnsToRemove. - * Then update the DashboardInfo aspect. + * Updates the datasets and datasetEdges fields on the DashboardInfo aspect. First, add any new + * lineage edges not already represented in the existing fields to datasetEdges.Then, remove all + * lineage edges from datasets and datasetEdges fields that are in upstreamUrnsToRemove. Then + * update the DashboardInfo aspect. */ - private void updateUpstreamDatasets(DashboardInfo dashboardInfo, List upstreamUrnsToAdd, List upstreamUrnsToRemove, Urn dashboardUrn, Urn actor) { + private void updateUpstreamDatasets( + DashboardInfo dashboardInfo, + List upstreamUrnsToAdd, + List upstreamUrnsToRemove, + Urn dashboardUrn, + Urn actor) { initializeDatasetEdges(dashboardInfo); final List upstreamDatasetUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) + .collect(Collectors.toList()); final UrnArray datasets = dashboardInfo.getDatasets(); final EdgeArray datasetEdges = dashboardInfo.getDatasetEdges(); - final List upstreamDatasetsToAdd = getUpstreamDatasetsToAdd(upstreamDatasetUrnsToAdd, datasetEdges, datasets); + final List upstreamDatasetsToAdd = + getUpstreamDatasetsToAdd(upstreamDatasetUrnsToAdd, datasetEdges, datasets); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, datasetEdges); @@ -371,13 +427,13 @@ private void initializeDatasetEdges(DashboardInfo dashboardInfo) { } } - private List getUpstreamDatasetsToAdd(List upstreamDatasetUrnsToAdd, List datasetEdges, UrnArray datasets) { + private List getUpstreamDatasetsToAdd( + List upstreamDatasetUrnsToAdd, List datasetEdges, UrnArray datasets) { final List upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - datasetEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || datasets.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (datasetEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || datasets.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -385,49 +441,60 @@ private List getUpstreamDatasetsToAdd(List upstreamDatasetUrnsToAdd, L return upstreamDatasetsToAdd; } - private void removeDatasetLineageEdges(List datasetEdges, UrnArray datasets, List upstreamUrnsToRemove) { - datasetEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeDatasetLineageEdges( + List datasetEdges, UrnArray datasets, List upstreamUrnsToRemove) { + datasetEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); 
datasets.removeIf(upstreamUrnsToRemove::contains);
   }

   /**
-   * Validates that a given list of urns are all either datasets or dataJobs and that they exist. Otherwise, throw an error.
+   * Validates that a given list of urns are all either datasets or dataJobs and that they exist.
+   * Otherwise, throw an error.
    */
-  public void validateDataJobUpstreamUrns(@Nonnull final List<Urn> urns, @Nonnull final Authentication authentication) throws Exception {
+  public void validateDataJobUpstreamUrns(
+      @Nonnull final List<Urn> urns, @Nonnull final Authentication authentication)
+      throws Exception {
     for (final Urn urn : urns) {
-      if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME) && !urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) {
-        throw new IllegalArgumentException(String.format("Tried to add an upstream to a dataJob that isn't a datJob or dataset. Upstream urn: %s", urn));
+      if (!urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)
+          && !urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) {
+        throw new IllegalArgumentException(
+            String.format(
+                "Tried to add an upstream to a dataJob that isn't a dataJob or dataset. Upstream urn: %s",
+                urn));
       }
       validateUrnExists(urn, authentication);
     }
   }

-  /**
-   * Updates DataJob lineage by building and ingesting an MCP based on inputs.
-   */
+  /** Updates DataJob lineage by building and ingesting an MCP based on inputs. */
   public void updateDataJobUpstreamLineage(
       @Nonnull final Urn downstreamUrn,
       @Nonnull final List<Urn> upstreamUrnsToAdd,
       @Nonnull final List<Urn> upstreamUrnsToRemove,
       @Nonnull final Urn actor,
-      @Nonnull final Authentication authentication
-  ) throws Exception {
+      @Nonnull final Authentication authentication)
+      throws Exception {
     validateDataJobUpstreamUrns(upstreamUrnsToAdd, authentication);
-    // TODO: add permissions check here for entity type - or have one overall permissions check above
+    // TODO: add permissions check here for entity type - or have one overall permissions check
+    // above
     try {
-      MetadataChangeProposal changeProposal = buildDataJobUpstreamLineageProposal(
-          downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication);
+      MetadataChangeProposal changeProposal =
+          buildDataJobUpstreamLineageProposal(
+              downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, authentication);
       _entityClient.ingestProposal(changeProposal, authentication, false);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", downstreamUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to update dataJob lineage for urn %s", downstreamUrn), e);
     }
   }

   /**
-   * Builds an MCP of DataJobInputOutput for datajob entities. DataJobInputOutput has a list of dataset urns and datajob urns pointing upstream.
-   * We need to filter out the chart dataset and datajob urns separately in upstreamUrnsToAdd to add them to the correct fields. We deal with downstream
-   * pointing datasets in outputDatasets separately.
+   * Builds an MCP of DataJobInputOutput for datajob entities. DataJobInputOutput has a list of
+   * dataset urns and datajob urns pointing upstream. We need to filter out the dataset and
+   * datajob urns separately in upstreamUrnsToAdd to add them to the correct fields. We deal with
+   * downstream pointing datasets in outputDatasets separately.
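The entity-type split described above is a plain partition of the incoming urns (sketch, assuming the List<Urn> signatures restored in this diff):

// Datasets and dataJobs land in different DataJobInputOutput fields.
Map<String, List<Urn>> byType =
    upstreamUrnsToAdd.stream().collect(Collectors.groupingBy(Urn::getEntityType));
List<Urn> datasetUpstreams =
    byType.getOrDefault(Constants.DATASET_ENTITY_NAME, Collections.emptyList());
List<Urn> dataJobUpstreams =
    byType.getOrDefault(Constants.DATA_JOB_ENTITY_NAME, Collections.emptyList());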
*/ @Nonnull public MetadataChangeProposal buildDataJobUpstreamLineageProposal( @@ -435,46 +502,62 @@ public MetadataChangeProposal buildDataJobUpstreamLineageProposal( @Nonnull final List upstreamUrnsToAdd, @Nonnull final List upstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { EntityResponse entityResponse = - _entityClient.getV2(Constants.DATA_JOB_ENTITY_NAME, downstreamUrn, ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), authentication); + _entityClient.getV2( + Constants.DATA_JOB_ENTITY_NAME, + downstreamUrn, + ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), + authentication); DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME) + .getValue() + .data(); dataJobInputOutput = new DataJobInputOutput(dataMap); } // first, deal with dataset edges - updateUpstreamDatasetsForDataJobs(dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDatasetsForDataJobs( + dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); // next, deal with dataJobs edges - updateUpstreamDataJobs(dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); + updateUpstreamDataJobs( + dataJobInputOutput, upstreamUrnsToAdd, upstreamUrnsToRemove, downstreamUrn, actor); - return buildMetadataChangeProposal(downstreamUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); + return buildMetadataChangeProposal( + downstreamUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput); } /** - * Updates the inputDatasets and inputDatasetEdges fields on the DataJobInputOutput aspect. First, add any new lineage - * edges not already represented in the existing fields to inputDatasetEdges. Then, remove all lineage edges from inputDatasets - * and inputDatasetEdges fields that are in upstreamUrnsToRemove. Then update the DataJobInputOutput aspect. + * Updates the inputDatasets and inputDatasetEdges fields on the DataJobInputOutput aspect. First, + * add any new lineage edges not already represented in the existing fields to inputDatasetEdges. + * Then, remove all lineage edges from inputDatasets and inputDatasetEdges fields that are in + * upstreamUrnsToRemove. Then update the DataJobInputOutput aspect. 
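In miniature, the add-then-remove reconciliation that this javadoc describes (toAdd and toRemove stand in for the filtered urn lists; addNewEdge is the helper at the end of this class):

for (Urn urn : toAdd) {
  boolean alreadyLinked =
      inputDatasetEdges.stream().anyMatch(e -> e.getDestinationUrn().equals(urn));
  if (!alreadyLinked) {
    addNewEdge(urn, dataJobUrn, actor, inputDatasetEdges);
  }
}
// Removals must hit both the deprecated array and the edge array.
inputDatasetEdges.removeIf(e -> toRemove.contains(e.getDestinationUrn()));
inputDatasets.removeIf(toRemove::contains);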
*/ private void updateUpstreamDatasetsForDataJobs( DataJobInputOutput dataJobInputOutput, List upstreamUrnsToAdd, List upstreamUrnsToRemove, Urn dashboardUrn, - Urn actor - ) { + Urn actor) { initializeInputDatasetEdges(dataJobInputOutput); final List upstreamDatasetUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATASET_ENTITY_NAME)) + .collect(Collectors.toList()); final DatasetUrnArray inputDatasets = dataJobInputOutput.getInputDatasets(); final EdgeArray inputDatasetEdges = dataJobInputOutput.getInputDatasetEdges(); - final List upstreamDatasetsToAdd = getInputOutputDatasetsToAdd(upstreamDatasetUrnsToAdd, inputDatasetEdges, inputDatasets); + final List upstreamDatasetsToAdd = + getInputOutputDatasetsToAdd(upstreamDatasetUrnsToAdd, inputDatasetEdges, inputDatasets); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dashboardUrn, actor, inputDatasetEdges); @@ -495,14 +578,15 @@ private void initializeInputDatasetEdges(DataJobInputOutput dataJobInputOutput) } } - // get new dataset edges that we should be adding to inputDatasetEdges and outputDatasetEdges for the DataJobInputOutput aspect - private List getInputOutputDatasetsToAdd(List upstreamDatasetUrnsToAdd, List datasetEdges, DatasetUrnArray inputDatasets) { + // get new dataset edges that we should be adding to inputDatasetEdges and outputDatasetEdges for + // the DataJobInputOutput aspect + private List getInputOutputDatasetsToAdd( + List upstreamDatasetUrnsToAdd, List datasetEdges, DatasetUrnArray inputDatasets) { final List upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - datasetEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || inputDatasets.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (datasetEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || inputDatasets.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -510,31 +594,36 @@ private List getInputOutputDatasetsToAdd(List upstreamDatasetUrnsToAdd return upstreamDatasetsToAdd; } - private void removeDatasetEdges(List datasetEdges, DatasetUrnArray datasets, List upstreamUrnsToRemove) { - datasetEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeDatasetEdges( + List datasetEdges, DatasetUrnArray datasets, List upstreamUrnsToRemove) { + datasetEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); datasets.removeIf(upstreamUrnsToRemove::contains); } /** - * Updates the dataJobs and dataJobEdges fields on the DataJobInputOutput aspect. First, add any new lineage edges not already represented - * in the existing fields to dataJobEdges.Then, remove all lineage edges from dataJobs and dataJobEdges fields that are in upstreamUrnsToRemove. - * Then update the DataJobInputOutput aspect. + * Updates the dataJobs and dataJobEdges fields on the DataJobInputOutput aspect. First, add any + * new lineage edges not already represented in the existing fields to dataJobEdges.Then, remove + * all lineage edges from dataJobs and dataJobEdges fields that are in upstreamUrnsToRemove. Then + * update the DataJobInputOutput aspect. 
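Whichever field pair is being reconciled, each net-new link becomes a com.linkedin.common.Edge stamped with the acting user via addNewEdge (defined at the bottom of this diff); roughly, with the exact field set possibly varying:

Edge newEdge = new Edge();
newEdge.setDestinationUrn(upstreamUrn);
newEdge.setSourceUrn(downstreamUrn);
newEdge.setCreated(
    new AuditStamp().setTime(System.currentTimeMillis()).setActor(actor));
newEdge.setLastModified(
    new AuditStamp().setTime(System.currentTimeMillis()).setActor(actor));
edgeArray.add(newEdge);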
*/ private void updateUpstreamDataJobs( DataJobInputOutput dataJobInputOutput, List upstreamUrnsToAdd, List upstreamUrnsToRemove, Urn dataJobUrn, - Urn actor - ) { + Urn actor) { initializeInputDatajobEdges(dataJobInputOutput); final List upstreamDatajobUrnsToAdd = - upstreamUrnsToAdd.stream().filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)).collect(Collectors.toList()); + upstreamUrnsToAdd.stream() + .filter(urn -> urn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); final DataJobUrnArray dataJobs = dataJobInputOutput.getInputDatajobs(); final EdgeArray dataJobEdges = dataJobInputOutput.getInputDatajobEdges(); - final List upstreamDatasetsToAdd = getInputDatajobsToAdd(upstreamDatajobUrnsToAdd, dataJobEdges, dataJobs); + final List upstreamDatasetsToAdd = + getInputDatajobsToAdd(upstreamDatajobUrnsToAdd, dataJobEdges, dataJobs); for (final Urn upstreamUrn : upstreamDatasetsToAdd) { addNewEdge(upstreamUrn, dataJobUrn, actor, dataJobEdges); @@ -555,13 +644,13 @@ private void initializeInputDatajobEdges(DataJobInputOutput dataJobInputOutput) } } - private List getInputDatajobsToAdd(List upstreamDatasetUrnsToAdd, List dataJobEdges, DataJobUrnArray dataJobs) { + private List getInputDatajobsToAdd( + List upstreamDatasetUrnsToAdd, List dataJobEdges, DataJobUrnArray dataJobs) { final List upstreamDatasetsToAdd = new ArrayList<>(); for (Urn upstreamUrn : upstreamDatasetUrnsToAdd) { - if ( - dataJobEdges.stream().anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) - || dataJobs.stream().anyMatch(chart -> chart.equals(upstreamUrn)) - ) { + if (dataJobEdges.stream() + .anyMatch(inputEdge -> inputEdge.getDestinationUrn().equals(upstreamUrn)) + || dataJobs.stream().anyMatch(chart -> chart.equals(upstreamUrn))) { continue; } upstreamDatasetsToAdd.add(upstreamUrn); @@ -569,30 +658,33 @@ private List getInputDatajobsToAdd(List upstreamDatasetUrnsToAdd, List return upstreamDatasetsToAdd; } - private void removeInputDatajobEdges(List dataJobEdges, DataJobUrnArray dataJobs, List upstreamUrnsToRemove) { - dataJobEdges.removeIf(inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); + private void removeInputDatajobEdges( + List dataJobEdges, DataJobUrnArray dataJobs, List upstreamUrnsToRemove) { + dataJobEdges.removeIf( + inputEdge -> upstreamUrnsToRemove.contains(inputEdge.getDestinationUrn())); dataJobs.removeIf(upstreamUrnsToRemove::contains); } - /** - * Updates DataJob lineage in the downstream direction (outputDatasets and outputDatasetEdges) - */ + /** Updates DataJob lineage in the downstream direction (outputDatasets and outputDatasetEdges) */ public void updateDataJobDownstreamLineage( @Nonnull final Urn dataJobUrn, @Nonnull final List downstreamUrnsToAdd, @Nonnull final List downstreamUrnsToRemove, @Nonnull final Urn actor, - @Nonnull final Authentication authentication - ) throws Exception { + @Nonnull final Authentication authentication) + throws Exception { validateDatasetUrns(downstreamUrnsToAdd, authentication); - // TODO: add permissions check here for entity type - or have one overall permissions check above + // TODO: add permissions check here for entity type - or have one overall permissions check + // above try { - final MetadataChangeProposal changeProposal = buildDataJobDownstreamLineageProposal( - dataJobUrn, downstreamUrnsToAdd, downstreamUrnsToRemove, actor, authentication); + final MetadataChangeProposal changeProposal = + buildDataJobDownstreamLineageProposal( + dataJobUrn, 
downstreamUrnsToAdd, downstreamUrnsToRemove, actor, authentication);
       _entityClient.ingestProposal(changeProposal, authentication, false);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to update chart lineage for urn %s", dataJobUrn), e);
+      throw new RuntimeException(
+          String.format("Failed to update dataJob lineage for urn %s", dataJobUrn), e);
     }
   }

@@ -603,8 +695,9 @@ private void initializeOutputDatajobEdges(DataJobInputOutput dataJobInputOutput)
   }

   /**
-   * Builds an MCP of DataJobInputOutput for datajob entities. Specifically this is updating this aspect for lineage in the downstream
-   * direction. This includes the fields outputDatasets (deprecated) and outputDatasetEdges
+   * Builds an MCP of DataJobInputOutput for datajob entities. Specifically this is updating this
+   * aspect for lineage in the downstream direction. This includes the fields outputDatasets
+   * (deprecated) and outputDatasetEdges.
    */
   @Nonnull
   public MetadataChangeProposal buildDataJobDownstreamLineageProposal(
@@ -612,14 +705,24 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal(
       @Nonnull final List<Urn> downstreamUrnsToAdd,
       @Nonnull final List<Urn> downstreamUrnsToRemove,
       @Nonnull final Urn actor,
-      @Nonnull final Authentication authentication
-  ) throws Exception {
+      @Nonnull final Authentication authentication)
+      throws Exception {
     final EntityResponse entityResponse =
-        _entityClient.getV2(Constants.DATA_JOB_ENTITY_NAME, dataJobUrn, ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME), authentication);
+        _entityClient.getV2(
+            Constants.DATA_JOB_ENTITY_NAME,
+            dataJobUrn,
+            ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME),
+            authentication);

     DataJobInputOutput dataJobInputOutput = new DataJobInputOutput();
-    if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) {
-      DataMap dataMap = entityResponse.getAspects().get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME).getValue().data();
+    if (entityResponse != null
+        && entityResponse.getAspects().containsKey(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)) {
+      DataMap dataMap =
+          entityResponse
+              .getAspects()
+              .get(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)
+              .getValue()
+              .data();
       dataJobInputOutput = new DataJobInputOutput(dataMap);
     }

@@ -628,7 +731,8 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal(
     final DatasetUrnArray outputDatasets = dataJobInputOutput.getOutputDatasets();
     final EdgeArray outputDatasetEdges = dataJobInputOutput.getOutputDatasetEdges();

-    final List<Urn> downstreamDatasetsToAdd = getInputOutputDatasetsToAdd(downstreamUrnsToAdd, outputDatasetEdges, outputDatasets);
+    final List<Urn> downstreamDatasetsToAdd =
+        getInputOutputDatasetsToAdd(downstreamUrnsToAdd, outputDatasetEdges, outputDatasets);

     for (final Urn downstreamUrn : downstreamDatasetsToAdd) {
       addNewEdge(downstreamUrn, dataJobUrn, actor, outputDatasetEdges);
@@ -639,15 +743,15 @@ public MetadataChangeProposal buildDataJobDownstreamLineageProposal(
     dataJobInputOutput.setOutputDatasetEdges(outputDatasetEdges);
     dataJobInputOutput.setOutputDatasets(outputDatasets);

-    return buildMetadataChangeProposal(dataJobUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput);
+    return buildMetadataChangeProposal(
+        dataJobUrn, Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, dataJobInputOutput);
   }

   private void addNewEdge(
       @Nonnull final Urn upstreamUrn,
       @Nonnull final Urn downstreamUrn,
       @Nonnull final Urn actor,
-      @Nonnull final EdgeArray edgeArray
-  ) {
+      @Nonnull final
EdgeArray edgeArray) { final Edge newEdge = new Edge(); newEdge.setDestinationUrn(upstreamUrn); newEdge.setSourceUrn(downstreamUrn); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java index 7385e8aa6acae..e030404cd2607 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnerService.java @@ -1,13 +1,17 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; -import com.linkedin.common.Ownership; import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.mxe.MetadataChangeProposal; @@ -15,20 +19,16 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class OwnerService extends BaseService { public static final String SYSTEM_ID = "__system__"; - public OwnerService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public OwnerService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -39,7 +39,10 @@ public OwnerService(@Nonnull EntityClient entityClient, @Nonnull Authentication * @param resources references to the resources to change * @param ownershipType the ownership type to add */ - public void batchAddOwners(@Nonnull List ownerUrns, @Nonnull List resources, @Nonnull OwnershipType ownershipType) { + public void batchAddOwners( + @Nonnull List ownerUrns, + @Nonnull List resources, + @Nonnull OwnershipType ownershipType) { batchAddOwners(ownerUrns, resources, ownershipType, this.systemAuthentication); } @@ -60,9 +63,11 @@ public void batchAddOwners( try { addOwnersToResources(ownerUrns, resources, ownershipType, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + ownerUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -73,7 +78,8 @@ public void batchAddOwners( * @param ownerUrns the urns of the owners to remove * @param resources references to the resources to change */ - public void batchRemoveOwners(@Nonnull List ownerUrns, @Nonnull List resources) { + public void batchRemoveOwners( + @Nonnull List ownerUrns, @Nonnull List resources) { batchRemoveOwners(ownerUrns, resources, this.systemAuthentication); } @@ -92,9 +98,11 @@ public void batchRemoveOwners( try { 
removeOwnersFromResources(ownerUrns, resources, authentication);
     } catch (Exception e) {
-      throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!",
-          ownerUrns,
-          resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())),
+      throw new RuntimeException(
+          String.format(
+              "Failed to batch remove Owners %s from resources with urns %s!",
+              ownerUrns,
+              resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())),
           e);
     }
   }
@@ -103,18 +111,18 @@ private void addOwnersToResources(
       List<Urn> ownerUrns,
       List<ResourceReference> resources,
       OwnershipType ownershipType,
-      Authentication authentication
-  ) throws Exception {
-    final List<MetadataChangeProposal> changes = buildAddOwnersProposals(ownerUrns, resources, ownershipType, authentication);
+      Authentication authentication)
+      throws Exception {
+    final List<MetadataChangeProposal> changes =
+        buildAddOwnersProposals(ownerUrns, resources, ownershipType, authentication);
     ingestChangeProposals(changes, authentication);
   }

   private void removeOwnersFromResources(
-      List<Urn> owners,
-      List<ResourceReference> resources,
-      Authentication authentication
-  ) throws Exception {
-    final List<MetadataChangeProposal> changes = buildRemoveOwnersProposals(owners, resources, authentication);
+      List<Urn> owners, List<ResourceReference> resources, Authentication authentication)
+      throws Exception {
+    final List<MetadataChangeProposal> changes =
+        buildRemoveOwnersProposals(owners, resources, authentication);
     ingestChangeProposals(changes, authentication);
   }

@@ -123,14 +131,13 @@ List<MetadataChangeProposal> buildAddOwnersProposals(
       List<Urn> ownerUrns,
       List<ResourceReference> resources,
       OwnershipType ownershipType,
-      Authentication authentication
-  ) {
+      Authentication authentication) {

-    final Map<Urn, Ownership> ownershipAspects = getOwnershipAspects(
-        resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()),
-        new Ownership(),
-        authentication
-    );
+    final Map<Urn, Ownership> ownershipAspects =
+        getOwnershipAspects(
+            resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()),
+            new Ownership(),
+            authentication);

     final List<MetadataChangeProposal> proposals = new ArrayList<>();
     for (ResourceReference resource : resources) {
@@ -142,28 +149,26 @@ List<MetadataChangeProposal> buildAddOwnersProposals(

       if (!owners.hasOwners()) {
         owners.setOwners(new OwnerArray());
-        owners.setLastModified(new AuditStamp()
-            .setTime(System.currentTimeMillis())
-            .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))
-        );
+        owners.setLastModified(
+            new AuditStamp()
+                .setTime(System.currentTimeMillis())
+                .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())));
       }
       addOwnersIfNotExists(owners, ownerUrns, ownershipType);
-      proposals.add(buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners));
+      proposals.add(
+          buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners));
     }

     return proposals;
   }

   @VisibleForTesting
   List<MetadataChangeProposal> buildRemoveOwnersProposals(
-      List<Urn> ownerUrns,
-      List<ResourceReference> resources,
-      Authentication authentication
-  ) {
-    final Map<Urn, Ownership> ownershipAspects = getOwnershipAspects(
-        resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()),
-        new Ownership(),
-        authentication
-    );
+      List<Urn> ownerUrns, List<ResourceReference> resources, Authentication authentication) {
+    final Map<Urn, Ownership> ownershipAspects =
+        getOwnershipAspects(
+            resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()),
+            new Ownership(),
+            authentication);

     final List<MetadataChangeProposal> proposals = new ArrayList<>();
     for (ResourceReference resource : resources) {
@@ -175,16 +180,15 @@ List<MetadataChangeProposal> buildRemoveOwnersProposals(
       owners.setOwners(new OwnerArray());
     }
     removeOwnersIfExists(owners, ownerUrns);
-      proposals.add(buildMetadataChangeProposal(
-          resource.getUrn(),
-          Constants.OWNERSHIP_ASPECT_NAME, owners
-      ));
+      proposals.add(
+          buildMetadataChangeProposal(resource.getUrn(), Constants.OWNERSHIP_ASPECT_NAME, owners));
     }

     return proposals;
   }

-  private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) {
+  private void addOwnersIfNotExists(
+      Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) {
     if (!owners.hasOwners()) {
       owners.setOwners(new OwnerArray());
     }
@@ -193,7 +197,8 @@ private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) {

     List<Urn> ownersToAdd = new ArrayList<>();
     for (Urn ownerUrn : ownerUrns) {
-      if (ownerAssociationArray.stream().anyMatch(association -> association.getOwner().equals(ownerUrn))) {
+      if (ownerAssociationArray.stream()
+          .anyMatch(association -> association.getOwner().equals(ownerUrn))) {
         continue;
       }
       ownersToAdd.add(ownerUrn);
@@ -212,6 +217,7 @@ private void addOwnersIfNotExists(Ownership owners, List<Urn> ownerUrns, OwnershipType ownershipType) {
       ownerAssociationArray.add(newOwner);
     }
   }
+
   @VisibleForTesting
   static Urn mapOwnershipTypeToEntity(String type) {
     final String typeName = SYSTEM_ID + type.toLowerCase();
diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java
index 821321b634881..f91f9fbfd93f8 100644
--- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java
+++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/OwnershipTypeService.java
@@ -20,39 +20,41 @@
 import javax.annotation.Nullable;
 import lombok.extern.slf4j.Slf4j;

-
 /**
- * This class is used to permit easy CRUD operations on a DataHub Ownership Type.
- * Currently it supports creating, updating, and removing a Ownership Type.
+ * This class is used to permit easy CRUD operations on a DataHub Ownership Type. Currently it
+ * supports creating, updating, and removing an Ownership Type.
  *
- * Note that no Authorization is performed within the service. The expectation
- * is that the caller has already verified the permissions of the active Actor.
+ * <p>Note that no Authorization is performed within the service. The expectation is that the caller
+ * has already verified the permissions of the active Actor.
  *
- * TODO: Ideally we have some basic caching of the view information inside of this class.
+ * <p>TODO: Ideally we have some basic caching of the view information inside of this class.
  */
 @Slf4j
 public class OwnershipTypeService extends BaseService {

   public static final String SYSTEM_ID = "__system__";

-  public OwnershipTypeService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
+  public OwnershipTypeService(
+      @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
     super(entityClient, systemAuthentication);
   }

   /**
    * Creates a new Ownership Type.
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation.
+   *

Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param name optional name of the Ownership Type * @param description optional description of the Ownership Type * @param authentication the current authentication * @param currentTimeMs the current time in millis - * * @return the urn of the newly created Ownership Type */ - public Urn createOwnershipType(String name, @Nullable String description, @Nonnull Authentication authentication, + public Urn createOwnershipType( + String name, + @Nullable String description, + @Nonnull Authentication authentication, long currentTimeMs) { Objects.requireNonNull(name, "name must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); @@ -66,26 +68,33 @@ public Urn createOwnershipType(String name, @Nullable String description, @Nonnu ownershipTypeInfo.setName(name); ownershipTypeInfo.setDescription(description, SetMode.IGNORE_NULL); final AuditStamp auditStamp = - new AuditStamp().setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())).setTime(currentTimeMs); + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); ownershipTypeInfo.setCreated(auditStamp); ownershipTypeInfo.setLastModified(auditStamp); // 3. Write the new Ownership Type to GMS, return the new URN. try { - final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.OWNERSHIP_TYPE_ENTITY_NAME); - return UrnUtils.getUrn(this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(entityUrn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - ownershipTypeInfo), authentication, false)); + final Urn entityUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.OWNERSHIP_TYPE_ENTITY_NAME); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, ownershipTypeInfo), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create Ownership Type", e); } } /** - * Updates an existing Ownership Type. If a provided field is null, the previous value will be kept. + * Updates an existing Ownership Type. If a provided field is null, the previous value will be + * kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *

Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param urn the urn of the Ownership Type * @param name optional name of the Ownership Type @@ -93,8 +102,12 @@ public Urn createOwnershipType(String name, @Nullable String description, @Nonnu * @param authentication the current authentication * @param currentTimeMs the current time in millis */ - public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullable String description, - @Nonnull Authentication authentication, long currentTimeMs) { + public void updateOwnershipType( + @Nonnull Urn urn, + @Nullable String name, + @Nullable String description, + @Nonnull Authentication authentication, + long currentTimeMs) { Objects.requireNonNull(urn, "urn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); @@ -103,7 +116,8 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab if (info == null) { throw new IllegalArgumentException( - String.format("Failed to update Ownership Type. Ownership Type with urn %s does not exist.", urn)); + String.format( + "Failed to update Ownership Type. Ownership Type with urn %s does not exist.", urn)); } // 2. Apply changes to existing Ownership Type @@ -115,12 +129,16 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab } info.setLastModified( - new AuditStamp().setTime(currentTimeMs).setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(urn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, info), authentication, + AspectUtils.buildMetadataChangeProposal( + urn, Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, info), + authentication, false); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", urn), e); @@ -130,15 +148,16 @@ public void updateOwnershipType(@Nonnull Urn urn, @Nullable String name, @Nullab /** * Deletes an existing Ownership Type with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + *
+   * <p>Note that this method does not do authorization validation. It is assumed that users of this
+   * class have already authorized the operation.
    *
-   * If the Ownership Type does not exist, no exception will be thrown.
+   *

If the Ownership Type does not exist, no exception will be thrown. * * @param urn the urn of the Ownership Type * @param authentication the current authentication */ - public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Nonnull Authentication authentication) { + public void deleteOwnershipType( + @Nonnull Urn urn, boolean deleteReferences, @Nonnull Authentication authentication) { Objects.requireNonNull(urn, "Ownership TypeUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -146,8 +165,11 @@ public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Non log.info("Soft deleting ownership type: {}", urn); final Status statusAspect = new Status(); statusAspect.setRemoved(true); - this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal(urn, Constants.STATUS_ASPECT_NAME, - statusAspect), authentication, false); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.STATUS_ASPECT_NAME, statusAspect), + authentication, + false); } else { this.entityClient.deleteEntity(urn, authentication); if (deleteReferences) { @@ -155,12 +177,14 @@ public void deleteOwnershipType(@Nonnull Urn urn, boolean deleteReferences, @Non } } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete Ownership Type with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to delete Ownership Type with urn %s", urn), e); } } /** * Return whether the provided urn is for a system provided ownership type. + * * @param urn the urn of the Ownership Type * @return true is the ownership type is a system default. */ @@ -169,21 +193,23 @@ private boolean isSystemOwnershipType(Urn urn) { } /** - * Returns an instance of {@link OwnershipTypeInfo} for the specified Ownership Type urn, - * or null if one cannot be found. + * Returns an instance of {@link OwnershipTypeInfo} for the specified Ownership Type urn, or null + * if one cannot be found. * * @param ownershipTypeUrn the urn of the Ownership Type * @param authentication the authentication to use - * - * @return an instance of {@link OwnershipTypeInfo} for the Ownership Type, null if it does not exist. + * @return an instance of {@link OwnershipTypeInfo} for the Ownership Type, null if it does not + * exist. 
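Because getOwnershipTypeInfo returns null instead of throwing when the aspect is missing, callers branch explicitly; a small usage sketch (variable names assumed):

OwnershipTypeInfo info = ownershipTypeService.getOwnershipTypeInfo(urn, authentication);
if (info == null) {
  throw new IllegalArgumentException(
      String.format("Ownership Type with urn %s does not exist", urn));
}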
*/ @Nullable - public OwnershipTypeInfo getOwnershipTypeInfo(@Nonnull final Urn ownershipTypeUrn, - @Nonnull final Authentication authentication) { + public OwnershipTypeInfo getOwnershipTypeInfo( + @Nonnull final Urn ownershipTypeUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(ownershipTypeUrn, "ownershipTypeUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); - final EntityResponse response = getOwnershipTypeEntityResponse(ownershipTypeUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)) { + final EntityResponse response = + getOwnershipTypeEntityResponse(ownershipTypeUrn, authentication); + if (response != null + && response.getAspects().containsKey(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME)) { return new OwnershipTypeInfo( response.getAspects().get(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME).getValue().data()); } @@ -192,24 +218,28 @@ public OwnershipTypeInfo getOwnershipTypeInfo(@Nonnull final Urn ownershipTypeUr } /** - * Returns an instance of {@link EntityResponse} for the specified Ownership Type urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified Ownership Type urn, or null if + * one cannot be found. * * @param ownershipTypeUrn the urn of the Ownership Type. * @param authentication the authentication to use - * - * @return an instance of {@link EntityResponse} for the Ownership Type, null if it does not exist. + * @return an instance of {@link EntityResponse} for the Ownership Type, null if it does not + * exist. */ @Nullable - public EntityResponse getOwnershipTypeEntityResponse(@Nonnull final Urn ownershipTypeUrn, - @Nonnull final Authentication authentication) { + public EntityResponse getOwnershipTypeEntityResponse( + @Nonnull final Urn ownershipTypeUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(ownershipTypeUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - return this.entityClient.getV2(Constants.OWNERSHIP_TYPE_ENTITY_NAME, ownershipTypeUrn, - ImmutableSet.of(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME), authentication); + return this.entityClient.getV2( + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + ownershipTypeUrn, + ImmutableSet.of(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME), + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Ownership Type with urn %s", ownershipTypeUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve Ownership Type with urn %s", ownershipTypeUrn), e); } } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java index b3765d1d9a4e0..ae289c067a78f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/QueryService.java @@ -27,27 +27,26 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a Query - * Currently it supports creating and removing a Query. - * - * Note that no Authorization is performed within the service. 
The expectation
- * is that the caller has already verified the permissions of the active Actor.
+ * This class is used to permit easy CRUD operations on a Query. Currently it supports creating and
+ * removing a Query.
  *
+ * <p>Note that no Authorization is performed within the service. The expectation is that the caller
+ * has already verified the permissions of the active Actor.
  */
 @Slf4j
 public class QueryService extends BaseService {

-  public QueryService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
+  public QueryService(
+      @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) {
     super(entityClient, systemAuthentication);
   }

   /**
    * Creates a new Query.
    *
-   * Note that this method does not do authorization validation.
-   * It is assumed that users of this class have already authorized the operation.
+   *

Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param name optional name of the Query * @param description optional description of the Query @@ -56,7 +55,6 @@ public QueryService(@Nonnull EntityClient entityClient, @Nonnull Authentication * @param subjects the query subjects * @param authentication the current authentication * @param currentTimeMs the current time in millis - * * @return the urn of the newly created View */ public Urn createQuery( @@ -82,9 +80,10 @@ public Urn createQuery( queryProperties.setStatement(statement); queryProperties.setName(name, SetMode.IGNORE_NULL); queryProperties.setDescription(description, SetMode.IGNORE_NULL); - final AuditStamp auditStamp = new AuditStamp() - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - .setTime(currentTimeMs); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); queryProperties.setCreated(auditStamp); queryProperties.setLastModified(auditStamp); @@ -95,12 +94,17 @@ public Urn createQuery( // 3. Write the new query to GMS, return the new URN. try { final Urn entityUrn = EntityKeyUtils.convertEntityKeyToUrn(key, Constants.QUERY_ENTITY_NAME); - this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.QUERY_PROPERTIES_ASPECT_NAME, queryProperties), authentication, - false); - return UrnUtils.getUrn(this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - entityUrn, Constants.QUERY_SUBJECTS_ASPECT_NAME, querySubjects), authentication, - false)); + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.QUERY_PROPERTIES_ASPECT_NAME, queryProperties), + authentication, + false); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + entityUrn, Constants.QUERY_SUBJECTS_ASPECT_NAME, querySubjects), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create Query", e); } @@ -109,8 +113,8 @@ public Urn createQuery( /** * Updates an existing Query. If a provided field is null, the previous value will be kept. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *

Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param urn the urn of the query * @param name optional name of the Query @@ -135,7 +139,8 @@ public void updateQuery( QueryProperties properties = getQueryProperties(urn, authentication); if (properties == null) { - throw new IllegalArgumentException(String.format("Failed to update Query. Query with urn %s does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update Query. Query with urn %s does not exist.", urn)); } // 2. Apply changes to existing Query @@ -149,17 +154,23 @@ public void updateQuery( properties.setStatement(statement); } - properties.setLastModified(new AuditStamp() - .setTime(currentTimeMs) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + properties.setLastModified( + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { final List aspectsToIngest = new ArrayList<>(); - aspectsToIngest.add(AspectUtils.buildMetadataChangeProposal(urn, Constants.QUERY_PROPERTIES_ASPECT_NAME, properties)); + aspectsToIngest.add( + AspectUtils.buildMetadataChangeProposal( + urn, Constants.QUERY_PROPERTIES_ASPECT_NAME, properties)); if (subjects != null) { - aspectsToIngest.add(AspectUtils.buildMetadataChangeProposal(urn, Constants.QUERY_SUBJECTS_ASPECT_NAME, new QuerySubjects() - .setSubjects(new QuerySubjectArray(subjects)))); + aspectsToIngest.add( + AspectUtils.buildMetadataChangeProposal( + urn, + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new QuerySubjects().setSubjects(new QuerySubjectArray(subjects)))); } this.entityClient.batchIngestProposals(aspectsToIngest, authentication, false); } catch (Exception e) { @@ -170,17 +181,15 @@ public void updateQuery( /** * Deletes an existing Query with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + *
<p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * - * If the Query does not exist, no exception will be thrown. + *
<p>
If the Query does not exist, no exception will be thrown. * * @param queryUrn the urn of the Query * @param authentication the current authentication */ - public void deleteQuery( - @Nonnull Urn queryUrn, - @Nonnull Authentication authentication) { + public void deleteQuery(@Nonnull Urn queryUrn, @Nonnull Authentication authentication) { try { this.entityClient.deleteEntity( Objects.requireNonNull(queryUrn, "queryUrn must not be null"), @@ -191,69 +200,74 @@ public void deleteQuery( } /** - * Returns an instance of {@link QueryProperties} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link QueryProperties} for the specified Query urn, or null if one + * cannot be found. * * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link QueryProperties} for the Query, null if it does not exist. */ @Nullable - public QueryProperties getQueryProperties(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public QueryProperties getQueryProperties( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getQueryEntityResponse(queryUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.QUERY_PROPERTIES_ASPECT_NAME)) { - return new QueryProperties(response.getAspects().get(Constants.QUERY_PROPERTIES_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.QUERY_PROPERTIES_ASPECT_NAME)) { + return new QueryProperties( + response.getAspects().get(Constants.QUERY_PROPERTIES_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link QuerySubjects} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link QuerySubjects} for the specified Query urn, or null if one cannot + * be found. * * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link QuerySubjects} for the Query, null if it does not exist. */ @Nullable - public QuerySubjects getQuerySubjects(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public QuerySubjects getQuerySubjects( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getQueryEntityResponse(queryUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.QUERY_SUBJECTS_ASPECT_NAME)) { - return new QuerySubjects(response.getAspects().get(Constants.QUERY_SUBJECTS_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.QUERY_SUBJECTS_ASPECT_NAME)) { + return new QuerySubjects( + response.getAspects().get(Constants.QUERY_SUBJECTS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link EntityResponse} for the specified Query urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified Query urn, or null if one + * cannot be found. 
* * @param queryUrn the urn of the Query * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the Query, null if it does not exist. */ @Nullable - public EntityResponse getQueryEntityResponse(@Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { + public EntityResponse getQueryEntityResponse( + @Nonnull final Urn queryUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(queryUrn, "queryUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { return this.entityClient.getV2( Constants.QUERY_ENTITY_NAME, queryUrn, - ImmutableSet.of(Constants.QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME), - authentication - ); + ImmutableSet.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME), + authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Query with urn %s", queryUrn), e); + throw new RuntimeException( + String.format("Failed to retrieve Query with urn %s", queryUrn), e); } } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java index 58645166a21ef..08b14fc84d7c8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/SettingsService.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -15,20 +17,19 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * This class is used to permit easy CRUD operations on both Global and Personal * DataHub settings. * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + *
<p>
Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. */ @Slf4j public class SettingsService extends BaseService { - public SettingsService(@Nonnull final EntityClient entityClient, @Nonnull final Authentication systemAuthentication) { + public SettingsService( + @Nonnull final EntityClient entityClient, + @Nonnull final Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -37,37 +38,38 @@ public SettingsService(@Nonnull final EntityClient entityClient, @Nonnull final * * @param user the urn of the user to fetch settings for * @param authentication the current authentication - * * @return an instance of {@link CorpUserSettings} for the specified user, or null if none exists. */ @Nullable public CorpUserSettings getCorpUserSettings( - @Nonnull final Urn user, - @Nonnull final Authentication authentication) { + @Nonnull final Urn user, @Nonnull final Authentication authentication) { Objects.requireNonNull(user, "user must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - EntityResponse response = this.entityClient.getV2( - CORP_USER_ENTITY_NAME, - user, - ImmutableSet.of(CORP_USER_SETTINGS_ASPECT_NAME), - authentication - ); - if (response != null && response.getAspects().containsKey(Constants.CORP_USER_SETTINGS_ASPECT_NAME)) { - return new CorpUserSettings(response.getAspects().get(Constants.CORP_USER_SETTINGS_ASPECT_NAME).getValue().data()); + EntityResponse response = + this.entityClient.getV2( + CORP_USER_ENTITY_NAME, + user, + ImmutableSet.of(CORP_USER_SETTINGS_ASPECT_NAME), + authentication); + if (response != null + && response.getAspects().containsKey(Constants.CORP_USER_SETTINGS_ASPECT_NAME)) { + return new CorpUserSettings( + response.getAspects().get(Constants.CORP_USER_SETTINGS_ASPECT_NAME).getValue().data()); } // No aspect found return null; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve Corp User settings for user with urn %s", user), e); + throw new RuntimeException( + String.format("Failed to retrieve Corp User settings for user with urn %s", user), e); } } /** * Updates the settings for a given user. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param user the urn of the user * @param authentication the current authentication @@ -80,13 +82,13 @@ public void updateCorpUserSettings( Objects.requireNonNull(newSettings, "newSettings must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - MetadataChangeProposal proposal = AspectUtils.buildMetadataChangeProposal( - user, - CORP_USER_SETTINGS_ASPECT_NAME, - newSettings); + MetadataChangeProposal proposal = + AspectUtils.buildMetadataChangeProposal( + user, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); this.entityClient.ingestProposal(proposal, authentication, false); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Corp User settings for user with urn %s", user), e); + throw new RuntimeException( + String.format("Failed to update Corp User settings for user with urn %s", user), e); } } @@ -99,17 +101,24 @@ public void updateCorpUserSettings( public GlobalSettingsInfo getGlobalSettings(@Nonnull final Authentication authentication) { Objects.requireNonNull(authentication, "authentication must not be null"); try { - EntityResponse response = this.entityClient.getV2( - GLOBAL_SETTINGS_ENTITY_NAME, - GLOBAL_SETTINGS_URN, - ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME), - authentication - ); - if (response != null && response.getAspects().containsKey(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)) { - return new GlobalSettingsInfo(response.getAspects().get(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + this.entityClient.getV2( + GLOBAL_SETTINGS_ENTITY_NAME, + GLOBAL_SETTINGS_URN, + ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME), + authentication); + if (response != null + && response.getAspects().containsKey(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)) { + return new GlobalSettingsInfo( + response + .getAspects() + .get(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME) + .getValue() + .data()); } // No aspect found - log.warn("Failed to retrieve Global Settings. No settings exist, but they should. Returning null"); + log.warn( + "Failed to retrieve Global Settings. No settings exist, but they should. Returning null"); return null; } catch (Exception e) { throw new RuntimeException("Failed to retrieve Global Settings!", e); @@ -119,27 +128,25 @@ public GlobalSettingsInfo getGlobalSettings(@Nonnull final Authentication authen /** * Updates the Global settings. * - * This performs a read-modify-write of the underlying GlobalSettingsInfo aspect. + *
<p>This performs a read-modify-write of the underlying GlobalSettingsInfo aspect. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param newSettings the new value for the global settings. * @param authentication the current authentication */ public void updateGlobalSettings( - @Nonnull final GlobalSettingsInfo newSettings, - @Nonnull final Authentication authentication) { + @Nonnull final GlobalSettingsInfo newSettings, @Nonnull final Authentication authentication) { Objects.requireNonNull(newSettings, "newSettings must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { - MetadataChangeProposal proposal = AspectUtils.buildMetadataChangeProposal( - GLOBAL_SETTINGS_URN, - GLOBAL_SETTINGS_INFO_ASPECT_NAME, - newSettings); + MetadataChangeProposal proposal = + AspectUtils.buildMetadataChangeProposal( + GLOBAL_SETTINGS_URN, GLOBAL_SETTINGS_INFO_ASPECT_NAME, newSettings); this.entityClient.ingestProposal(proposal, authentication, false); } catch (Exception e) { throw new RuntimeException("Failed to update Global settings", e); } } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java index 9e12fc80a3cdb..a03c98411cb6f 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/TagService.java @@ -1,5 +1,8 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.entity.AspectUtils.*; + +import com.datahub.authentication.Authentication; import com.google.common.annotations.VisibleForTesting; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -7,6 +10,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.resource.ResourceReference; import com.linkedin.metadata.resource.SubResourceType; @@ -20,18 +24,14 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import com.linkedin.entity.client.EntityClient; -import com.datahub.authentication.Authentication; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j public class TagService extends BaseService { - public TagService(@Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { + public TagService( + @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } @@ -45,23 +45,26 @@ public void batchAddTags(@Nonnull List tagUrns, @Nonnull List tagUrns, @Nonnull List resources, @Nonnull Authentication authentication) { + public void batchAddTags( + @Nonnull List tagUrns, + @Nonnull List resources, + @Nonnull Authentication authentication) { log.debug("Batch adding Tags to entities. 
tags: {}, resources: {}", resources, tagUrns); try { addTagsToResources(tagUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -71,9 +74,9 @@ public void batchAddTags(@Nonnull List tagUrns, @Nonnull List tagUrns, @Nonnull List resources) { + public void batchRemoveTags( + @Nonnull List tagUrns, @Nonnull List resources) { batchRemoveTags(tagUrns, resources, this.systemAuthentication); } @@ -83,16 +86,20 @@ public void batchRemoveTags(@Nonnull List tagUrns, @Nonnull List tagUrns, @Nonnull List resources, @Nonnull Authentication authentication) { + public void batchRemoveTags( + @Nonnull List tagUrns, + @Nonnull List resources, + @Nonnull Authentication authentication) { log.debug("Batch adding Tags to entities. tags: {}, resources: {}", resources, tagUrns); try { removeTagsFromResources(tagUrns, resources, authentication); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toList())), e); } } @@ -100,39 +107,46 @@ public void batchRemoveTags(@Nonnull List tagUrns, @Nonnull List tagUrns, List resources, - @Nonnull Authentication authentication - ) throws Exception { - final List changes = buildAddTagsProposals(tagUrns, resources, authentication); + @Nonnull Authentication authentication) + throws Exception { + final List changes = + buildAddTagsProposals(tagUrns, resources, authentication); ingestChangeProposals(changes, authentication); } private void removeTagsFromResources( - List tags, - List resources, - @Nonnull Authentication authentication - ) throws Exception { - final List changes = buildRemoveTagsProposals(tags, resources, authentication); + List tags, List resources, @Nonnull Authentication authentication) + throws Exception { + final List changes = + buildRemoveTagsProposals(tags, resources, authentication); ingestChangeProposals(changes, authentication); } @VisibleForTesting List buildAddTagsProposals( - List tagUrns, - List resources, - Authentication authentication - ) throws URISyntaxException { + List tagUrns, List resources, Authentication authentication) + throws URISyntaxException { final List changes = new ArrayList<>(); - final List entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List entityProposals = buildAddTagsToEntityProposals(tagUrns, entityRefs, authentication); - - final List schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List schemaFieldProposals = buildAddTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); + final List entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == 
null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List entityProposals = + buildAddTagsToEntityProposals(tagUrns, entityRefs, authentication); + + final List schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List schemaFieldProposals = + buildAddTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -142,21 +156,27 @@ List buildAddTagsProposals( @VisibleForTesting List buildRemoveTagsProposals( - List tagUrns, - List resources, - Authentication authentication - ) { + List tagUrns, List resources, Authentication authentication) { final List changes = new ArrayList<>(); - final List entityRefs = resources.stream() - .filter(resource -> resource.getSubResource() == null || resource.getSubResource().equals("")) - .collect(Collectors.toList()); - final List entityProposals = buildRemoveTagsToEntityProposals(tagUrns, entityRefs, authentication); - - final List schemaFieldRefs = resources.stream() - .filter(resource -> resource.getSubResourceType() != null && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) - .collect(Collectors.toList()); - final List schemaFieldProposals = buildRemoveTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); + final List entityRefs = + resources.stream() + .filter( + resource -> + resource.getSubResource() == null || resource.getSubResource().equals("")) + .collect(Collectors.toList()); + final List entityProposals = + buildRemoveTagsToEntityProposals(tagUrns, entityRefs, authentication); + + final List schemaFieldRefs = + resources.stream() + .filter( + resource -> + resource.getSubResourceType() != null + && resource.getSubResourceType().equals(SubResourceType.DATASET_FIELD)) + .collect(Collectors.toList()); + final List schemaFieldProposals = + buildRemoveTagsToSubResourceProposals(tagUrns, schemaFieldRefs, authentication); changes.addAll(entityProposals); changes.addAll(schemaFieldProposals); @@ -166,15 +186,13 @@ List buildRemoveTagsProposals( @VisibleForTesting List buildAddTagsToEntityProposals( - List tagUrns, - List resources, - Authentication authentication - ) throws URISyntaxException { - final Map tagsAspects = getTagsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlobalTags(), - authentication - ); + List tagUrns, List resources, Authentication authentication) + throws URISyntaxException { + final Map tagsAspects = + getTagsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlobalTags(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -186,11 +204,9 @@ List buildAddTagsToEntityProposals( globalTags.setTags(new TagAssociationArray()); } addTagsIfNotExists(globalTags, tagUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOBAL_TAGS_ASPECT_NAME, - globalTags - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, globalTags); changes.add(proposal); } return changes; @@ -200,32 +216,37 @@ List buildAddTagsToEntityProposals( List buildAddTagsToSubResourceProposals( final List tagUrns, final List resources, - final Authentication authentication - ) throws 
URISyntaxException { + final Authentication authentication) + throws URISyntaxException { - final Map editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + final Map editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -233,15 +254,12 @@ List buildAddTagsToSubResourceProposals( @VisibleForTesting List buildRemoveTagsToEntityProposals( - List tagUrns, - List resources, - Authentication authentication - ) { - final Map tagsAspects = getTagsAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new GlobalTags(), - authentication - ); + List tagUrns, List resources, Authentication authentication) { + final Map tagsAspects = + getTagsAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new GlobalTags(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { @@ -253,11 +271,9 @@ List buildRemoveTagsToEntityProposals( globalTags.setTags(new TagAssociationArray()); } removeTagsIfExists(globalTags, tagUrns); - MetadataChangeProposal proposal = buildMetadataChangeProposal( - resource.getUrn(), - Constants.GLOBAL_TAGS_ASPECT_NAME, - globalTags - ); + MetadataChangeProposal proposal = + buildMetadataChangeProposal( + resource.getUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, globalTags); changes.add(proposal); } @@ -268,30 +284,34 @@ List buildRemoveTagsToEntityProposals( List buildRemoveTagsToSubResourceProposals( List tagUrns, List resources, - @Nonnull Authentication authentication - ) { - final Map editableSchemaMetadataAspects = getEditableSchemaMetadataAspects( - resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), - new EditableSchemaMetadata(), - authentication - ); + @Nonnull Authentication authentication) { + final Map editableSchemaMetadataAspects = + getEditableSchemaMetadataAspects( + resources.stream().map(ResourceReference::getUrn).collect(Collectors.toSet()), + new EditableSchemaMetadata(), + authentication); final List changes = new ArrayList<>(); for (ResourceReference resource : resources) { - EditableSchemaMetadata editableSchemaMetadata = 
editableSchemaMetadataAspects.get(resource.getUrn()); + EditableSchemaMetadata editableSchemaMetadata = + editableSchemaMetadataAspects.get(resource.getUrn()); if (editableSchemaMetadata == null) { continue; // Something went wrong. } - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - changes.add(buildMetadataChangeProposal(resource.getUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - editableSchemaMetadata)); + changes.add( + buildMetadataChangeProposal( + resource.getUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata)); } return changes; @@ -306,7 +326,8 @@ private void addTagsIfNotExists(GlobalTags tags, List tagUrns) throws URISy List tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -336,18 +357,16 @@ private static TagAssociationArray removeTagsIfExists(GlobalTags tags, List } private static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java index 026eb3cd61def..b4a683d2e2c68 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/ViewService.java @@ -21,35 +21,34 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - /** - * This class is used to permit easy CRUD operations on a DataHub View. - * Currently it supports creating, updating, and removing a View. + * This class is used to permit easy CRUD operations on a DataHub View. Currently it supports + * creating, updating, and removing a View. * - * Note that no Authorization is performed within the service. The expectation - * is that the caller has already verified the permissions of the active Actor. + *
<p>Note that no Authorization is performed within the service. The expectation is that the caller + * has already verified the permissions of the active Actor. * - * TODO: Ideally we have some basic caching of the view information inside of this class. + *
<p>TODO: Ideally we have some basic caching of the view information inside of this class. */ @Slf4j public class ViewService extends BaseService { public ViewService( @Nonnull EntityClient entityClient, @Nonnull Authentication systemAuthentication) { super(entityClient, systemAuthentication); } /** * Creates a new DataHub View. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *
<p>
Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * * @param type the type of the View * @param name the name of the View * @param description the description of the View * @param definition the view definition, a.k.a. the View definition * @param authentication the current authentication - * * @return the urn of the newly created View */ public Urn createView( @@ -74,43 +73,49 @@ public Urn createView( newView.setName(name); newView.setDescription(description, SetMode.IGNORE_NULL); newView.setDefinition(definition); - final AuditStamp auditStamp = new AuditStamp() - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) - .setTime(currentTimeMs); + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr())) + .setTime(currentTimeMs); newView.setCreated(auditStamp); newView.setLastModified(auditStamp); - // 3. Write the new view to GMS, return the new URN. try { - return UrnUtils.getUrn(this.entityClient.ingestProposal(AspectUtils.buildMetadataChangeProposal( - EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATAHUB_VIEW_ENTITY_NAME), Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, newView), authentication, - false)); + return UrnUtils.getUrn( + this.entityClient.ingestProposal( + AspectUtils.buildMetadataChangeProposal( + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATAHUB_VIEW_ENTITY_NAME), + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + newView), + authentication, + false)); } catch (Exception e) { throw new RuntimeException("Failed to create View", e); } } /** - * Updates an existing DataHub View with a specific urn. The overwrites only the fields - * which are not null (provided). + * Updates an existing DataHub View with a specific urn. The overwrites only the fields which are + * not null (provided). * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation. + *
<p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * - * The View with the provided urn must exist, else an {@link IllegalArgumentException} will be + *
<p>The View with the provided urn must exist, else an {@link IllegalArgumentException} will be * thrown. * - * This method will perform a read-modify-write. This can cause concurrent writes - * to conflict, and overwrite one another. The expected frequency of writes - * for views is very low, however. TODO: Convert this into a safer patch. + *
<p>
This method will perform a read-modify-write. This can cause concurrent writes to conflict, + * and overwrite one another. The expected frequency of writes for views is very low, however. + * TODO: Convert this into a safer patch. * * @param viewUrn the urn of the View * @param name the name of the View * @param description the description of the View * @param definition the view definition itself * @param authentication the current authentication - * @param currentTimeMs the current time in milliseconds, used for populating the lastUpdatedAt field. + * @param currentTimeMs the current time in milliseconds, used for populating the lastUpdatedAt + * field. */ public void updateView( @Nonnull Urn viewUrn, @@ -126,7 +131,8 @@ public void updateView( DataHubViewInfo existingInfo = getViewInfo(viewUrn, authentication); if (existingInfo == null) { - throw new IllegalArgumentException(String.format("Failed to update View. View with urn %s does not exist.", viewUrn)); + throw new IllegalArgumentException( + String.format("Failed to update View. View with urn %s does not exist.", viewUrn)); } // 2. Apply changes to existing View @@ -140,15 +146,18 @@ public void updateView( existingInfo.setDefinition(definition); } - existingInfo.setLastModified(new AuditStamp() - .setTime(currentTimeMs) - .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); + existingInfo.setLastModified( + new AuditStamp() + .setTime(currentTimeMs) + .setActor(UrnUtils.getUrn(authentication.getActor().toUrnStr()))); // 3. Write changes to GMS try { this.entityClient.ingestProposal( - AspectUtils.buildMetadataChangeProposal(viewUrn, Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, existingInfo), - authentication, false); + AspectUtils.buildMetadataChangeProposal( + viewUrn, Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, existingInfo), + authentication, + false); } catch (Exception e) { throw new RuntimeException(String.format("Failed to update View with urn %s", viewUrn), e); } @@ -157,17 +166,15 @@ public void updateView( /** * Deletes an existing DataHub View with a specific urn. * - * Note that this method does not do authorization validation. - * It is assumed that users of this class have already authorized the operation + *
<p>Note that this method does not do authorization validation. It is assumed that users of this + * class have already authorized the operation. * - * If the View does not exist, no exception will be thrown. + *
<p>
If the View does not exist, no exception will be thrown. * * @param viewUrn the urn of the View * @param authentication the current authentication */ - public void deleteView( - @Nonnull Urn viewUrn, - @Nonnull Authentication authentication) { + public void deleteView(@Nonnull Urn viewUrn, @Nonnull Authentication authentication) { try { this.entityClient.deleteEntity( Objects.requireNonNull(viewUrn, "viewUrn must not be null"), @@ -178,37 +185,39 @@ public void deleteView( } /** - * Returns an instance of {@link DataHubViewInfo} for the specified View urn, - * or null if one cannot be found. + * Returns an instance of {@link DataHubViewInfo} for the specified View urn, or null if one + * cannot be found. * * @param viewUrn the urn of the View * @param authentication the authentication to use - * * @return an instance of {@link DataHubViewInfo} for the View, null if it does not exist. */ @Nullable - public DataHubViewInfo getViewInfo(@Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { + public DataHubViewInfo getViewInfo( + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); final EntityResponse response = getViewEntityResponse(viewUrn, authentication); - if (response != null && response.getAspects().containsKey(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME)) { - return new DataHubViewInfo(response.getAspects().get(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME).getValue().data()); + if (response != null + && response.getAspects().containsKey(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME)) { + return new DataHubViewInfo( + response.getAspects().get(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME).getValue().data()); } // No aspect found return null; } /** - * Returns an instance of {@link EntityResponse} for the specified View urn, - * or null if one cannot be found. + * Returns an instance of {@link EntityResponse} for the specified View urn, or null if one cannot + * be found. * * @param viewUrn the urn of the View * @param authentication the authentication to use - * * @return an instance of {@link EntityResponse} for the View, null if it does not exist. 
*/ @Nullable - public EntityResponse getViewEntityResponse(@Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { + public EntityResponse getViewEntityResponse( + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(authentication, "authentication must not be null"); try { @@ -216,8 +225,7 @@ public EntityResponse getViewEntityResponse(@Nonnull final Urn viewUrn, @Nonnull Constants.DATAHUB_VIEW_ENTITY_NAME, viewUrn, ImmutableSet.of(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME), - authentication - ); + authentication); } catch (Exception e) { throw new RuntimeException(String.format("Failed to retrieve View with urn %s", viewUrn), e); } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java index 928c70a7b3de1..c618db801d9d6 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java @@ -11,154 +11,169 @@ import com.linkedin.metadata.search.LineageSearchEntityArray; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.ScrollResult; -import lombok.extern.slf4j.Slf4j; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import java.util.Objects; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class ValidationUtils { - - public static SearchResult validateSearchResult(final SearchResult searchResult, - @Nonnull final EntityService entityService) { + public static SearchResult validateSearchResult( + final SearchResult searchResult, @Nonnull final EntityService entityService) { if (searchResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - SearchResult validatedSearchResult = new SearchResult().setFrom(searchResult.getFrom()) - .setMetadata(searchResult.getMetadata()) - .setPageSize(searchResult.getPageSize()) - .setNumEntities(searchResult.getNumEntities()); - - SearchEntityArray validatedEntities = searchResult.getEntities() - .stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) - .collect(Collectors.toCollection(SearchEntityArray::new)); + SearchResult validatedSearchResult = + new SearchResult() + .setFrom(searchResult.getFrom()) + .setMetadata(searchResult.getMetadata()) + .setPageSize(searchResult.getPageSize()) + .setNumEntities(searchResult.getNumEntities()); + + SearchEntityArray validatedEntities = + searchResult.getEntities().stream() + .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + .collect(Collectors.toCollection(SearchEntityArray::new)); validatedSearchResult.setEntities(validatedEntities); return validatedSearchResult; } - public static ScrollResult validateScrollResult(final ScrollResult scrollResult, - @Nonnull final EntityService entityService) { + public static ScrollResult validateScrollResult( + final ScrollResult scrollResult, @Nonnull final EntityService entityService) { if (scrollResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - ScrollResult validatedScrollResult = new ScrollResult() - 
.setMetadata(scrollResult.getMetadata()) - .setPageSize(scrollResult.getPageSize()) - .setNumEntities(scrollResult.getNumEntities()); + ScrollResult validatedScrollResult = + new ScrollResult() + .setMetadata(scrollResult.getMetadata()) + .setPageSize(scrollResult.getPageSize()) + .setNumEntities(scrollResult.getNumEntities()); if (scrollResult.getScrollId() != null) { validatedScrollResult.setScrollId(scrollResult.getScrollId()); } - SearchEntityArray validatedEntities = scrollResult.getEntities() - .stream() - .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) - .collect(Collectors.toCollection(SearchEntityArray::new)); + SearchEntityArray validatedEntities = + scrollResult.getEntities().stream() + .filter(searchEntity -> entityService.exists(searchEntity.getEntity())) + .collect(Collectors.toCollection(SearchEntityArray::new)); validatedScrollResult.setEntities(validatedEntities); return validatedScrollResult; } - public static BrowseResult validateBrowseResult(final BrowseResult browseResult, - @Nonnull final EntityService entityService) { + public static BrowseResult validateBrowseResult( + final BrowseResult browseResult, @Nonnull final EntityService entityService) { if (browseResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - BrowseResult validatedBrowseResult = new BrowseResult().setGroups(browseResult.getGroups()) - .setMetadata(browseResult.getMetadata()) - .setFrom(browseResult.getFrom()) - .setPageSize(browseResult.getPageSize()) - .setNumGroups(browseResult.getNumGroups()) - .setNumEntities(browseResult.getNumEntities()) - .setNumElements(browseResult.getNumElements()); - - BrowseResultEntityArray validatedEntities = browseResult.getEntities() - .stream() - .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) - .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + BrowseResult validatedBrowseResult = + new BrowseResult() + .setGroups(browseResult.getGroups()) + .setMetadata(browseResult.getMetadata()) + .setFrom(browseResult.getFrom()) + .setPageSize(browseResult.getPageSize()) + .setNumGroups(browseResult.getNumGroups()) + .setNumEntities(browseResult.getNumEntities()) + .setNumElements(browseResult.getNumElements()); + + BrowseResultEntityArray validatedEntities = + browseResult.getEntities().stream() + .filter(browseResultEntity -> entityService.exists(browseResultEntity.getUrn())) + .collect(Collectors.toCollection(BrowseResultEntityArray::new)); validatedBrowseResult.setEntities(validatedEntities); - return validatedBrowseResult; } - public static ListResult validateListResult(final ListResult listResult, @Nonnull final EntityService entityService) { + public static ListResult validateListResult( + final ListResult listResult, @Nonnull final EntityService entityService) { if (listResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - ListResult validatedListResult = new ListResult().setStart(listResult.getStart()) - .setCount(listResult.getCount()) - .setTotal(listResult.getTotal()); + ListResult validatedListResult = + new ListResult() + .setStart(listResult.getStart()) + .setCount(listResult.getCount()) + .setTotal(listResult.getTotal()); UrnArray validatedEntities = - listResult.getEntities().stream().filter(entityService::exists).collect(Collectors.toCollection(UrnArray::new)); + listResult.getEntities().stream() + .filter(entityService::exists) + .collect(Collectors.toCollection(UrnArray::new)); 
validatedListResult.setEntities(validatedEntities); return validatedListResult; } - public static LineageSearchResult validateLineageSearchResult(final LineageSearchResult lineageSearchResult, - @Nonnull final EntityService entityService) { + public static LineageSearchResult validateLineageSearchResult( + final LineageSearchResult lineageSearchResult, @Nonnull final EntityService entityService) { if (lineageSearchResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); LineageSearchResult validatedLineageSearchResult = - new LineageSearchResult().setMetadata(lineageSearchResult.getMetadata()) + new LineageSearchResult() + .setMetadata(lineageSearchResult.getMetadata()) .setFrom(lineageSearchResult.getFrom()) .setPageSize(lineageSearchResult.getPageSize()) .setNumEntities(lineageSearchResult.getNumEntities()); - LineageSearchEntityArray validatedEntities = lineageSearchResult.getEntities() - .stream() - .filter(entity -> entityService.exists(entity.getEntity())) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + LineageSearchEntityArray validatedEntities = + lineageSearchResult.getEntities().stream() + .filter(entity -> entityService.exists(entity.getEntity())) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); validatedLineageSearchResult.setEntities(validatedEntities); return validatedLineageSearchResult; } - public static EntityLineageResult validateEntityLineageResult(@Nullable final EntityLineageResult entityLineageResult, + public static EntityLineageResult validateEntityLineageResult( + @Nullable final EntityLineageResult entityLineageResult, @Nonnull final EntityService entityService) { if (entityLineageResult == null) { return null; } Objects.requireNonNull(entityService, "entityService must not be null"); - final EntityLineageResult validatedEntityLineageResult = new EntityLineageResult() - .setStart(entityLineageResult.getStart()) - .setCount(entityLineageResult.getCount()) - .setTotal(entityLineageResult.getTotal()); + final EntityLineageResult validatedEntityLineageResult = + new EntityLineageResult() + .setStart(entityLineageResult.getStart()) + .setCount(entityLineageResult.getCount()) + .setTotal(entityLineageResult.getTotal()); - final LineageRelationshipArray validatedRelationships = entityLineageResult.getRelationships().stream() - .filter(relationship -> entityService.exists(relationship.getEntity())) - .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) - .collect(Collectors.toCollection(LineageRelationshipArray::new)); + final LineageRelationshipArray validatedRelationships = + entityLineageResult.getRelationships().stream() + .filter(relationship -> entityService.exists(relationship.getEntity())) + .filter(relationship -> !entityService.isSoftDeleted(relationship.getEntity())) + .collect(Collectors.toCollection(LineageRelationshipArray::new)); validatedEntityLineageResult.setFiltered( - (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null ? entityLineageResult.getFiltered() : 0) - + entityLineageResult.getRelationships().size() - validatedRelationships.size()); + (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null + ? 
entityLineageResult.getFiltered() + : 0) + + entityLineageResult.getRelationships().size() + - validatedRelationships.size()); validatedEntityLineageResult.setRelationships(validatedRelationships); return validatedEntityLineageResult; } - public static LineageScrollResult validateLineageScrollResult(final LineageScrollResult lineageScrollResult, - @Nonnull final EntityService entityService) { + public static LineageScrollResult validateLineageScrollResult( + final LineageScrollResult lineageScrollResult, @Nonnull final EntityService entityService) { if (lineageScrollResult == null) { return null; } @@ -173,15 +188,14 @@ public static LineageScrollResult validateLineageScrollResult(final LineageScrol validatedLineageScrollResult.setScrollId(lineageScrollResult.getScrollId()); } - LineageSearchEntityArray validatedEntities = lineageScrollResult.getEntities() - .stream() - .filter(entity -> entityService.exists(entity.getEntity())) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + LineageSearchEntityArray validatedEntities = + lineageScrollResult.getEntities().stream() + .filter(entity -> entityService.exists(entity.getEntity())) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); validatedLineageScrollResult.setEntities(validatedEntities); return validatedLineageScrollResult; } - private ValidationUtils() { - } + private ValidationUtils() {} } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java index ea59885e8b6d5..f06671ac3c314 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/systemmetadata/SystemMetadataService.java @@ -10,7 +10,6 @@ import javax.annotation.Nullable; import org.opensearch.client.tasks.GetTaskResponse; - public interface SystemMetadataService { /** * Deletes a specific aspect from the system metadata service. 
@@ -32,11 +31,14 @@ public interface SystemMetadataService { List findByUrn(String urn, boolean includeSoftDeleted, int from, int size); - List findByParams(Map systemMetaParams, boolean includeSoftDeleted, int from, int size); + List findByParams( + Map systemMetaParams, boolean includeSoftDeleted, int from, int size); - List findByRegistry(String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size); + List findByRegistry( + String registryName, String registryVersion, boolean includeSoftDeleted, int from, int size); - List listRuns(Integer pageOffset, Integer pageSize, boolean includeSoftDeleted); + List listRuns( + Integer pageOffset, Integer pageSize, boolean includeSoftDeleted); void configure(); diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java index ce9134896779a..1f794157b9cb9 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/SemanticVersion.java @@ -3,19 +3,15 @@ import lombok.Builder; import lombok.Getter; - @Builder public class SemanticVersion { - @Getter - private int majorVersion; - @Getter - private int minorVersion; - @Getter - private int patchVersion; - @Getter - private String qualifier; + @Getter private int majorVersion; + @Getter private int minorVersion; + @Getter private int patchVersion; + @Getter private String qualifier; public String toString() { - return String.format(String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier)); + return String.format( + String.format("%d.%d.%d-%s", majorVersion, minorVersion, patchVersion, qualifier)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java index ab0848c640e2a..949572359d754 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/TimelineService.java @@ -8,14 +8,15 @@ import java.util.Set; import javax.annotation.Nonnull; - public interface TimelineService { - List getTimeline(@Nonnull final Urn urn, + List getTimeline( + @Nonnull final Urn urn, @Nonnull Set elements, long startMillis, long endMillis, String startVersionStamp, String endVersionStamp, - boolean rawDiffRequested) throws JsonProcessingException; + boolean rawDiffRequested) + throws JsonProcessingException; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java index 72218c37fe5ce..141a963c3e014 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeCategory.java @@ -5,17 +5,16 @@ import java.util.List; import java.util.Map; - public enum ChangeCategory { - //description, institutionalmemory, properties docs, field level docs/description etc. + // description, institutionalmemory, properties docs, field level docs/description etc. DOCUMENTATION, - //(field or top level) add term, remove term, etc. + // (field or top level) add term, remove term, etc. 
GLOSSARY_TERM, - //add new owner, remove owner, change ownership type etc. + // add new owner, remove owner, change ownership type etc. OWNER, - //new field, remove field, field type change, + // new field, remove field, field type change, TECHNICAL_SCHEMA, - //(field or top level) add tag, remove tag, + // (field or top level) add tag, remove tag, TAG, // Update the domain for an entity DOMAIN, diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java index dcd5f9d7dc2da..372e855841a36 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeEvent.java @@ -7,52 +7,36 @@ import lombok.Value; import lombok.experimental.NonFinal; - -/** - * An event representing a high-level, semantic change to a DataHub entity. - */ +/** An event representing a high-level, semantic change to a DataHub entity. */ @Value @Builder @NonFinal @AllArgsConstructor public class ChangeEvent { - /** - * The urn of the entity being changed. - */ + /** The urn of the entity being changed. */ String entityUrn; - /** - * The category of the change. - */ + + /** The category of the change. */ ChangeCategory category; - /** - * The operation of the change. - */ + + /** The operation of the change. */ ChangeOperation operation; - /** - * An optional modifier associated with the change. For example, a tag urn. - */ + + /** An optional modifier associated with the change. For example, a tag urn. */ String modifier; - /** - * Parameters that determined by the combination of category + operation. - */ + + /** Parameters that determined by the combination of category + operation. */ Map parameters; - /** - * An audit stamp detailing who made the change and when. - */ + + /** An audit stamp detailing who made the change and when. */ AuditStamp auditStamp; - /** - * Optional: Semantic change version. - * TODO: Determine if this should be inside this structure. - */ + + /** Optional: Semantic change version. TODO: Determine if this should be inside this structure. */ SemanticChangeType semVerChange; + /** - * Optional: A human readable description of this change. - * TODO: Determine if this should be inside this structure. + * Optional: A human readable description of this change. TODO: Determine if this should be inside + * this structure. */ String description; } - - - - - diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java index c9aafa6a0330d..2321165cca529 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeOperation.java @@ -1,40 +1,22 @@ package com.linkedin.metadata.timeline.data; public enum ChangeOperation { - /** - * Something is added to an entity, e.g. tag, glossary term. - */ + /** Something is added to an entity, e.g. tag, glossary term. */ ADD, - /** - * An entity is modified. e.g. Domain, description is updated. - */ + /** An entity is modified. e.g. Domain, description is updated. */ MODIFY, - /** - * Something is removed from an entity. e.g. tag, glossary term. - */ + /** Something is removed from an entity. e.g. tag, glossary term. 
*/ REMOVE, - /** - * Entity is created. - */ + /** Entity is created. */ CREATE, - /** - * Entity is hard-deleted. - */ + /** Entity is hard-deleted. */ HARD_DELETE, - /** - * Entity is soft-deleted. - */ + /** Entity is soft-deleted. */ SOFT_DELETE, - /** - * Entity is reinstated after being soft-deleted. - */ + /** Entity is reinstated after being soft-deleted. */ REINSTATE, - /** - * Run has STARTED - */ + /** Run has STARTED */ STARTED, - /** - * Run is completed - */ + /** Run is completed */ COMPLETED } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java index 5037b8dde9a8b..3e963dba0cdb4 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/ChangeTransaction.java @@ -8,7 +8,6 @@ import lombok.Getter; import lombok.Setter; - @Getter @Builder public class ChangeTransaction { @@ -19,10 +18,11 @@ public class ChangeTransaction { String semVer; SemanticChangeType semVerChange; List changeEvents; + @ArraySchema(schema = @Schema(implementation = PatchOperation.class)) JsonPatch rawDiff; - @Setter - String versionStamp; + + @Setter String versionStamp; public void setSemanticVersion(String semanticVersion) { this.semVer = semanticVersion; diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java index 6b28664bcb0f6..abbbe1af37546 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/PatchOperation.java @@ -3,7 +3,6 @@ import lombok.Builder; import lombok.Value; - @Value @Builder public class PatchOperation { diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java index dfaa74a0656fe..b8ae83e34eacf 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticChangeType.java @@ -1,5 +1,9 @@ package com.linkedin.metadata.timeline.data; public enum SemanticChangeType { - NONE, PATCH, MINOR, MAJOR, EXCEPTIONAL + NONE, + PATCH, + MINOR, + MAJOR, + EXCEPTIONAL } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java index 8dc1fdcba0cbf..32af2737756a8 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeline/data/SemanticDifference.java @@ -1,4 +1,3 @@ package com.linkedin.metadata.timeline.data; -public interface SemanticDifference { -} +public interface SemanticDifference {} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java index 0908d927cd40b..54480bb700398 100644 --- 
a/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/timeseries/TimeseriesAspectService.java @@ -14,16 +14,14 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public interface TimeseriesAspectService { - /** - * Configure the Time-Series aspect service one time at boot-up. - */ + /** Configure the Time-Series aspect service one time at boot-up. */ void configure(); /** * Count the number of entries using a filter + * * @param entityName the name of the entity to count entries for * @param aspectName the name of the timeseries aspect to count for that entity * @param filter the filter to apply to the count @@ -32,27 +30,30 @@ public interface TimeseriesAspectService { public long countByFilter( @Nonnull final String entityName, @Nonnull final String aspectName, - @Nullable final Filter filter - ); + @Nullable final Filter filter); /** - * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional filters, sorted by the timestampMillis - * field descending. + * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional + * filters, sorted by the timestampMillis field descending. * - * This method allows you to optionally filter for events that fall into a particular time window based on the timestampMillis - * field of the aspect, or simply retrieve the latest aspects sorted by time. + *
<p>
This method allows you to optionally filter for events that fall into a particular time + * window based on the timestampMillis field of the aspect, or simply retrieve the latest aspects + * sorted by time. * - * Note that this does not always indicate the event time, and is often used to reflect the reported - * time of a given event. + *
<p>
Note that this does not always indicate the event time, and is often used to reflect the
+   * reported time of a given event.
    *
    * @param urn the urn of the entity to retrieve aspects for
    * @param entityName the name of the entity to retrieve aspects for
    * @param aspectName the name of the timeseries aspect to retrieve for the entity
-   * @param startTimeMillis the start of a time window in milliseconds, compared against the standard timestampMillis field
-   * @param endTimeMillis the end of a time window in milliseconds, compared against the standard timestampMillis field
+   * @param startTimeMillis the start of a time window in milliseconds, compared against the
+   *     standard timestampMillis field
+   * @param endTimeMillis the end of a time window in milliseconds, compared against the standard
+   *     timestampMillis field
    * @param limit the maximum number of results to retrieve
    * @param filter a set of additional secondary filters to apply when finding the aspects
-   * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, or empty list if none were found.
+   * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found,
+   *     or empty list if none were found.
    */
   @Nonnull
   default List<EnvelopedAspect> getAspectValues(
@@ -63,28 +64,34 @@ default List<EnvelopedAspect> getAspectValues(
       @Nullable final Long endTimeMillis,
       @Nullable final Integer limit,
       @Nullable final Filter filter) {
-    return getAspectValues(urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, null);
+    return getAspectValues(
+        urn, entityName, aspectName, startTimeMillis, endTimeMillis, limit, filter, null);
   }
 
   /**
-   * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional filters, sorted by the timestampMillis
-   * field descending.
+   * Retrieve a list of Time-Series Aspects for an individual entity, matching a set of optional
+   * filters, sorted by the timestampMillis field descending.
    *
-   * This method allows you to optionally filter for events that fall into a particular time window based on the timestampMillis
-   * field of the aspect, or simply retrieve the latest aspects sorted by time.
+   *
<p>
This method allows you to optionally filter for events that fall into a particular time + * window based on the timestampMillis field of the aspect, or simply retrieve the latest aspects + * sorted by time. * - * Note that this does not always indicate the event time, and is often used to reflect the reported - * time of a given event. + *
<p>
Note that this does not always indicate the event time, and is often used to reflect the
+   * reported time of a given event.
    *
    * @param urn the urn of the entity to retrieve aspects for
    * @param entityName the name of the entity to retrieve aspects for
    * @param aspectName the name of the timeseries aspect to retrieve for the entity
-   * @param startTimeMillis the start of a time window in milliseconds, compared against the standard timestampMillis field
-   * @param endTimeMillis the end of a time window in milliseconds, compared against the standard timestampMillis field
+   * @param startTimeMillis the start of a time window in milliseconds, compared against the
+   *     standard timestampMillis field
+   * @param endTimeMillis the end of a time window in milliseconds, compared against the standard
+   *     timestampMillis field
    * @param limit the maximum number of results to retrieve
    * @param filter a set of additional secondary filters to apply when finding the aspects
-   * @param sort the sort criterion for the result set. If not provided, defaults to sorting by timestampMillis descending.
-   * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found, or empty list if none were found.
+   * @param sort the sort criterion for the result set. If not provided, defaults to sorting by
+   *     timestampMillis descending.
+   * @return a list of {@link EnvelopedAspect} containing the Time-Series aspects that were found,
+   *     or empty list if none were found.
    */
   @Nonnull
   List<EnvelopedAspect> getAspectValues(
@@ -98,16 +105,19 @@ List<EnvelopedAspect> getAspectValues(
       @Nullable final SortCriterion sort);
 
   /**
-   * Perform a arbitrary aggregation query over a set of Time-Series aspects.
-   * This is used to answer arbitrary questions about the Time-Series aspects that we have.
+   * Perform an arbitrary aggregation query over a set of Time-Series aspects. This is used to answer
+   * arbitrary questions about the Time-Series aspects that we have.
    *
    * @param entityName the name of the entity associated with the Time-Series aspect.
    * @param aspectName the name of the Time-Series aspect.
-   * @param aggregationSpecs a specification of the types of metric-value aggregations that should be performed
-   * @param filter an optional filter that should be applied prior to performing the requested aggregations.
-   * @param groupingBuckets an optional set of buckets to group the aggregations on the timeline -- For example, by a particular date or
-   * string value.
-   * @return a "table" representation of the results of performing the aggregation, with a row per group.
+   * @param aggregationSpecs a specification of the types of metric-value aggregations that should
+   *     be performed
+   * @param filter an optional filter that should be applied prior to performing the requested
+   *     aggregations.
+   * @param groupingBuckets an optional set of buckets to group the aggregations on the timeline --
+   *     For example, by a particular date or string value.
+   * @return a "table" representation of the results of performing the aggregation, with a row per
+   *     group.
    */
   @Nonnull
   GenericTable getAggregatedStats(
@@ -121,7 +131,7 @@ GenericTable getAggregatedStats(
    * Generic filter based deletion for Time-Series Aspects.
    *
    * @param entityName The name of the entity.
-   * @param aspectName  The name of the aspect.
+   * @param aspectName The name of the aspect.
    * @param filter A filter to be used for deletion of the documents on the index.
* @return a summary of the aspects which were deleted */ @@ -135,7 +145,7 @@ DeleteAspectValuesResult deleteAspectValues( * Generic filter based deletion for Time-Series Aspects. * * @param entityName The name of the entity. - * @param aspectName The name of the aspect. + * @param aspectName The name of the aspect. * @param filter A filter to be used for deletion of the documents on the index. * @param options Options to control delete parameters * @return The Job ID of the deletion operation @@ -149,18 +159,22 @@ String deleteAspectValuesAsync( /** * Reindex the index represented by entityName and aspect name, applying the filter + * * @param entityName The name of the entity. - * @param aspectName The name of the aspect. + * @param aspectName The name of the aspect. * @param filter A filter to be used when reindexing * @param options Options to control reindex parameters * @return The Job ID of the reindex operation */ - String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Nonnull Filter filter, + String reindexAsync( + @Nonnull String entityName, + @Nonnull String aspectName, + @Nonnull Filter filter, @Nonnull BatchWriteOperationsOptions options); /** - * Rollback the Time-Series aspects associated with a particular runId. This is invoked as a part of an - * ingestion rollback process. + * Rollback the Time-Series aspects associated with a particular runId. This is invoked as a part + * of an ingestion rollback process. * * @param runId The runId that needs to be rolled back. * @return a summary of the aspects which were deleted @@ -169,14 +183,15 @@ String reindexAsync(@Nonnull String entityName, @Nonnull String aspectName, @Non DeleteAspectValuesResult rollbackTimeseriesAspects(@Nonnull final String runId); /** - * Upsert a raw timeseries aspect into a timeseries index. Note that this is a bit of a hack, and leaks - * too much implementation detail around Elasticsearch. + * Upsert a raw timeseries aspect into a timeseries index. Note that this is a bit of a hack, and + * leaks too much implementation detail around Elasticsearch. * - * TODO: Make this more general purpose. + *
<p>
TODO: Make this more general purpose. * * @param entityName the name of the entity * @param aspectName the name of an aspect - * @param docId the doc id for the elasticsearch document - this serves as the primary key for the document. + * @param docId the doc id for the elasticsearch document - this serves as the primary key for the + * document. * @param document the raw document to insert. */ void upsertDocument( diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java index 4338d883ece1d..4c5595d4d1468 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/DomainServiceTest.java @@ -32,8 +32,10 @@ public class DomainServiceTest { private static final Urn TEST_DOMAIN_URN_1 = UrnUtils.getUrn("urn:li:domain:test"); private static final Urn TEST_DOMAIN_URN_2 = UrnUtils.getUrn("urn:li:domain:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); @Test private void testSetDomainExistingDomain() throws Exception { @@ -41,64 +43,66 @@ private void testSetDomainExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); Urn newDomainUrn = UrnUtils.getUrn("urn:li:domain:newDomain"); - List events = service.buildSetDomainProposals(newDomainUrn, ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List events = + service.buildSetDomainProposals( + newDomainUrn, + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - 
event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); } @Test private void testSetDomainNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); Urn newDomainUrn = UrnUtils.getUrn("urn:li:domain:newDomain"); - List events = service.buildSetDomainProposals(newDomainUrn, ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List events = + service.buildSetDomainProposals( + newDomainUrn, + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(newDomainUrn)))); } @Test @@ -107,62 +111,62 @@ private void testUnsetDomainExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildUnsetDomainProposals(ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List events = + service.buildUnsetDomainProposals( + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, 
null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } @Test private void testUnsetDomainNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildUnsetDomainProposals(ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null))); + List events = + service.buildUnsetDomainProposals( + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + 
Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } @Test @@ -171,165 +175,178 @@ private void testAddDomainsExistingDomain() throws Exception { existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildAddDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildAddDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains() + .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains() + .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)))); } @Test private void testAddDomainsNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildAddDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildAddDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); 
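    // Illustrative sketch (hypothetical helper, built only from this test's own constants):
    // buildAddDomainsProposals is expected to append the new domain to the existing set rather
    // than overwrite it, so each emitted proposal's Domains aspect should equal something like:
    //   Domains merged =
    //       new Domains()
    //           .setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2)));
    // The assertions below deserialize each proposal's aspect and compare it against that
    // merged value.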
Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); } @Test private void testRemoveDomainsExistingDomain() throws Exception { Domains existingDomains = new Domains(); - existingDomains.setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2))); + existingDomains.setDomains( + new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1, TEST_DOMAIN_URN_2))); EntityClient mockClient = createMockEntityClient(existingDomains); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildRemoveDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildRemoveDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = 
GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, + new Domains().setDomains(new UrnArray(ImmutableList.of(TEST_DOMAIN_URN_1)))); } @Test private void testRemoveDomainsNoExistingDomain() throws Exception { EntityClient mockClient = createMockEntityClient(null); - final DomainService service = new DomainService( - mockClient, - Mockito.mock(Authentication.class)); + final DomainService service = new DomainService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildRemoveDomainsProposals( - ImmutableList.of(TEST_DOMAIN_URN_2), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildRemoveDomainsProposals( + ImmutableList.of(TEST_DOMAIN_URN_2), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect1, new Domains().setDomains(new UrnArray(Collections.emptyList()))); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.DOMAINS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate domainsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Domains.class); - Assert.assertEquals(domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); + RecordTemplate domainsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Domains.class); + Assert.assertEquals( + domainsAspect2, new Domains().setDomains(new UrnArray(Collections.emptyList()))); } - private static EntityClient createMockEntityClient(@Nullable Domains existingDomains) throws Exception { + private static EntityClient createMockEntityClient(@Nullable Domains existingDomains) + throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(existingDomains != null ? 
ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingDomains.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(existingDomains.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + existingDomains != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingDomains.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(existingDomains.data())))))) + : Collections.emptyMap()); return mockClient; } diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java index 567a457efcf93..2048548f6cede 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/GlossaryTermServiceTest.java @@ -33,57 +33,64 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class GlossaryTermServiceTest { private static final Urn TEST_GLOSSARY_TERM_URN_1 = UrnUtils.getUrn("urn:li:glossaryTerm:test"); private static final Urn TEST_GLOSSARY_TERM_URN_2 = UrnUtils.getUrn("urn:li:glossaryTerm:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); - + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + @Test private void testAddGlossaryTermToEntityExistingGlossaryTerm() throws Exception { GlossaryTerms existingGlossaryTerms = new GlossaryTerms(); - existingGlossaryTerms.setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)) - ))); + existingGlossaryTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1))))); EntityClient mockClient = createMockGlossaryEntityClient(existingGlossaryTerms); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - 
Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expected); } @@ -91,37 +98,42 @@ private void testAddGlossaryTermToEntityExistingGlossaryTerm() throws Exception private void testAddGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exception { EntityClient mockClient = createMockGlossaryEntityClient(null); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - GlossaryTermAssociationArray expectedTermsArray = new GlossaryTermAssociationArray( - ImmutableList.of(new 
GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTermAssociationArray expectedTermsArray = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expectedTermsArray); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expectedTermsArray); } @@ -129,50 +141,72 @@ private void testAddGlossaryTermsToEntityNoExistingGlossaryTerm() throws Excepti private void testAddGlossaryTermToSchemaFieldExistingGlossaryTerm() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_1))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new 
GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); + List events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test @@ -180,90 +214,114 @@ private void testAddGlossaryTermsToSchemaFieldNoExistingGlossaryTerm() throws Ex EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms(new GlossaryTerms())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = 
UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List events = service.buildAddGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); - - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn))) - ); + List events = + service.buildAddGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); + + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(newGlossaryTermUrn)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test private void testRemoveGlossaryTermToEntityExistingGlossaryTerm() throws Exception { GlossaryTerms existingGlossaryTerms = new GlossaryTerms(); - existingGlossaryTerms.setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)) - ))); + existingGlossaryTerms.setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + 
.setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))))); EntityClient mockClient = createMockGlossaryEntityClient(existingGlossaryTerms); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); - - List events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - GlossaryTerms expected = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of(new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))))); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); + + List events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + GlossaryTerms expected = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + RecordTemplate glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1, expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + RecordTemplate glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2, expected); } @@ -271,36 +329,38 @@ private void testRemoveGlossaryTermToEntityExistingGlossaryTerm() throws Excepti private void testRemoveGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exception { EntityClient mockClient = createMockGlossaryEntityClient(null); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); Urn newGlossaryTermUrn = UrnUtils.getUrn("urn:li:glossaryTerm:newGlossaryTerm"); - List events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(newGlossaryTermUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(newGlossaryTermUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, 
null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect1.getTerms(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOSSARY_TERMS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlossaryTerms glossaryTermsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlossaryTerms.class); + GlossaryTerms glossaryTermsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + GlossaryTerms.class); Assert.assertEquals(glossaryTermsAspect2.getTerms(), expected); } @@ -308,51 +368,73 @@ private void testRemoveGlossaryTermsToEntityNoExistingGlossaryTerm() throws Exce private void testRemoveGlossaryTermToSchemaFieldExistingGlossaryTerm() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms().setTerms(new GlossaryTermAssociationArray( - ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_1)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms( + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_1)), + new GlossaryTermAssociation() + .setUrn( + GlossaryTermUrn.createFromUrn( + TEST_GLOSSARY_TERM_URN_2))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(TEST_GLOSSARY_TERM_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + 
mockAuthentication()); - GlossaryTermAssociationArray expected = new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation() - .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)) - )); + GlossaryTermAssociationArray expected = + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromUrn(TEST_GLOSSARY_TERM_URN_2)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + expected); } @Test @@ -360,78 +442,99 @@ private void testRemoveGlossaryTermsToSchemaFieldNoExistingGlossaryTerm() throws EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlossaryTerms(new GlossaryTerms()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlossaryTerms(new GlossaryTerms())))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final GlossaryTermService service = new GlossaryTermService( - mockClient, - Mockito.mock(Authentication.class)); + final GlossaryTermService service = + new GlossaryTermService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildRemoveGlossaryTermsProposals( - ImmutableList.of(TEST_ENTITY_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List events = + service.buildRemoveGlossaryTermsProposals( + ImmutableList.of(TEST_ENTITY_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, 
SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), Collections.emptyList()); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + Collections.emptyList()); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlossaryTerms().getTerms(), Collections.emptyList()); - + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2 + .getEditableSchemaFieldInfo() + .get(0) + .getGlossaryTerms() + .getTerms(), + Collections.emptyList()); } - private static EntityClient createMockGlossaryEntityClient(@Nullable GlossaryTerms existingGlossaryTerms) throws Exception { + private static EntityClient createMockGlossaryEntityClient( + @Nullable GlossaryTerms existingGlossaryTerms) throws Exception { return createMockEntityClient(existingGlossaryTerms, Constants.GLOSSARY_TERMS_ASPECT_NAME); } - private static EntityClient createMockSchemaMetadataEntityClient(@Nullable EditableSchemaMetadata existingMetadata) throws Exception { + private static EntityClient createMockSchemaMetadataEntityClient( + @Nullable EditableSchemaMetadata existingMetadata) throws Exception { return createMockEntityClient(existingMetadata, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( + @Nullable RecordTemplate aspect, String aspectName) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(aspectName)), - Mockito.any(Authentication.class))) - .thenReturn(aspect != null ? 
ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(aspectName)), + Mockito.any(Authentication.class))) + .thenReturn( + aspect != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) + : Collections.emptyMap()); return mockClient; } diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java index 9df8b9ecf46e8..5888067dbe268 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/LineageServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -46,8 +48,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class LineageServiceTest { private static AuditStamp _auditStamp; private static EntityClient _mockClient; @@ -57,18 +57,25 @@ public class LineageServiceTest { private static final String SOURCE_FIELD_NAME = "source"; private static final String UI_SOURCE = "UI"; private static final String ACTOR_URN = "urn:li:corpuser:test"; - private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; - private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; - private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; - private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; + private static final String DATASET_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; + private static final String DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; + private static final String DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; + private static final String DATASET_URN_4 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; private static final String CHART_URN_1 = "urn:li:chart:(looker,baz1)"; private static final String CHART_URN_2 = "urn:li:chart:(looker,baz2)"; 
private static final String CHART_URN_3 = "urn:li:chart:(looker,baz3)";
   private static final String DASHBOARD_URN_1 = "urn:li:dashboard:(airflow,id1)";
   private static final String DASHBOARD_URN_2 = "urn:li:dashboard:(airflow,id2)";
-  private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
-  private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
-  private static final String DATAJOB_URN_3 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test3)";
+  private static final String DATAJOB_URN_1 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)";
+  private static final String DATAJOB_URN_2 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)";
+  private static final String DATAJOB_URN_3 =
+      "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test3)";
   private Urn actorUrn;
   private Urn datasetUrn1;
   private Urn datasetUrn2;
@@ -114,41 +121,41 @@ public void testUpdateDatasetLineage() throws Exception {
     Mockito.when(_mockClient.exists(datasetUrn2, AUTHENTICATION)).thenReturn(true);
     Mockito.when(_mockClient.exists(datasetUrn3, AUTHENTICATION)).thenReturn(true);

-    UpstreamLineage upstreamLineage = createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_3, DATASET_URN_4)));
+    UpstreamLineage upstreamLineage =
+        createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_3, DATASET_URN_4)));

-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(datasetUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(datasetUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.UPSTREAM_LINEAGE_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(datasetUrn1)
                 .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.UPSTREAM_LINEAGE_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(upstreamLineage.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.UPSTREAM_LINEAGE_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(upstreamLineage.data()))))));

     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn3);
-    _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+    _lineageService.updateDatasetLineage(
+        datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);

     // upstreamLineage without dataset3, keep dataset4, add dataset2
-    final UpstreamLineage updatedDataset1UpstreamLineage = createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_4, DATASET_URN_2)));
+    final UpstreamLineage updatedDataset1UpstreamLineage =
+        createUpstreamLineage(new ArrayList<>(Arrays.asList(DATASET_URN_4, DATASET_URN_2)));

     final MetadataChangeProposal proposal1 = new MetadataChangeProposal();
     proposal1.setEntityUrn(UrnUtils.getUrn(DATASET_URN_1));
     proposal1.setEntityType(Constants.DATASET_ENTITY_NAME);
     proposal1.setAspectName(Constants.UPSTREAM_LINEAGE_ASPECT_NAME);
     proposal1.setAspect(GenericRecordUtils.serializeAspect(updatedDataset1UpstreamLineage));
     proposal1.setChangeType(ChangeType.UPSERT);
-    Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal1),
-        Mockito.eq(AUTHENTICATION),
-        Mockito.eq(false)
-    );
+    Mockito.verify(_mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal1), Mockito.eq(AUTHENTICATION), Mockito.eq(false));
   }

   @Test
@@ -157,8 +164,11 @@ public void testFailUpdateWithMissingDataset() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn3);
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateDatasetLineage(
+                datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }

   @Test
@@ -167,11 +177,15 @@ public void testFailUpdateDatasetWithInvalidEdge() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(chartUrn1);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateDatasetLineage(datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateDatasetLineage(
+                datasetUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }

-  // Adds upstream for chart1 to dataset3 and removes edge to dataset1 while keeping edge to dataset2
+  // Adds upstream for chart1 to dataset3 and removes edge to dataset1 while keeping edge to
+  // dataset2
   @Test
   public void testUpdateChartLineage() throws Exception {
     Mockito.when(_mockClient.exists(chartUrn1, AUTHENTICATION)).thenReturn(true);
@@ -179,30 +193,37 @@ public void testUpdateChartLineage() throws Exception {
     Mockito.when(_mockClient.exists(datasetUrn2, AUTHENTICATION)).thenReturn(true);
     Mockito.when(_mockClient.exists(datasetUrn3, AUTHENTICATION)).thenReturn(true);

-    ChartInfo chartInfo = createChartInfo(chartUrn1, Arrays.asList(datasetUrn1, datasetUrn2), Collections.emptyList());
+    ChartInfo chartInfo =
+        createChartInfo(
+            chartUrn1, Arrays.asList(datasetUrn1, datasetUrn2), Collections.emptyList());

-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.CHART_ENTITY_NAME),
-        Mockito.eq(chartUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.CHART_ENTITY_NAME),
+                Mockito.eq(chartUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.CHART_INFO_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(chartUrn1)
                 .setEntityName(Constants.CHART_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.CHART_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(chartInfo.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            Constants.CHART_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(chartInfo.data()))))));

     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn3);
     final List<Urn> upstreamUrnsToRemove = Collections.singletonList(datasetUrn2);
-    _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);
+    _lineageService.updateChartLineage(
+        chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION);

     // chartInfo with dataset1 in inputs and dataset3 in inputEdges
-    ChartInfo updatedChartInfo = createChartInfo(chartUrn1, Collections.singletonList(datasetUrn1),
-        Collections.singletonList(datasetUrn3));
+    ChartInfo updatedChartInfo =
+        createChartInfo(
+            chartUrn1,
+            Collections.singletonList(datasetUrn1),
+            Collections.singletonList(datasetUrn3));

     final MetadataChangeProposal proposal = new MetadataChangeProposal();
     proposal.setEntityUrn(chartUrn1);
@@ -210,11 +231,8 @@ public void testUpdateChartLineage() throws Exception {
     proposal.setAspectName(Constants.CHART_INFO_ASPECT_NAME);
     proposal.setAspect(GenericRecordUtils.serializeAspect(updatedChartInfo));
     proposal.setChangeType(ChangeType.UPSERT);
-    Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(proposal),
-        Mockito.eq(AUTHENTICATION),
-        Mockito.eq(false)
-    );
+    Mockito.verify(_mockClient, Mockito.times(1))
+        .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false));
   }

   @Test
@@ -223,8 +241,11 @@ public void testFailUpdateChartWithMissingDataset() throws Exception {
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(datasetUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(IllegalArgumentException.class, () ->
-        _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            _lineageService.updateChartLineage(
+                chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }

   @Test
@@ -234,8 +255,11 @@ public void testFailUpdateChartWithInvalidEdge() throws Exception {
     // charts can't have charts upstream of them
     final List<Urn> upstreamUrnsToAdd = Collections.singletonList(chartUrn2);
     final List<Urn> upstreamUrnsToRemove = Collections.emptyList();
-    assertThrows(RuntimeException.class, () ->
-        _lineageService.updateChartLineage(chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
+    assertThrows(
+        RuntimeException.class,
+        () ->
+            _lineageService.updateChartLineage(
+                chartUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION));
   }

   // Adds upstreams for dashboard to dataset2 and chart2 and removes edge to dataset1 and chart1
@@ -248,42 +272,44 @@ public void testUpdateDashboardLineage() throws Exception {
     Mockito.when(_mockClient.exists(chartUrn2, AUTHENTICATION)).thenReturn(true);

     // existing dashboardInfo has upstreams to dataset1, dataset3, chart1, chart3
-    DashboardInfo dashboardInfo = createDashboardInfo(
-        dashboardUrn1,
-        Arrays.asList(chartUrn1, chartUrn3),
-        Collections.emptyList(),
-        Arrays.asList(datasetUrn1, datasetUrn3),
-        Collections.emptyList()
-    );
-
-    Mockito.when(_mockClient.getV2(
-        Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
-        Mockito.eq(dashboardUrn1),
-        Mockito.eq(ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME)),
-        Mockito.eq(AUTHENTICATION)
-    ))
+    DashboardInfo dashboardInfo =
+        createDashboardInfo(
+            dashboardUrn1,
+            Arrays.asList(chartUrn1, chartUrn3),
+            Collections.emptyList(),
+            Arrays.asList(datasetUrn1, datasetUrn3),
+            Collections.emptyList());
+
+    Mockito.when(
+            _mockClient.getV2(
+                Mockito.eq(Constants.DASHBOARD_ENTITY_NAME),
+                Mockito.eq(dashboardUrn1),
+                Mockito.eq(ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME)),
+                Mockito.eq(AUTHENTICATION)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(dashboardUrn1)
                 .setEntityName(Constants.DASHBOARD_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    Constants.DASHBOARD_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(dashboardInfo.data()))
-                )))
-        );
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+
Constants.DASHBOARD_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dashboardInfo.data())))))); final List upstreamUrnsToAdd = Arrays.asList(datasetUrn2, chartUrn2); final List upstreamUrnsToRemove = Arrays.asList(datasetUrn1, chartUrn1); - _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); - - // dashboardInfo with chartUrn3 in charts, chartUrn2 in chartEdges, datasetUrn3 in datasets, datasetUrn2 in datasetEdges - DashboardInfo updatedDashboardInfo = createDashboardInfo( - dashboardUrn1, - Collections.singletonList(chartUrn3), - Collections.singletonList(chartUrn2), - Arrays.asList(datasetUrn3), - Collections.singletonList(datasetUrn2) - ); + _lineageService.updateDashboardLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); + + // dashboardInfo with chartUrn3 in charts, chartUrn2 in chartEdges, datasetUrn3 in datasets, + // datasetUrn2 in datasetEdges + DashboardInfo updatedDashboardInfo = + createDashboardInfo( + dashboardUrn1, + Collections.singletonList(chartUrn3), + Collections.singletonList(chartUrn2), + Arrays.asList(datasetUrn3), + Collections.singletonList(datasetUrn2)); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(dashboardUrn1); @@ -291,11 +317,8 @@ public void testUpdateDashboardLineage() throws Exception { proposal.setAspectName(Constants.DASHBOARD_INFO_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(updatedDashboardInfo)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.eq(AUTHENTICATION), - Mockito.eq(false) - ); + Mockito.verify(_mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false)); } @Test @@ -304,8 +327,11 @@ public void testFailUpdateDashboardWithMissingDataset() throws Exception { final List upstreamUrnsToAdd = Collections.singletonList(datasetUrn2); final List upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDashboardLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -315,11 +341,15 @@ public void testFailUpdateDashboardWithInvalidEdge() throws Exception { // dashboards can't have dashboards upstream of them final List upstreamUrnsToAdd = Collections.singletonList(dashboardUrn2); final List upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDashboardLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDashboardLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } - // Adds upstream datajob3, upstream dataset3, downstream dataset4, removes upstream datajob2, upstream dataset1, downstream dataset1 + // Adds upstream datajob3, upstream dataset3, downstream dataset4, removes upstream datajob2, + // upstream dataset1, downstream dataset1 // has existing upstream datajob2, upstream dataset1 and dataset2, downstream dataset4 // Should result in upstream datajob3, upstream dataset3 and dataset2, downstream 
dataset5 @Test @@ -332,66 +362,71 @@ public void testUpdateDataJobLineage() throws Exception { Mockito.when(_mockClient.exists(datasetUrn4, AUTHENTICATION)).thenReturn(true); Mockito.when(_mockClient.exists(datasetUrn1, AUTHENTICATION)).thenReturn(true); - DataJobInputOutput firstDataJobInputOutput = createDataJobInputOutput( - datajobUrn1, - Arrays.asList(datasetUrn1, datasetUrn2), - Collections.emptyList(), - Collections.singletonList(datajobUrn2), - Collections.emptyList(), - Collections.singletonList(datasetUrn1), - Collections.emptyList() - ); - - DataJobInputOutput secondDataJobInputOutput = createDataJobInputOutput( - datajobUrn1, - Arrays.asList(datasetUrn1), - Arrays.asList(datasetUrn3), - Collections.emptyList(), - Arrays.asList(datajobUrn3), - Arrays.asList(datasetUrn1), - Collections.emptyList() - ); - - Mockito.when(_mockClient.getV2( - Mockito.eq(Constants.DATA_JOB_ENTITY_NAME), - Mockito.eq(datajobUrn1), - Mockito.eq(ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)), - Mockito.eq(AUTHENTICATION) - )) + DataJobInputOutput firstDataJobInputOutput = + createDataJobInputOutput( + datajobUrn1, + Arrays.asList(datasetUrn1, datasetUrn2), + Collections.emptyList(), + Collections.singletonList(datajobUrn2), + Collections.emptyList(), + Collections.singletonList(datasetUrn1), + Collections.emptyList()); + + DataJobInputOutput secondDataJobInputOutput = + createDataJobInputOutput( + datajobUrn1, + Arrays.asList(datasetUrn1), + Arrays.asList(datasetUrn3), + Collections.emptyList(), + Arrays.asList(datajobUrn3), + Arrays.asList(datasetUrn1), + Collections.emptyList()); + + Mockito.when( + _mockClient.getV2( + Mockito.eq(Constants.DATA_JOB_ENTITY_NAME), + Mockito.eq(datajobUrn1), + Mockito.eq(ImmutableSet.of(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME)), + Mockito.eq(AUTHENTICATION))) .thenReturn( new EntityResponse() .setUrn(datajobUrn1) .setEntityName(Constants.DATA_JOB_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(firstDataJobInputOutput.data())) - ))), + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(firstDataJobInputOutput.data()))))), new EntityResponse() .setUrn(datajobUrn1) .setEntityName(Constants.DATA_JOB_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(secondDataJobInputOutput.data())) - ))) - ); + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(secondDataJobInputOutput.data())))))); final List upstreamUrnsToAdd = Arrays.asList(datajobUrn3, datasetUrn3); final List upstreamUrnsToRemove = Arrays.asList(datajobUrn2, datasetUrn2); - _lineageService.updateDataJobUpstreamLineage(datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); + _lineageService.updateDataJobUpstreamLineage( + datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION); final List downstreamUrnsToAdd = Arrays.asList(datasetUrn4); final List downstreamUrnsToRemove = Arrays.asList(datasetUrn1); - _lineageService.updateDataJobDownstreamLineage(datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION); - - DataJobInputOutput updatedDataJobInputOutput = createDataJobInputOutput( - datajobUrn1, - 
Arrays.asList(datasetUrn1), - Arrays.asList(datasetUrn3), - Collections.emptyList(), - Arrays.asList(datajobUrn3), - Collections.emptyList(), - Collections.singletonList(datasetUrn4) - ); + _lineageService.updateDataJobDownstreamLineage( + datajobUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION); + + DataJobInputOutput updatedDataJobInputOutput = + createDataJobInputOutput( + datajobUrn1, + Arrays.asList(datasetUrn1), + Arrays.asList(datasetUrn3), + Collections.emptyList(), + Arrays.asList(datajobUrn3), + Collections.emptyList(), + Collections.singletonList(datasetUrn4)); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(datajobUrn1); @@ -399,11 +434,8 @@ public void testUpdateDataJobLineage() throws Exception { proposal.setAspectName(Constants.DATA_JOB_INPUT_OUTPUT_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(updatedDataJobInputOutput)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(_mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.eq(AUTHENTICATION), - Mockito.eq(false) - ); + Mockito.verify(_mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.eq(AUTHENTICATION), Mockito.eq(false)); } @Test @@ -412,8 +444,11 @@ public void testFailUpdateUpstreamDataJobWithMissingUrnToAdd() throws Exception final List upstreamUrnsToAdd = Arrays.asList(datajobUrn3); final List upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDataJobUpstreamLineage(dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDataJobUpstreamLineage( + dashboardUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -423,8 +458,11 @@ public void testFailUpdateUpstreamDataJobWithInvalidEdge() throws Exception { // dataJobs can't have dashboards upstream of them final List upstreamUrnsToAdd = Collections.singletonList(dashboardUrn2); final List upstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDataJobUpstreamLineage(datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDataJobUpstreamLineage( + datajobUrn1, upstreamUrnsToAdd, upstreamUrnsToRemove, actorUrn, AUTHENTICATION)); } @Test @@ -433,8 +471,15 @@ public void testFailUpdateDownstreamDataJobWithMissingUrnToAdd() throws Exceptio final List downstreamUrnsToAdd = Arrays.asList(datasetUrn1); final List downstreamUrnsToRemove = Collections.emptyList(); - assertThrows(IllegalArgumentException.class, () -> - _lineageService.updateDataJobDownstreamLineage(dashboardUrn1, downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + IllegalArgumentException.class, + () -> + _lineageService.updateDataJobDownstreamLineage( + dashboardUrn1, + downstreamUrnsToAdd, + downstreamUrnsToRemove, + actorUrn, + AUTHENTICATION)); } @Test @@ -444,8 +489,15 @@ public void testFailUpdateDownstreamDataJobWithInvalidEdge() throws Exception { // dataJobs can't have dashboards downstream of them final List downstreamUrnsToAdd = Collections.singletonList(dashboardUrn2); final List downstreamUrnsToRemove = Collections.emptyList(); - assertThrows(RuntimeException.class, () -> - _lineageService.updateDataJobUpstreamLineage(datajobUrn1, 
downstreamUrnsToAdd, downstreamUrnsToRemove, actorUrn, AUTHENTICATION)); + assertThrows( + RuntimeException.class, + () -> + _lineageService.updateDataJobUpstreamLineage( + datajobUrn1, + downstreamUrnsToAdd, + downstreamUrnsToRemove, + actorUrn, + AUTHENTICATION)); } private UpstreamLineage createUpstreamLineage(List upstreamUrns) throws Exception { @@ -466,7 +518,8 @@ private UpstreamLineage createUpstreamLineage(List upstreamUrns) throws return upstreamLineage; } - private ChartInfo createChartInfo(Urn entityUrn, List inputsToAdd, List inputEdgesToAdd) throws Exception { + private ChartInfo createChartInfo(Urn entityUrn, List inputsToAdd, List inputEdgesToAdd) + throws Exception { ChartInfo chartInfo = new ChartInfo(); ChartDataSourceTypeArray inputs = new ChartDataSourceTypeArray(); for (Urn input : inputsToAdd) { @@ -489,8 +542,8 @@ private DashboardInfo createDashboardInfo( List chartsToAdd, List chartEdgesToAdd, List datasetsToAdd, - List datasetEdgesToAdd - ) throws Exception { + List datasetEdgesToAdd) + throws Exception { final DashboardInfo dashboardInfo = new DashboardInfo(); final ChartUrnArray charts = new ChartUrnArray(); @@ -525,8 +578,8 @@ private DataJobInputOutput createDataJobInputOutput( List inputDatajobsToAdd, List inputDatajobEdgesToAdd, List outputDatasetsToAdd, - List outputDatasetEdgesToAdd - ) throws Exception { + List outputDatasetEdgesToAdd) + throws Exception { final DataJobInputOutput dataJobInputOutput = new DataJobInputOutput(); final DatasetUrnArray inputDatasets = new DatasetUrnArray(); @@ -571,8 +624,7 @@ private DataJobInputOutput createDataJobInputOutput( private void addNewEdge( @Nonnull final Urn upstreamUrn, @Nonnull final Urn downstreamUrn, - @Nonnull final EdgeArray edgeArray - ) { + @Nonnull final EdgeArray edgeArray) { final Edge newEdge = new Edge(); newEdge.setDestinationUrn(upstreamUrn); newEdge.setSourceUrn(downstreamUrn); diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java index c23a151e52734..fde1c32d53a92 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnerServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.service.OwnerService.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -29,61 +31,59 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.service.OwnerService.*; - - public class OwnerServiceTest { private static final Urn TEST_OWNER_URN_1 = UrnUtils.getUrn("urn:li:corpuser:test"); private static final Urn TEST_OWNER_URN_2 = UrnUtils.getUrn("urn:li:corpuser:test2"); - private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); - private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); + private static final Urn TEST_ENTITY_URN_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)"); + private static final Urn TEST_ENTITY_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)"); @Test private void testAddOwnersExistingOwner() throws Exception { Ownership existingOwnership = new Ownership(); - 
existingOwnership.setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE) - ))); + existingOwnership.setOwners( + new OwnerArray( + ImmutableList.of(new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE)))); EntityClient mockClient = createMockOwnersClient(existingOwnership); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); Urn newOwnerUrn = UrnUtils.getUrn("urn:li:corpuser:newTag"); - List events = service.buildAddOwnersProposals( - ImmutableList.of(newOwnerUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - OwnershipType.NONE, - mockAuthentication()); - - OwnerArray expected = new OwnerArray( - ImmutableList.of( - new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE), - new Owner().setOwner(newOwnerUrn).setType(OwnershipType.NONE) - .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())) - )); + List events = + service.buildAddOwnersProposals( + ImmutableList.of(newOwnerUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + OwnershipType.NONE, + mockAuthentication()); + + OwnerArray expected = + new OwnerArray( + ImmutableList.of( + new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.NONE), + new Owner() + .setOwner(newOwnerUrn) + .setType(OwnershipType.NONE) + .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect1.getOwners(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect2.getOwners(), expected); } @@ -91,81 +91,86 @@ private void testAddOwnersExistingOwner() throws Exception { private void testAddOwnersNoExistingOwners() throws Exception { EntityClient mockClient = createMockOwnersClient(null); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); Urn newOwnerUrn = UrnUtils.getUrn("urn:li:corpuser:newOwner"); - List events = service.buildAddOwnersProposals( - ImmutableList.of(newOwnerUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - OwnershipType.NONE, - mockAuthentication()); - - 
OwnerArray expectedOwners = new OwnerArray( - ImmutableList.of(new Owner().setOwner(newOwnerUrn).setType(OwnershipType.NONE) - .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())))); + List events = + service.buildAddOwnersProposals( + ImmutableList.of(newOwnerUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + OwnershipType.NONE, + mockAuthentication()); + + OwnerArray expectedOwners = + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(newOwnerUrn) + .setType(OwnershipType.NONE) + .setTypeUrn(mapOwnershipTypeToEntity(OwnershipType.NONE.toString())))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect1.getOwners(), expectedOwners); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownerAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + Ownership ownerAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownerAspect2.getOwners(), expectedOwners); } @Test private void testRemoveOwnerExistingOwners() throws Exception { Ownership existingOwnership = new Ownership(); - existingOwnership.setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.TECHNICAL_OWNER), - new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD) - ))); + existingOwnership.setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setOwner(TEST_OWNER_URN_1).setType(OwnershipType.TECHNICAL_OWNER), + new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD)))); EntityClient mockClient = createMockOwnersClient(existingOwnership); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); - - List events = service.buildRemoveOwnersProposals( - ImmutableList.of(TEST_OWNER_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); - - Ownership expected = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner().setOwner(TEST_OWNER_URN_2).setType(OwnershipType.DATA_STEWARD)))); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); + + List events = + service.buildRemoveOwnersProposals( + ImmutableList.of(TEST_OWNER_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); + + Ownership expected = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(TEST_OWNER_URN_2) + .setType(OwnershipType.DATA_STEWARD)))); MetadataChangeProposal event1 
= events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate ownersAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + RecordTemplate ownersAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect1, expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - RecordTemplate ownersAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + RecordTemplate ownersAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect2, expected); } @@ -173,68 +178,72 @@ private void testRemoveOwnerExistingOwners() throws Exception { private void testRemoveOwnerNoExistingOwners() throws Exception { EntityClient mockClient = createMockOwnersClient(null); - final OwnerService service = new OwnerService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnerService service = new OwnerService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:corpuser:newOwner"); - List events = service.buildRemoveOwnersProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildRemoveOwnersProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); OwnerArray expected = new OwnerArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownersAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - Ownership.class); + Ownership ownersAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect1.getOwners(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.OWNERSHIP_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - Ownership ownersAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - Ownership.class); + Ownership ownersAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), Ownership.class); Assert.assertEquals(ownersAspect2.getOwners(), expected); } - private static EntityClient createMockOwnersClient(@Nullable Ownership existingOwnership) throws Exception { + private static EntityClient createMockOwnersClient(@Nullable Ownership existingOwnership) + throws Exception { return createMockEntityClient(existingOwnership, 
Constants.OWNERSHIP_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( + @Nullable RecordTemplate aspect, String aspectName) throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), - Mockito.eq(ImmutableSet.of(aspectName)), - Mockito.any(Authentication.class))) - .thenReturn(aspect != null ? ImmutableMap.of( - TEST_ENTITY_URN_1, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_1) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))), - TEST_ENTITY_URN_2, - new EntityResponse() - .setUrn(TEST_ENTITY_URN_2) - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - aspectName, - new EnvelopedAspect().setValue(new Aspect(aspect.data())) - ))) - ) : Collections.emptyMap()); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)), + Mockito.eq(ImmutableSet.of(aspectName)), + Mockito.any(Authentication.class))) + .thenReturn( + aspect != null + ? ImmutableMap.of( + TEST_ENTITY_URN_1, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_1) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data()))))), + TEST_ENTITY_URN_2, + new EntityResponse() + .setUrn(TEST_ENTITY_URN_2) + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + aspectName, + new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) + : Collections.emptyMap()); return mockClient; } @@ -243,4 +252,4 @@ private static Authentication mockAuthentication() { Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, Constants.SYSTEM_ACTOR)); return mockAuth; } -} \ No newline at end of file +} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java index dcb4a745732b2..65ca25fc8524d 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/OwnershipTypeServiceTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.service; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -23,8 +25,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; - public class OwnershipTypeServiceTest { private static final Urn TEST_OWNERSHIP_TYPE_URN = UrnUtils.getUrn("urn:li:ownershipType:test"); @@ -34,80 +34,62 @@ public class OwnershipTypeServiceTest { private void testCreateOwnershipTypeSuccess() throws Exception { final EntityClient mockClient = createOwnershipTypeMockEntityClient(); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final 
OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Case 1: With description - Urn urn = service.createOwnershipType( - "test OwnershipType", - "my description", - mockAuthentication(), - 0L - ); + Urn urn = + service.createOwnershipType( + "test OwnershipType", "my description", mockAuthentication(), 0L); Assert.assertEquals(urn, TEST_OWNERSHIP_TYPE_URN); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); // Case 2: Without description - urn = service.createOwnershipType( - "test OwnershipType", - null, - mockAuthentication(), - 0L - ); + urn = service.createOwnershipType("test OwnershipType", null, mockAuthentication(), 0L); Assert.assertEquals(urn, TEST_OWNERSHIP_TYPE_URN); - Mockito.verify(mockClient, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(2)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testCreateOwnershipTypeErrorMissingInputs() throws Exception { final EntityClient mockClient = createOwnershipTypeMockEntityClient(); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Only case: missing OwnershipType Name Assert.assertThrows( RuntimeException.class, - () -> service.createOwnershipType( - null, - "my description", - mockAuthentication(), - 0L - ) - ); + () -> service.createOwnershipType(null, "my description", mockAuthentication(), 0L)); } @Test private void testCreateOwnershipTypeError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.createOwnershipType( - "new name", - "my description", - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> service.createOwnershipType("new name", "my description", mockAuthentication(), 1L)); } @Test @@ -117,174 +99,134 @@ private void testUpdateOwnershipTypeSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); resetUpdateOwnershipTypeMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - oldName, - oldDescription, - TEST_USER_URN, - 0L, - 0L - ); - - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + mockClient, 
TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L); + + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; final String newDescription = "new description"; // Case 1: Update name only - service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - newName, - null, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, newName, oldDescription, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + service.updateOwnershipType(TEST_OWNERSHIP_TYPE_URN, newName, null, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateOwnershipTypeProposal( + TEST_OWNERSHIP_TYPE_URN, newName, oldDescription, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateOwnershipTypeMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - oldName, - oldDescription, - TEST_USER_URN, - 0L, - 0L - ); + mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L); // Case 2: Update description only service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - null, - newDescription, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, oldName, newDescription, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + TEST_OWNERSHIP_TYPE_URN, null, newDescription, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateOwnershipTypeProposal( + TEST_OWNERSHIP_TYPE_URN, oldName, newDescription, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); resetUpdateOwnershipTypeMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - oldName, - oldDescription, - TEST_USER_URN, - 0L, - 0L - ); + mockClient, TEST_OWNERSHIP_TYPE_URN, oldName, oldDescription, TEST_USER_URN, 0L, 0L); // Case 3: Update all fields at once service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - newName, - newDescription, - mockAuthentication(), - 1L - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(buildUpdateOwnershipTypeProposal(TEST_OWNERSHIP_TYPE_URN, newName, newDescription, 0L, 1L)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + TEST_OWNERSHIP_TYPE_URN, newName, newDescription, mockAuthentication(), 1L); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + buildUpdateOwnershipTypeProposal( + TEST_OWNERSHIP_TYPE_URN, newName, newDescription, 0L, 1L)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test private void testUpdateOwnershipTypeMissingOwnershipType() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - 
Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); final String newName = "new name"; // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - newName, - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateOwnershipType( + TEST_OWNERSHIP_TYPE_URN, newName, null, mockAuthentication(), 1L)); } @Test private void testUpdateOwnershipTypeError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.updateOwnershipType( - TEST_OWNERSHIP_TYPE_URN, - "new name", - null, - mockAuthentication(), - 1L - )); + Assert.assertThrows( + RuntimeException.class, + () -> + service.updateOwnershipType( + TEST_OWNERSHIP_TYPE_URN, "new name", null, mockAuthentication(), 1L)); } @Test private void testDeleteOwnershipTypeSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); service.deleteOwnershipType(TEST_OWNERSHIP_TYPE_URN, true, mockAuthentication()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntityReferences( - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntityReferences( + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class)); } @Test private void testDeleteOwnershipTypeError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity( - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.any(Authentication.class) - ); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_OWNERSHIP_TYPE_URN), Mockito.any(Authentication.class)); // Throws wrapped exception - 
Assert.assertThrows(RuntimeException.class, + Assert.assertThrows( + RuntimeException.class, () -> service.deleteOwnershipType(TEST_OWNERSHIP_TYPE_URN, false, mockAuthentication())); } @@ -296,20 +238,13 @@ private void testGetOwnershipTypeInfoSuccess() throws Exception { final String description = "description"; resetGetOwnershipTypeInfoMockEntityClient( - mockClient, - TEST_OWNERSHIP_TYPE_URN, - name, - description, - TEST_USER_URN, - 0L, - 1L - ); + mockClient, TEST_OWNERSHIP_TYPE_URN, name, description, TEST_USER_URN, 0L, 1L); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); - final OwnershipTypeInfo info = service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()); + final OwnershipTypeInfo info = + service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication()); // Assert that the info is correct. Assert.assertEquals((long) info.getCreated().getTime(), 0L); @@ -323,37 +258,40 @@ private void testGetOwnershipTypeInfoSuccess() throws Exception { private void testGetOwnershipTypeInfoNoOwnershipTypeExists() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) + Mockito.when( + mockClient.getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) .thenReturn(null); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); Assert.assertNull(service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication())); - } @Test private void testGetOwnershipTypeInfoError() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2( - Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(TEST_OWNERSHIP_TYPE_URN), - Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME)), - Mockito.any(Authentication.class)); + Mockito.doThrow(new RemoteInvocationException()) + .when(mockClient) + .getV2( + Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(TEST_OWNERSHIP_TYPE_URN), + Mockito.eq( + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, Constants.STATUS_ASPECT_NAME)), + Mockito.any(Authentication.class)); - final OwnershipTypeService service = new OwnershipTypeService( - mockClient, - Mockito.mock(Authentication.class)); + final OwnershipTypeService service = + new OwnershipTypeService(mockClient, Mockito.mock(Authentication.class)); // Throws wrapped exception - Assert.assertThrows(RuntimeException.class, () -> service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication())); + Assert.assertThrows( + RuntimeException.class, + () -> service.getOwnershipTypeInfo(TEST_OWNERSHIP_TYPE_URN, mockAuthentication())); } private static MetadataChangeProposal buildUpdateOwnershipTypeProposal( @@ -380,10 +318,12 @@ private static MetadataChangeProposal buildUpdateOwnershipTypeProposal( private static EntityClient 
   private static EntityClient createOwnershipTypeMockEntityClient() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(TEST_OWNERSHIP_TYPE_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(TEST_OWNERSHIP_TYPE_URN.toString());
     return mockClient;
   }
 
@@ -394,34 +334,40 @@ private static void resetUpdateOwnershipTypeMockEntityClient(
       final String existingDescription,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(ownershipTypeUrn.toString());
-
-    final OwnershipTypeInfo existingInfo = new OwnershipTypeInfo()
-        .setName(existingName)
-        .setDescription(existingDescription)
-        .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
-        .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(ownershipTypeUrn),
-        Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(ownershipTypeUrn.toString());
+
+    final OwnershipTypeInfo existingInfo =
+        new OwnershipTypeInfo()
+            .setName(existingName)
+            .setDescription(existingDescription)
+            .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
+            .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
+                Mockito.eq(ownershipTypeUrn),
+                Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(ownershipTypeUrn)
                 .setEntityName(OWNERSHIP_TYPE_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    OWNERSHIP_TYPE_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(existingInfo.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            OWNERSHIP_TYPE_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))));
   }
 
   private static void resetGetOwnershipTypeInfoMockEntityClient(
@@ -431,29 +377,33 @@ private static void resetGetOwnershipTypeInfoMockEntityClient(
       final String existingDescription,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    final OwnershipTypeInfo existingInfo = new OwnershipTypeInfo()
-        .setName(existingName)
-        .setDescription(existingDescription)
-        .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
-        .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
-        Mockito.eq(ownershipTypeUrn),
-        Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    final OwnershipTypeInfo existingInfo =
+        new OwnershipTypeInfo()
+            .setName(existingName)
+            .setDescription(existingDescription)
+            .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
+            .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(OWNERSHIP_TYPE_ENTITY_NAME),
+                Mockito.eq(ownershipTypeUrn),
+                Mockito.eq(ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(ownershipTypeUrn)
                 .setEntityName(OWNERSHIP_TYPE_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    OWNERSHIP_TYPE_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(existingInfo.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            OWNERSHIP_TYPE_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))));
   }
 
   private static Authentication mockAuthentication() {
@@ -461,5 +411,4 @@ private static Authentication mockAuthentication() {
     Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId()));
     return mockAuth;
   }
-
-}
\ No newline at end of file
+}
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java
index 6ef0065b4d5db..5726dcc6cd17a 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/QueryServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -31,144 +33,138 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class QueryServiceTest {
 
   private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test");
-  private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)");
-  private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)");
+  private static final Urn TEST_DATASET_URN =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)");
+  private static final Urn TEST_DATASET_URN_2 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)");
   private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test");
 
   @Test
   private void testCreateQuerySuccess() throws Exception {
     final EntityClient mockClient = createQueryMockEntityClient();
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Case 1: All fields provided
-    Urn urn = service.createQuery(
-        "test query",
-        "my description",
-        QuerySource.MANUAL,
-        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-        ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
-        mockAuthentication(),
-        0L
-    );
+    Urn urn =
+        service.createQuery(
+            "test query",
+            "my description",
+            QuerySource.MANUAL,
+            new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+            ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
+            mockAuthentication(),
+            0L);
 
     Assert.assertEquals(urn, TEST_QUERY_URN);
 
     // Ingests both aspects - properties and subjects
-    Mockito.verify(mockClient, Mockito.times(2)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(2))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     // Case 2: Null fields provided
-    urn = service.createQuery(
-        null,
-        null,
-        QuerySource.MANUAL,
-        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-        ImmutableList.of(),
-        mockAuthentication(),
-        0L
-    );
+    urn =
+        service.createQuery(
+            null,
+            null,
+            QuerySource.MANUAL,
+            new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+            ImmutableList.of(),
+            mockAuthentication(),
+            0L);
 
     Assert.assertEquals(urn, TEST_QUERY_URN);
-    Mockito.verify(mockClient, Mockito.times(4)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(4))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
   private void testCreateQueryErrorMissingInputs() throws Exception {
     final EntityClient mockClient = createQueryMockEntityClient();
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Case 1: missing Query Source
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createQuery(
-            null,
-            null,
-            null, // Cannot be null
-            new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-            ImmutableList.of(),
-            mockAuthentication(),
-            0L
-        )
-    );
-
+        () ->
+            service.createQuery(
+                null,
+                null,
+                null, // Cannot be null
+                new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+                ImmutableList.of(),
+                mockAuthentication(),
+                0L));
     // Case 2: missing Query Statement
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createQuery(
-            null,
-            null,
-            QuerySource.MANUAL, // Cannot be null
-            null,
-            ImmutableList.of(),
-            mockAuthentication(),
-            0L
-        )
-    );
+        () ->
+            service.createQuery(
+                null,
+                null,
+                QuerySource.MANUAL, // Cannot be null
+                null,
+                ImmutableList.of(),
+                mockAuthentication(),
+                0L));
 
     // Case 3: missing Query Subjects
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createQuery(
-            null,
-            null,
-            QuerySource.MANUAL, // Cannot be null
-            new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-            null,
-            mockAuthentication(),
-            0L
-        )
-    );
+        () ->
+            service.createQuery(
+                null,
+                null,
+                QuerySource.MANUAL, // Cannot be null
+                new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+                null,
+                mockAuthentication(),
+                0L));
   }
 
   @Test
   private void testCreateQueryError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.createQuery(
-        "test query",
-        "my description",
-        QuerySource.MANUAL,
-        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-        ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
-        mockAuthentication(),
-        0L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.createQuery(
+                "test query",
+                "my description",
+                QuerySource.MANUAL,
+                new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+                ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
+                mockAuthentication(),
+                0L));
   }
 
   @Test
   private void testUpdateQuerySuccess() throws Exception {
     final String oldName = "old name";
     final String oldDescription = "old description";
-    final QueryStatement oldStatement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
+    final QueryStatement oldStatement =
+        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
@@ -181,35 +177,34 @@ private void testUpdateQuerySuccess() throws Exception {
         oldStatement,
         TEST_USER_URN,
         0L,
-        0L
-    );
+        0L);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     final String newName = "new name";
     final String newDescription = "new description";
-    final QueryStatement newStatement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
-    final List<QuerySubject> newSubjects = ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2));
+    final QueryStatement newStatement =
+        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
+    final List<QuerySubject> newSubjects =
+        ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2));
 
     // Case 1: Update name only
-    service.updateQuery(
-        TEST_QUERY_URN,
-        newName,
-        null,
-        null,
-        null,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(
-        Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, newName, oldDescription, QuerySource.MANUAL, oldStatement,
-            0L, 1L))),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateQuery(TEST_QUERY_URN, newName, null, null, null, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchIngestProposals(
+            Mockito.eq(
+                ImmutableList.of(
+                    buildUpdateQueryPropertiesProposal(
+                        TEST_QUERY_URN,
+                        newName,
+                        oldDescription,
+                        QuerySource.MANUAL,
+                        oldStatement,
+                        0L,
+                        1L))),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetQueryPropertiesClient(
         mockClient,
@@ -220,26 +215,25 @@ private void testUpdateQuerySuccess() throws Exception {
         oldStatement,
         TEST_USER_URN,
         0L,
-        0L
-    );
+        0L);
 
     // Case 2: Update description only
-    service.updateQuery(
-        TEST_QUERY_URN,
-        null,
-        newDescription,
-        null,
-        null,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(
-        Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, newDescription, QuerySource.MANUAL,
-            oldStatement, 0L, 1L))),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateQuery(TEST_QUERY_URN, null, newDescription, null, null, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchIngestProposals(
+            Mockito.eq(
+                ImmutableList.of(
+                    buildUpdateQueryPropertiesProposal(
+                        TEST_QUERY_URN,
+                        oldName,
+                        newDescription,
+                        QuerySource.MANUAL,
+                        oldStatement,
+                        0L,
+                        1L))),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetQueryPropertiesClient(
         mockClient,
@@ -250,26 +244,25 @@ private void testUpdateQuerySuccess() throws Exception {
         oldStatement,
         TEST_USER_URN,
         0L,
-        0L
-    );
+        0L);
 
     // Case 3: Update definition only
-    service.updateQuery(
-        TEST_QUERY_URN,
-        null,
-        null,
-        newStatement,
-        null,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(
-        Mockito.eq(ImmutableList.of(buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, oldDescription, QuerySource.MANUAL,
-            newStatement, 0L, 1L))),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateQuery(TEST_QUERY_URN, null, null, newStatement, null, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchIngestProposals(
+            Mockito.eq(
+                ImmutableList.of(
+                    buildUpdateQueryPropertiesProposal(
+                        TEST_QUERY_URN,
+                        oldName,
+                        oldDescription,
+                        QuerySource.MANUAL,
+                        newStatement,
+                        0L,
+                        1L))),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetQueryPropertiesClient(
         mockClient,
@@ -280,27 +273,26 @@ private void testUpdateQuerySuccess() throws Exception {
        oldStatement,
         TEST_USER_URN,
         0L,
-        0L
-    );
+        0L);
 
     // Case 4: Update subjects only
-    service.updateQuery(
-        TEST_QUERY_URN,
-        null,
-        null,
-        null,
-        newSubjects,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(
-        Mockito.eq(ImmutableList.of(
-            buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, oldName, oldDescription, QuerySource.MANUAL, oldStatement, 0L, 1L),
-            buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateQuery(TEST_QUERY_URN, null, null, null, newSubjects, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchIngestProposals(
+            Mockito.eq(
+                ImmutableList.of(
+                    buildUpdateQueryPropertiesProposal(
+                        TEST_QUERY_URN,
+                        oldName,
+                        oldDescription,
+                        QuerySource.MANUAL,
+                        oldStatement,
+                        0L,
+                        1L),
+                    buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     // Case 5: Update all fields
     service.updateQuery(
@@ -310,103 +302,106 @@ private void testUpdateQuerySuccess() throws Exception {
         newStatement,
         newSubjects,
         mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(
-        Mockito.eq(ImmutableList.of(
-            buildUpdateQueryPropertiesProposal(TEST_QUERY_URN, newName, newDescription, QuerySource.MANUAL, newStatement, 0L, 1L),
-            buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects)
-        )),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+        1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .batchIngestProposals(
+            Mockito.eq(
+                ImmutableList.of(
+                    buildUpdateQueryPropertiesProposal(
+                        TEST_QUERY_URN,
+                        newName,
+                        newDescription,
+                        QuerySource.MANUAL,
+                        newStatement,
+                        0L,
+                        1L),
+                    buildUpdateQuerySubjectsProposal(TEST_QUERY_URN, newSubjects))),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
   private void testUpdateQueryMissingQuery() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(TEST_QUERY_URN),
+                Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(null);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.updateQuery(
-        TEST_QUERY_URN,
-        "new name",
-        null,
-        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-        ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.updateQuery(
+                TEST_QUERY_URN,
+                "new name",
+                null,
+                new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+                ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
+                mockAuthentication(),
+                1L));
   }
 
   @Test
   private void testUpdateQueryError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(QUERY_ENTITY_NAME),
+            Mockito.eq(TEST_QUERY_URN),
+            Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.updateQuery(
-        TEST_QUERY_URN,
-        "new name",
-        null,
-        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
-        ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.updateQuery(
+                TEST_QUERY_URN,
+                "new name",
+                null,
+                new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table"),
+                ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)),
+                mockAuthentication(),
+                1L));
   }
 
   @Test
   private void testDeleteQuerySuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     service.deleteQuery(TEST_QUERY_URN, mockAuthentication());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class));
   }
 
   @Test
   private void testDeleteQueryError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity(
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .deleteEntity(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.deleteQuery(TEST_QUERY_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class, () -> service.deleteQuery(TEST_QUERY_URN, mockAuthentication()));
   }
 
   @Test
@@ -415,7 +410,8 @@ private void testGetQueryPropertiesSuccess() throws Exception {
     final String name = "name";
     final String description = "description";
-    final QueryStatement statement = new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
+    final QueryStatement statement =
+        new QueryStatement().setLanguage(QueryLanguage.SQL).setValue("SELECT * FROM Table");
 
     resetQueryPropertiesClient(
         mockClient,
@@ -426,14 +422,12 @@ private void testGetQueryPropertiesSuccess() throws Exception {
         statement,
         TEST_USER_URN,
         0L,
-        1L
-    );
+        1L);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
-    final QueryProperties properties = service.getQueryProperties(TEST_QUERY_URN, mockAuthentication());
+    final QueryProperties properties =
+        service.getQueryProperties(TEST_QUERY_URN, mockAuthentication());
 
     // Assert that the info is correct.
     Assert.assertEquals((long) properties.getCreated().getTime(), 0L);
@@ -449,16 +443,17 @@ private void testGetQueryPropertiesSuccess() throws Exception {
   private void testGetQueryPropertiesNoQueryExists() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(TEST_QUERY_URN),
+                Mockito.eq(
+                    ImmutableSet.of(
+                        QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(null);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     Assert.assertNull(service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
   }
@@ -467,38 +462,40 @@ private void testGetQueryPropertiesNoQueryExists() throws Exception {
   private void testGetQueryPropertiesError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(QUERY_ENTITY_NAME),
+            Mockito.eq(TEST_QUERY_URN),
+            Mockito.eq(
+                ImmutableSet.of(
+                    QUERY_PROPERTIES_ASPECT_NAME, Constants.QUERY_SUBJECTS_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
   }
 
   @Test
   private void testGetQuerySubjectsSuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final QuerySubjects existingSubjects = new QuerySubjects()
-        .setSubjects(new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))));
+    final QuerySubjects existingSubjects =
+        new QuerySubjects()
+            .setSubjects(
+                new QuerySubjectArray(
+                    ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))));
 
-    resetQuerySubjectsClient(
-        mockClient,
-        TEST_QUERY_URN,
-        existingSubjects
-    );
+    resetQuerySubjectsClient(mockClient, TEST_QUERY_URN, existingSubjects);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
-    final QuerySubjects querySubjects = service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication());
+    final QuerySubjects querySubjects =
+        service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication());
 
     Assert.assertEquals(querySubjects, existingSubjects);
   }
@@ -507,16 +504,16 @@ private void testGetQuerySubjectsSuccess() throws Exception {
   private void testGetQuerySubjectsNoQueryExists() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(TEST_QUERY_URN),
+                Mockito.eq(
+                    ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(null);
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     Assert.assertNull(service.getQueryProperties(TEST_QUERY_URN, mockAuthentication()));
   }
@@ -525,23 +522,24 @@ private void testGetQuerySubjectsNoQueryExists() throws Exception {
   private void testGetQuerySubjectsError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(TEST_QUERY_URN),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(QUERY_ENTITY_NAME),
+            Mockito.eq(TEST_QUERY_URN),
+            Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final QueryService service = new QueryService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final QueryService service = new QueryService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getQuerySubjects(TEST_QUERY_URN, mockAuthentication()));
   }
 
   private static MetadataChangeProposal buildUpdateQuerySubjectsProposal(
-      final Urn urn,
-      final List<QuerySubject> querySubjects) {
+      final Urn urn, final List<QuerySubject> querySubjects) {
     QuerySubjects subjects = new QuerySubjects();
     subjects.setSubjects(new QuerySubjectArray(querySubjects));
 
@@ -583,10 +581,12 @@ private static MetadataChangeProposal buildUpdateQueryPropertiesProposal(
 
   private static EntityClient createQueryMockEntityClient() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(TEST_QUERY_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(TEST_QUERY_URN.toString());
     return mockClient;
   }
 
@@ -599,63 +599,75 @@ private static void resetQueryPropertiesClient(
       final QueryStatement existingStatement,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(queryUrn.toString());
-
-    final QueryProperties existingProperties = new QueryProperties()
-        .setSource(existingSource)
-        .setName(existingName)
-        .setDescription(existingDescription)
-        .setStatement(existingStatement)
-        .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
-        .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(queryUrn),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(queryUrn.toString());
+
+    final QueryProperties existingProperties =
+        new QueryProperties()
+            .setSource(existingSource)
+            .setName(existingName)
+            .setDescription(existingDescription)
+            .setStatement(existingStatement)
+            .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
+            .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(queryUrn),
+                Mockito.eq(
+                    ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(queryUrn)
                 .setEntityName(QUERY_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    QUERY_PROPERTIES_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(existingProperties.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            QUERY_PROPERTIES_ASPECT_NAME,
+                            new EnvelopedAspect()
+                                .setValue(new Aspect(existingProperties.data())))))));
   }
 
   private static void resetQuerySubjectsClient(
-      final EntityClient mockClient,
-      final Urn queryUrn,
-      final QuerySubjects subjects) throws Exception {
+      final EntityClient mockClient, final Urn queryUrn, final QuerySubjects subjects)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(queryUrn.toString());
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(QUERY_ENTITY_NAME),
-        Mockito.eq(queryUrn),
-        Mockito.eq(ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(queryUrn.toString());
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(QUERY_ENTITY_NAME),
+                Mockito.eq(queryUrn),
+                Mockito.eq(
+                    ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(
             new EntityResponse()
                 .setUrn(queryUrn)
                 .setEntityName(QUERY_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    QUERY_SUBJECTS_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(subjects.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            QUERY_SUBJECTS_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(subjects.data())))))));
   }
 
   private static Authentication mockAuthentication() {
@@ -663,4 +675,4 @@ private static Authentication mockAuthentication() {
     Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId()));
     return mockAuth;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java
index 43ebc53385ad4..b034111e7825f 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/SettingsServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Authentication;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -25,9 +27,6 @@
 import org.mockito.Mockito;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class SettingsServiceTest {
 
   private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test");
@@ -35,26 +34,28 @@ public class SettingsServiceTest {
 
   @Test
   private static void testGetCorpUserSettingsNullSettings() throws Exception {
-    final SettingsService service = new SettingsService(
-        getCorpUserSettingsEntityClientMock(null),
-        Mockito.mock(Authentication.class)
-    );
-    final CorpUserSettings res = service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
+    final SettingsService service =
+        new SettingsService(
+            getCorpUserSettingsEntityClientMock(null), Mockito.mock(Authentication.class));
+    final CorpUserSettings res =
+        service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
     Assert.assertNull(res);
   }
 
   @Test
   private static void testGetCorpUserSettingsValidSettings() throws Exception {
-    final CorpUserSettings existingSettings = new CorpUserSettings()
-        .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
-        .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
-
-    final SettingsService service = new SettingsService(
-        getCorpUserSettingsEntityClientMock(existingSettings),
-        Mockito.mock(Authentication.class)
-    );
-
-    final CorpUserSettings res = service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
+    final CorpUserSettings existingSettings =
+        new CorpUserSettings()
+            .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+
+    final SettingsService service =
+        new SettingsService(
+            getCorpUserSettingsEntityClientMock(existingSettings),
+            Mockito.mock(Authentication.class));
+
+    final CorpUserSettings res =
+        service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class));
     Assert.assertEquals(existingSettings, res);
   }
 
@@ -62,107 +63,94 @@ private static void testGetCorpUserSettingsValidSettings() throws Exception {
   private static void testGetCorpUserSettingsSettingsException() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
-        Mockito.eq(TEST_USER_URN),
-        Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenThrow(new RemoteInvocationException());
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
+                Mockito.eq(TEST_USER_URN),
+                Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenThrow(new RemoteInvocationException());
 
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
 
-    Assert.assertThrows(RuntimeException.class, () -> service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getCorpUserSettings(TEST_USER_URN, Mockito.mock(Authentication.class)));
   }
 
   @Test
   private static void testUpdateCorpUserSettingsValidSettings() throws Exception {
-    final CorpUserSettings newSettings = new CorpUserSettings()
-        .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
-        .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+    final CorpUserSettings newSettings =
+        new CorpUserSettings()
+            .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateCorpUserSettingsChangeProposal(
-        TEST_USER_URN,
-        newSettings
-    );
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateCorpUserSettingsChangeProposal(TEST_USER_URN, newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenReturn(TEST_USER_URN.toString());
-
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
-
-    service.updateCorpUserSettings(
-        TEST_USER_URN,
-        newSettings,
-        Mockito.mock(Authentication.class));
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenReturn(TEST_USER_URN.toString());
+
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
+
+    service.updateCorpUserSettings(TEST_USER_URN, newSettings, Mockito.mock(Authentication.class));
 
     Mockito.verify(mockClient, Mockito.times(1))
         .ingestProposal(
-            Mockito.eq(expectedProposal),
-            Mockito.any(Authentication.class),
-            Mockito.eq(false)
-        );
+            Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false));
   }
 
   @Test
   private static void testUpdateCorpUserSettingsSettingsException() throws Exception {
-    final CorpUserSettings newSettings = new CorpUserSettings()
-        .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
-        .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
+    final CorpUserSettings newSettings =
+        new CorpUserSettings()
+            .setViews(new CorpUserViewsSettings().setDefaultView(TEST_VIEW_URN))
+            .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateCorpUserSettingsChangeProposal(
-        TEST_USER_URN,
-        newSettings
-    );
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateCorpUserSettingsChangeProposal(TEST_USER_URN, newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenThrow(new RemoteInvocationException());
-
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
-
-    Assert.assertThrows(RuntimeException.class, () -> service.updateCorpUserSettings(
-        TEST_USER_URN,
-        newSettings,
-        Mockito.mock(Authentication.class)));
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenThrow(new RemoteInvocationException());
+
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
+
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.updateCorpUserSettings(
+                TEST_USER_URN, newSettings, Mockito.mock(Authentication.class)));
   }
 
   @Test
   private static void testGetGlobalSettingsNullSettings() throws Exception {
-    final SettingsService service = new SettingsService(
-        getGlobalSettingsEntityClientMock(null),
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(
+            getGlobalSettingsEntityClientMock(null), Mockito.mock(Authentication.class));
     final GlobalSettingsInfo res = service.getGlobalSettings(Mockito.mock(Authentication.class));
     Assert.assertNull(res);
   }
 
   @Test
   private static void testGetGlobalSettingsValidSettings() throws Exception {
-    final GlobalSettingsInfo existingSettings = new GlobalSettingsInfo()
-        .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
+    final GlobalSettingsInfo existingSettings =
+        new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
 
-    final SettingsService service = new SettingsService(
-        getGlobalSettingsEntityClientMock(existingSettings),
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(
+            getGlobalSettingsEntityClientMock(existingSettings),
+            Mockito.mock(Authentication.class));
 
     final GlobalSettingsInfo res = service.getGlobalSettings(Mockito.mock(Authentication.class));
     Assert.assertEquals(existingSettings, res);
@@ -172,136 +160,131 @@ private static void testGetGlobalSettingsValidSettings() throws Exception {
   private static void testGetGlobalSettingsSettingsException() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
-        Mockito.eq(GLOBAL_SETTINGS_URN),
-        Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenThrow(new RemoteInvocationException());
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
+                Mockito.eq(GLOBAL_SETTINGS_URN),
+                Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenThrow(new RemoteInvocationException());
 
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
 
-    Assert.assertThrows(RuntimeException.class, () -> service.getGlobalSettings(Mockito.mock(Authentication.class)));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.getGlobalSettings(Mockito.mock(Authentication.class)));
   }
 
   @Test
   private static void testUpdateGlobalSettingsValidSettings() throws Exception {
-    final GlobalSettingsInfo newSettings = new GlobalSettingsInfo()
-        .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
+    final GlobalSettingsInfo newSettings =
+        new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateGlobalSettingsChangeProposal(newSettings);
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateGlobalSettingsChangeProposal(newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenReturn(GLOBAL_SETTINGS_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenReturn(GLOBAL_SETTINGS_URN.toString());
 
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
 
-    service.updateGlobalSettings(
-        newSettings,
-        Mockito.mock(Authentication.class));
+    service.updateGlobalSettings(newSettings, Mockito.mock(Authentication.class));
 
     Mockito.verify(mockClient, Mockito.times(1))
         .ingestProposal(
-            Mockito.eq(expectedProposal),
-            Mockito.any(Authentication.class),
-            Mockito.eq(false)
-        );
+            Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false));
   }
 
   @Test
   private static void testUpdateGlobalSettingsSettingsException() throws Exception {
-    final GlobalSettingsInfo newSettings = new GlobalSettingsInfo()
-        .setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
+    final GlobalSettingsInfo newSettings =
+        new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_VIEW_URN));
 
-    final MetadataChangeProposal expectedProposal = buildUpdateGlobalSettingsChangeProposal(
-        newSettings
-    );
+    final MetadataChangeProposal expectedProposal =
+        buildUpdateGlobalSettingsChangeProposal(newSettings);
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.eq(expectedProposal),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    )).thenThrow(new RemoteInvocationException());
-
-    final SettingsService service = new SettingsService(
-        mockClient,
-        Mockito.mock(Authentication.class)
-    );
-
-    Assert.assertThrows(RuntimeException.class, () -> service.updateGlobalSettings(
-        newSettings,
-        Mockito.mock(Authentication.class)));
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)))
+        .thenThrow(new RemoteInvocationException());
+
+    final SettingsService service =
+        new SettingsService(mockClient, Mockito.mock(Authentication.class));
+
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.updateGlobalSettings(newSettings, Mockito.mock(Authentication.class)));
  }
 
-  private static EntityClient getCorpUserSettingsEntityClientMock(@Nullable final CorpUserSettings settings)
-      throws Exception {
+  private static EntityClient getCorpUserSettingsEntityClientMock(
+      @Nullable final CorpUserSettings settings) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    EnvelopedAspectMap aspectMap = settings != null ? new EnvelopedAspectMap(ImmutableMap.of(
-        Constants.CORP_USER_SETTINGS_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(settings.data()))
-    )) : new EnvelopedAspectMap();
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
-        Mockito.eq(TEST_USER_URN),
-        Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        new EntityResponse()
-            .setEntityName(Constants.CORP_USER_ENTITY_NAME)
-            .setUrn(TEST_USER_URN)
-            .setAspects(aspectMap)
-    );
+    EnvelopedAspectMap aspectMap =
+        settings != null
+            ? new EnvelopedAspectMap(
+                ImmutableMap.of(
+                    Constants.CORP_USER_SETTINGS_ASPECT_NAME,
+                    new EnvelopedAspect().setValue(new Aspect(settings.data()))))
+            : new EnvelopedAspectMap();
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(Constants.CORP_USER_ENTITY_NAME),
+                Mockito.eq(TEST_USER_URN),
+                Mockito.eq(ImmutableSet.of(Constants.CORP_USER_SETTINGS_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse()
+                .setEntityName(Constants.CORP_USER_ENTITY_NAME)
+                .setUrn(TEST_USER_URN)
+                .setAspects(aspectMap));
     return mockClient;
   }
 
-  private static EntityClient getGlobalSettingsEntityClientMock(@Nullable final GlobalSettingsInfo settings)
-      throws Exception {
+  private static EntityClient getGlobalSettingsEntityClientMock(
+      @Nullable final GlobalSettingsInfo settings) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    EnvelopedAspectMap aspectMap = settings != null ? new EnvelopedAspectMap(ImmutableMap.of(
-        GLOBAL_SETTINGS_INFO_ASPECT_NAME,
-        new EnvelopedAspect().setValue(new Aspect(settings.data()))
-    )) : new EnvelopedAspectMap();
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
-        Mockito.eq(GLOBAL_SETTINGS_URN),
-        Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)
-    )).thenReturn(
-        new EntityResponse()
-            .setEntityName(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)
-            .setUrn(GLOBAL_SETTINGS_URN)
-            .setAspects(aspectMap)
-    );
+    EnvelopedAspectMap aspectMap =
+        settings != null
+            ? new EnvelopedAspectMap(
+                ImmutableMap.of(
+                    GLOBAL_SETTINGS_INFO_ASPECT_NAME,
+                    new EnvelopedAspect().setValue(new Aspect(settings.data()))))
+            : new EnvelopedAspectMap();
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(GLOBAL_SETTINGS_ENTITY_NAME),
+                Mockito.eq(GLOBAL_SETTINGS_URN),
+                Mockito.eq(ImmutableSet.of(GLOBAL_SETTINGS_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            new EntityResponse()
+                .setEntityName(Constants.GLOBAL_SETTINGS_INFO_ASPECT_NAME)
+                .setUrn(GLOBAL_SETTINGS_URN)
+                .setAspects(aspectMap));
     return mockClient;
   }
 
   private static MetadataChangeProposal buildUpdateCorpUserSettingsChangeProposal(
-      final Urn urn,
-      final CorpUserSettings newSettings) {
-      final MetadataChangeProposal mcp = new MetadataChangeProposal();
-      mcp.setEntityUrn(urn);
-      mcp.setEntityType(CORP_USER_ENTITY_NAME);
-      mcp.setAspectName(CORP_USER_SETTINGS_ASPECT_NAME);
-      mcp.setChangeType(ChangeType.UPSERT);
-      mcp.setAspect(GenericRecordUtils.serializeAspect(newSettings));
-      return mcp;
+      final Urn urn, final CorpUserSettings newSettings) {
+    final MetadataChangeProposal mcp = new MetadataChangeProposal();
+    mcp.setEntityUrn(urn);
+    mcp.setEntityType(CORP_USER_ENTITY_NAME);
+    mcp.setAspectName(CORP_USER_SETTINGS_ASPECT_NAME);
+    mcp.setChangeType(ChangeType.UPSERT);
+    mcp.setAspect(GenericRecordUtils.serializeAspect(newSettings));
+    return mcp;
   }
 
   private static MetadataChangeProposal buildUpdateGlobalSettingsChangeProposal(
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java
index 125265540dc77..e7ed3db82d0f2 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/TagServiceTest.java
@@ -33,56 +33,55 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-
 public class TagServiceTest {
 
   private static final Urn TEST_TAG_URN_1 = UrnUtils.getUrn("urn:li:tag:test");
   private static final Urn TEST_TAG_URN_2 = UrnUtils.getUrn("urn:li:tag:test2");
 
-  private static final Urn TEST_ENTITY_URN_1 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
-  private static final Urn TEST_ENTITY_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)");
+  private static final Urn TEST_ENTITY_URN_1 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test,PROD)");
+  private static final Urn TEST_ENTITY_URN_2 =
+      UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:kafka,test1,PROD)");
 
   @Test
   private void testAddTagToEntityExistingTag() throws Exception {
     GlobalTags existingGlobalTags = new GlobalTags();
-    existingGlobalTags.setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1))
-    )));
+    existingGlobalTags.setTags(
+        new TagAssociationArray(
+            ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)))));
     EntityClient mockClient = createMockGlobalTagsClient(existingGlobalTags);
 
-    final TagService service = new TagService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag");
-    List<MetadataChangeProposal> events = service.buildAddTagsProposals(
-        ImmutableList.of(newTagUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
-
-    TagAssociationArray expected = new TagAssociationArray(
-        ImmutableList.of(
-            new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)),
-            new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
+    List<MetadataChangeProposal> events =
+        service.buildAddTagsProposals(
+            ImmutableList.of(newTagUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
+
+    TagAssociationArray expected =
+        new TagAssociationArray(
+            ImmutableList.of(
+                new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)),
+                new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        GlobalTags.class);
+    GlobalTags tagsAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class);
     Assert.assertEquals(tagsAspect1.getTags(), expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        GlobalTags.class);
+    GlobalTags tagsAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class);
     Assert.assertEquals(tagsAspect2.getTags(), expected);
   }
 
@@ -90,37 +89,35 @@ private void testAddTagToEntityExistingTag() throws Exception {
   private void testAddGlobalTagsToEntityNoExistingTag() throws Exception {
     EntityClient mockClient = createMockGlobalTagsClient(null);
 
-    final TagService service = new TagService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag");
-    List<MetadataChangeProposal> events = service.buildAddTagsProposals(
-        ImmutableList.of(newTagUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
-
-    TagAssociationArray expectedTermsArray = new TagAssociationArray(
-        ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
+    List<MetadataChangeProposal> events =
+        service.buildAddTagsProposals(
+            ImmutableList.of(newTagUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
+
+    TagAssociationArray expectedTermsArray =
+        new TagAssociationArray(
+            ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        GlobalTags.class);
+    GlobalTags tagsAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class);
     Assert.assertEquals(tagsAspect1.getTags(), expectedTermsArray);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        GlobalTags.class);
+    GlobalTags tagsAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class);
     Assert.assertEquals(tagsAspect2.getTags(), expectedTermsArray);
   }
 
@@ -128,50 +125,59 @@ private void testAddGlobalTagsToEntityNoExistingTag() throws Exception {
   private void testAddTagToSchemaFieldExistingTag() throws Exception {
     EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata();
     existingMetadata.setEditableSchemaFieldInfo(
-        new EditableSchemaFieldInfoArray(ImmutableList.of(
-            new EditableSchemaFieldInfo()
-                .setFieldPath("myfield")
-                .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray(
-                    ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)))
-                )))
-        ))
-    );
+        new EditableSchemaFieldInfoArray(
+            ImmutableList.of(
+                new EditableSchemaFieldInfo()
+                    .setFieldPath("myfield")
+                    .setGlobalTags(
+                        new GlobalTags()
+                            .setTags(
+                                new TagAssociationArray(
+                                    ImmutableList.of(
+                                        new TagAssociation()
+                                            .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)))))))));
     EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata);
 
-    final TagService service = new TagService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag");
-    List<MetadataChangeProposal> events = service.buildAddTagsProposals(
-        ImmutableList.of(newTagUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
-            new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
-        mockAuthentication());
-
-    TagAssociationArray expected = new TagAssociationArray(
-        ImmutableList.of(
-            new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)),
-            new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
+    List<MetadataChangeProposal> events =
+        service.buildAddTagsProposals(
+            ImmutableList.of(newTagUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
+                new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
+            mockAuthentication());
+
+    TagAssociationArray expected =
+        new TagAssociationArray(
+            ImmutableList.of(
+                new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)),
+                new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected);
+    EditableSchemaMetadata editableSchemaMetadataAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(),
+            event1.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(),
+        expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected);
+    EditableSchemaMetadata editableSchemaMetadataAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(),
+            event2.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(),
+        expected);
   }
 
   @Test
@@ -179,90 +185,95 @@ private void testAddTagToSchemaFieldExistingTag() throws Exception {
   private void testAddGlobalTagsToSchemaFieldNoExistingTag() throws Exception {
     EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata();
     existingMetadata.setEditableSchemaFieldInfo(
-        new EditableSchemaFieldInfoArray(ImmutableList.of(
-            new EditableSchemaFieldInfo()
-                .setFieldPath("myfield")
-                .setGlobalTags(new GlobalTags())))
-    );
+        new EditableSchemaFieldInfoArray(
+            ImmutableList.of(
+                new EditableSchemaFieldInfo()
+                    .setFieldPath("myfield")
+                    .setGlobalTags(new GlobalTags()))));
 
     EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata);
 
-    final TagService service = new TagService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class));
 
     Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag");
-    List<MetadataChangeProposal> events = service.buildAddTagsProposals(
-        ImmutableList.of(newTagUrn),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
-            new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
-        mockAuthentication());
-
-    TagAssociationArray expected = new TagAssociationArray(ImmutableList.of(
-        new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn)))
-    );
+    List<MetadataChangeProposal> events =
+        service.buildAddTagsProposals(
+            ImmutableList.of(newTagUrn),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"),
+                new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")),
+            mockAuthentication());
+
+    TagAssociationArray expected =
+        new TagAssociationArray(
+            ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(newTagUrn))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected);
+    EditableSchemaMetadata editableSchemaMetadataAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(),
+            event1.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(),
+        expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect(
-        event2.getAspect().getValue(),
-        event2.getAspect().getContentType(),
-        EditableSchemaMetadata.class);
-    Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected);
+    EditableSchemaMetadata editableSchemaMetadataAspect2 =
+        GenericRecordUtils.deserializeAspect(
+            event2.getAspect().getValue(),
+            event2.getAspect().getContentType(),
+            EditableSchemaMetadata.class);
+    Assert.assertEquals(
+        editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(),
+        expected);
   }
 
   @Test
   private void testRemoveTagToEntityExistingTag() throws Exception {
     GlobalTags existingGlobalTags = new GlobalTags();
-    existingGlobalTags.setTags(new TagAssociationArray(ImmutableList.of(
-        new TagAssociation()
-            .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)),
-        new TagAssociation()
-            .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))
-    )));
+    existingGlobalTags.setTags(
+        new TagAssociationArray(
+            ImmutableList.of(
+                new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)),
+                new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)))));
     EntityClient mockClient = createMockGlobalTagsClient(existingGlobalTags);
 
-    final TagService service = new TagService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class));
 
-    List<MetadataChangeProposal> events = service.buildRemoveTagsProposals(
-        ImmutableList.of(TEST_TAG_URN_1),
-        ImmutableList.of(
-            new ResourceReference(TEST_ENTITY_URN_1, null, null),
-            new ResourceReference(TEST_ENTITY_URN_2, null, null)),
-        mockAuthentication());
+    List<MetadataChangeProposal> events =
+        service.buildRemoveTagsProposals(
+            ImmutableList.of(TEST_TAG_URN_1),
+            ImmutableList.of(
+                new ResourceReference(TEST_ENTITY_URN_1, null, null),
+                new ResourceReference(TEST_ENTITY_URN_2, null, null)),
+            mockAuthentication());
 
-    GlobalTags expected = new GlobalTags().setTags(new TagAssociationArray(
-        ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)))));
+    GlobalTags expected =
+        new GlobalTags()
+            .setTags(
+                new TagAssociationArray(
+                    ImmutableList.of(
+                        new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)))));
 
     MetadataChangeProposal event1 = events.get(0);
     Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME);
     Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME);
-    RecordTemplate tagsAspect1 = GenericRecordUtils.deserializeAspect(
-        event1.getAspect().getValue(),
-        event1.getAspect().getContentType(),
-        GlobalTags.class);
+    RecordTemplate tagsAspect1 =
+        GenericRecordUtils.deserializeAspect(
+            event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class);
     Assert.assertEquals(tagsAspect1, expected);
 
     MetadataChangeProposal event2 = events.get(0);
     Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME);
     Assert.assertEquals(event2.getEntityType(),
Constants.DATASET_ENTITY_NAME); - RecordTemplate tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + RecordTemplate tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2, expected); } @@ -270,36 +281,33 @@ private void testRemoveTagToEntityExistingTag() throws Exception { private void testRemoveGlobalTagsToEntityNoExistingTag() throws Exception { EntityClient mockClient = createMockGlobalTagsClient(null); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); Urn newTagUrn = UrnUtils.getUrn("urn:li:tag:newTag"); - List events = service.buildRemoveTagsProposals( - ImmutableList.of(newTagUrn), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, null, null), - new ResourceReference(TEST_ENTITY_URN_2, null, null)), - mockAuthentication()); + List events = + service.buildRemoveTagsProposals( + ImmutableList.of(newTagUrn), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, null, null), + new ResourceReference(TEST_ENTITY_URN_2, null, null)), + mockAuthentication()); TagAssociationArray expected = new TagAssociationArray(ImmutableList.of()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), event1.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect1.getTags(), expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.GLOBAL_TAGS_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - GlobalTags tagsAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - GlobalTags.class); + GlobalTags tagsAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), event2.getAspect().getContentType(), GlobalTags.class); Assert.assertEquals(tagsAspect2.getTags(), expected); } @@ -307,51 +315,58 @@ private void testRemoveGlobalTagsToEntityNoExistingTag() throws Exception { private void testRemoveTagToSchemaFieldExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags().setTags(new TagAssociationArray( - ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), - new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))) - ))) - )) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags( + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation() + .setTag(TagUrn.createFromUrn(TEST_TAG_URN_1)), + new TagAssociation() + 
.setTag(TagUrn.createFromUrn(TEST_TAG_URN_2))))))))); EntityClient mockClient = createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_TAG_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_TAG_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); - TagAssociationArray expected = new TagAssociationArray(ImmutableList.of( - new TagAssociation() - .setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)) - )); + TagAssociationArray expected = + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(TagUrn.createFromUrn(TEST_TAG_URN_2)))); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), expected); + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + expected); } @Test @@ -359,78 +374,90 @@ private void testRemoveGlobalTagsToSchemaFieldNoExistingTag() throws Exception { EditableSchemaMetadata existingMetadata = new EditableSchemaMetadata(); existingMetadata.setEditableSchemaFieldInfo( - new EditableSchemaFieldInfoArray(ImmutableList.of( - new EditableSchemaFieldInfo() - .setFieldPath("myfield") - .setGlobalTags(new GlobalTags()))) - ); + new EditableSchemaFieldInfoArray( + ImmutableList.of( + new EditableSchemaFieldInfo() + .setFieldPath("myfield") + .setGlobalTags(new GlobalTags())))); EntityClient mockClient = 
createMockSchemaMetadataEntityClient(existingMetadata); - final TagService service = new TagService( - mockClient, - Mockito.mock(Authentication.class)); + final TagService service = new TagService(mockClient, Mockito.mock(Authentication.class)); - List events = service.buildRemoveTagsProposals( - ImmutableList.of(TEST_ENTITY_URN_1), - ImmutableList.of( - new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), - new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), - mockAuthentication()); + List events = + service.buildRemoveTagsProposals( + ImmutableList.of(TEST_ENTITY_URN_1), + ImmutableList.of( + new ResourceReference(TEST_ENTITY_URN_1, SubResourceType.DATASET_FIELD, "myfield"), + new ResourceReference(TEST_ENTITY_URN_2, SubResourceType.DATASET_FIELD, "myfield")), + mockAuthentication()); MetadataChangeProposal event1 = events.get(0); Assert.assertEquals(event1.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event1.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect1 = GenericRecordUtils.deserializeAspect( - event1.getAspect().getValue(), - event1.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), Collections.emptyList()); + EditableSchemaMetadata editableSchemaMetadataAspect1 = + GenericRecordUtils.deserializeAspect( + event1.getAspect().getValue(), + event1.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect1.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + Collections.emptyList()); MetadataChangeProposal event2 = events.get(0); Assert.assertEquals(event2.getAspectName(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); Assert.assertEquals(event2.getEntityType(), Constants.DATASET_ENTITY_NAME); - EditableSchemaMetadata editableSchemaMetadataAspect2 = GenericRecordUtils.deserializeAspect( - event2.getAspect().getValue(), - event2.getAspect().getContentType(), - EditableSchemaMetadata.class); - Assert.assertEquals(editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), Collections.emptyList()); - + EditableSchemaMetadata editableSchemaMetadataAspect2 = + GenericRecordUtils.deserializeAspect( + event2.getAspect().getValue(), + event2.getAspect().getContentType(), + EditableSchemaMetadata.class); + Assert.assertEquals( + editableSchemaMetadataAspect2.getEditableSchemaFieldInfo().get(0).getGlobalTags().getTags(), + Collections.emptyList()); } - private static EntityClient createMockGlobalTagsClient(@Nullable GlobalTags existingGlobalTags) throws Exception { + private static EntityClient createMockGlobalTagsClient(@Nullable GlobalTags existingGlobalTags) + throws Exception { return createMockEntityClient(existingGlobalTags, Constants.GLOBAL_TAGS_ASPECT_NAME); } - private static EntityClient createMockSchemaMetadataEntityClient(@Nullable EditableSchemaMetadata existingMetadata) throws Exception { + private static EntityClient createMockSchemaMetadataEntityClient( + @Nullable EditableSchemaMetadata existingMetadata) throws Exception { return createMockEntityClient(existingMetadata, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME); } - private static EntityClient createMockEntityClient(@Nullable RecordTemplate aspect, String aspectName) throws Exception { + private static EntityClient createMockEntityClient( 
+      @Nullable RecordTemplate aspect, String aspectName) throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.batchGetV2(
-        Mockito.eq(Constants.DATASET_ENTITY_NAME),
-        Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)),
-        Mockito.eq(ImmutableSet.of(aspectName)),
-        Mockito.any(Authentication.class)))
-        .thenReturn(aspect != null ? ImmutableMap.of(
-            TEST_ENTITY_URN_1,
-            new EntityResponse()
-                .setUrn(TEST_ENTITY_URN_1)
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    aspectName,
-                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))
-                ))),
-            TEST_ENTITY_URN_2,
-            new EntityResponse()
-                .setUrn(TEST_ENTITY_URN_2)
-                .setEntityName(Constants.DATASET_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    aspectName,
-                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))
-                )))
-        ) : Collections.emptyMap());
+    Mockito.when(
+            mockClient.batchGetV2(
+                Mockito.eq(Constants.DATASET_ENTITY_NAME),
+                Mockito.eq(ImmutableSet.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2)),
+                Mockito.eq(ImmutableSet.of(aspectName)),
+                Mockito.any(Authentication.class)))
+        .thenReturn(
+            aspect != null
+                ? ImmutableMap.of(
+                    TEST_ENTITY_URN_1,
+                    new EntityResponse()
+                        .setUrn(TEST_ENTITY_URN_1)
+                        .setEntityName(Constants.DATASET_ENTITY_NAME)
+                        .setAspects(
+                            new EnvelopedAspectMap(
+                                ImmutableMap.of(
+                                    aspectName,
+                                    new EnvelopedAspect().setValue(new Aspect(aspect.data()))))),
+                    TEST_ENTITY_URN_2,
+                    new EntityResponse()
+                        .setUrn(TEST_ENTITY_URN_2)
+                        .setEntityName(Constants.DATASET_ENTITY_NAME)
+                        .setAspects(
+                            new EnvelopedAspectMap(
+                                ImmutableMap.of(
+                                    aspectName,
+                                    new EnvelopedAspect().setValue(new Aspect(aspect.data())))))) 
+                : Collections.emptyMap());
     return mockClient;
   }
 
@@ -439,4 +466,4 @@ private static Authentication mockAuthentication() {
     Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, Constants.SYSTEM_ACTOR));
     return mockAuth;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java
index 5841717e7db93..cd62cf3959103 100644
--- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java
+++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/ViewServiceTest.java
@@ -1,5 +1,7 @@
 package com.linkedin.metadata.service;
 
+import static com.linkedin.metadata.Constants.*;
+
 import com.datahub.authentication.Actor;
 import com.datahub.authentication.ActorType;
 import com.datahub.authentication.Authentication;
@@ -33,9 +35,6 @@
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
-import static com.linkedin.metadata.Constants.*;
-
-
 public class ViewServiceTest {
 
   private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test");
@@ -45,150 +44,187 @@ public class ViewServiceTest {
   private void testCreateViewSuccess() throws Exception {
     final EntityClient mockClient = createViewMockEntityClient();
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     // Case 1: With description
-    Urn urn = service.createView(DataHubViewType.PERSONAL,
-        "test view",
-        "my description",
-        new DataHubViewDefinition()
-            .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-            .setFilter(new Filter()
-                .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                    .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                        .setField("field")
-                        .setCondition(Condition.EQUAL)
-                        .setValue("value")
-                    ))))))),
-        mockAuthentication(),
-        0L
-    );
+    Urn urn =
+        service.createView(
+            DataHubViewType.PERSONAL,
+            "test view",
+            "my description",
+            new DataHubViewDefinition()
+                .setEntityTypes(
+                    new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+                .setFilter(
+                    new Filter()
+                        .setOr(
+                            new ConjunctiveCriterionArray(
+                                ImmutableList.of(
+                                    new ConjunctiveCriterion()
+                                        .setAnd(
+                                            new CriterionArray(
+                                                ImmutableList.of(
+                                                    new Criterion()
+                                                        .setField("field")
+                                                        .setCondition(Condition.EQUAL)
+                                                        .setValue("value")))))))),
+            mockAuthentication(),
+            0L);
 
     Assert.assertEquals(urn, TEST_VIEW_URN);
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     // Case 2: Without description
-    urn = service.createView(DataHubViewType.PERSONAL,
-        "test view",
-        null,
-        new DataHubViewDefinition()
-            .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-            .setFilter(new Filter()
-                .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                    .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                        .setField("field")
-                        .setCondition(Condition.EQUAL)
-                        .setValue("value")
-                    ))))))),
-        mockAuthentication(),
-        0L
-    );
+    urn =
+        service.createView(
+            DataHubViewType.PERSONAL,
+            "test view",
+            null,
+            new DataHubViewDefinition()
+                .setEntityTypes(
+                    new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+                .setFilter(
+                    new Filter()
+                        .setOr(
+                            new ConjunctiveCriterionArray(
+                                ImmutableList.of(
+                                    new ConjunctiveCriterion()
+                                        .setAnd(
+                                            new CriterionArray(
+                                                ImmutableList.of(
+                                                    new Criterion()
+                                                        .setField("field")
+                                                        .setCondition(Condition.EQUAL)
+                                                        .setValue("value")))))))),
+            mockAuthentication(),
+            0L);
 
     Assert.assertEquals(urn, TEST_VIEW_URN);
-    Mockito.verify(mockClient, Mockito.times(2)).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    Mockito.verify(mockClient, Mockito.times(2))
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
   private void testCreateViewErrorMissingInputs() throws Exception {
     final EntityClient mockClient = createViewMockEntityClient();
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     // Case 1: missing View Type
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createView(null,
-            "test view",
-            "my description",
-            new DataHubViewDefinition()
-                .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-                .setFilter(new Filter()
-                    .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                        .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                            .setField("field")
-                            .setCondition(Condition.EQUAL)
-                            .setValue("value")
-                        ))))))),
-            mockAuthentication(),
-            0L
-        )
-    );
-
+        () ->
+            service.createView(
+                null,
+                "test view",
+                "my description",
+                new DataHubViewDefinition()
+                    .setEntityTypes(
+                        new StringArray(
+                            ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+                    .setFilter(
+                        new Filter()
+                            .setOr(
+                                new ConjunctiveCriterionArray(
+                                    ImmutableList.of(
+                                        new ConjunctiveCriterion()
+                                            .setAnd(
+                                                new CriterionArray(
+                                                    ImmutableList.of(
+                                                        new Criterion()
+                                                            .setField("field")
+                                                            .setCondition(Condition.EQUAL)
+                                                            .setValue("value")))))))),
+                mockAuthentication(),
+                0L));
     // Case 2: missing View name
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createView(DataHubViewType.PERSONAL,
-            null,
-            "my description",
-            new DataHubViewDefinition()
-                .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-                .setFilter(new Filter()
-                    .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                        .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                            .setField("field")
-                            .setCondition(Condition.EQUAL)
-                            .setValue("value")
-                        ))))))),
-            mockAuthentication(),
-            0L
-        )
-    );
+        () ->
+            service.createView(
+                DataHubViewType.PERSONAL,
+                null,
+                "my description",
+                new DataHubViewDefinition()
+                    .setEntityTypes(
+                        new StringArray(
+                            ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+                    .setFilter(
+                        new Filter()
+                            .setOr(
+                                new ConjunctiveCriterionArray(
+                                    ImmutableList.of(
+                                        new ConjunctiveCriterion()
+                                            .setAnd(
+                                                new CriterionArray(
+                                                    ImmutableList.of(
+                                                        new Criterion()
+                                                            .setField("field")
+                                                            .setCondition(Condition.EQUAL)
+                                                            .setValue("value")))))))),
+                mockAuthentication(),
+                0L));
 
     // Case 3: missing View definition
     Assert.assertThrows(
         RuntimeException.class,
-        () -> service.createView(DataHubViewType.PERSONAL,
-            "My name",
-            "my description",
-            null,
-            mockAuthentication(),
-            0L
-        )
-    );
+        () ->
+            service.createView(
+                DataHubViewType.PERSONAL,
+                "My name",
+                "my description",
+                null,
+                mockAuthentication(),
+                0L));
   }
 
   @Test
   private void testCreateViewError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .ingestProposal(
+            Mockito.any(MetadataChangeProposal.class),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.createView(
-        DataHubViewType.PERSONAL,
-        "new name",
-        "my description",
-        new DataHubViewDefinition()
-            .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-            .setFilter(new Filter()
-                .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                    .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                        .setField("field")
-                        .setCondition(Condition.EQUAL)
-                        .setValue("value")
-                    ))))))),
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () ->
+            service.createView(
+                DataHubViewType.PERSONAL,
+                "new name",
+                "my description",
+                new DataHubViewDefinition()
+                    .setEntityTypes(
+                        new StringArray(
+                            ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+                    .setFilter(
+                        new Filter()
+                            .setOr(
+                                new ConjunctiveCriterionArray(
+                                    ImmutableList.of(
+                                        new ConjunctiveCriterion()
+                                            .setAnd(
+                                                new CriterionArray(
+                                                    ImmutableList.of(
+                                                        new Criterion()
+                                                            .setField("field")
+                                                            .setCondition(Condition.EQUAL)
+                                                            .setValue("value")))))))),
+                mockAuthentication(),
+                1L));
   }
 
   @Test
@@ -196,9 +232,10 @@ private void testUpdateViewSuccess() throws Exception {
     final DataHubViewType type = DataHubViewType.PERSONAL;
     final String oldName = "old name";
     final String oldDescription = "old description";
-    final DataHubViewDefinition oldDefinition = new DataHubViewDefinition()
-        .setEntityTypes(new StringArray())
-        .setFilter(new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())));
+    final DataHubViewDefinition oldDefinition =
+        new DataHubViewDefinition()
+            .setEntityTypes(new StringArray())
+            .setFilter(new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())));
 
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
@@ -211,39 +248,39 @@ private void testUpdateViewSuccess() throws Exception {
         oldDefinition,
         TEST_USER_URN,
         0L,
-        0L
-    );
+        0L);
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     final String newName = "new name";
     final String newDescription = "new description";
-    final DataHubViewDefinition newDefinition = new DataHubViewDefinition()
-        .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-        .setFilter(new Filter()
-            .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                    .setField("field")
-                    .setCondition(Condition.EQUAL)
-                    .setValue("value")
-                )))))));
+    final DataHubViewDefinition newDefinition =
+        new DataHubViewDefinition()
+            .setEntityTypes(
+                new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+            .setFilter(
+                new Filter()
+                    .setOr(
+                        new ConjunctiveCriterionArray(
+                            ImmutableList.of(
+                                new ConjunctiveCriterion()
+                                    .setAnd(
+                                        new CriterionArray(
+                                            ImmutableList.of(
+                                                new Criterion()
+                                                    .setField("field")
+                                                    .setCondition(Condition.EQUAL)
+                                                    .setValue("value"))))))));
 
     // Case 1: Update name only
-    service.updateView(
-        TEST_VIEW_URN,
-        newName,
-        null,
-        null,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, newName, oldDescription, oldDefinition, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateView(TEST_VIEW_URN, newName, null, null, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateViewProposal(
+                    TEST_VIEW_URN, type, newName, oldDescription, oldDefinition, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetUpdateViewMockEntityClient(
         mockClient,
@@ -254,24 +291,18 @@ private void testUpdateViewSuccess() throws Exception {
         oldDefinition,
         TEST_USER_URN,
         0L,
-        0L
-    );
+        0L);
 
     // Case 2: Update description only
-    service.updateView(
-        TEST_VIEW_URN,
-        null,
-        newDescription,
-        null,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, oldName, newDescription, oldDefinition, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateView(TEST_VIEW_URN, null, newDescription, null, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateViewProposal(
+                    TEST_VIEW_URN, type, oldName, newDescription, oldDefinition, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetUpdateViewMockEntityClient(
         mockClient,
@@ -282,23 +313,18 @@ private void testUpdateViewSuccess() throws Exception {
        oldDefinition,
        TEST_USER_URN,
        0L,
-        0L
-    );
+        0L);
 
     // Case 3: Update definition only
-    service.updateView(TEST_VIEW_URN,
-        null,
-        null,
-        newDefinition,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, oldName, oldDescription, newDefinition, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+    service.updateView(TEST_VIEW_URN, null, null, newDefinition, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateViewProposal(
+                    TEST_VIEW_URN, type, oldName, oldDescription, newDefinition, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
 
     resetUpdateViewMockEntityClient(
         mockClient,
@@ -309,110 +335,88 @@ private void testUpdateViewSuccess() throws Exception {
        oldDefinition,
        TEST_USER_URN,
        0L,
-        0L
-    );
+        0L);
 
     // Case 4: Update all fields at once
     service.updateView(
-        TEST_VIEW_URN,
-        newName,
-        newDescription,
-        newDefinition,
-        mockAuthentication(),
-        1L
-    );
-
-    Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(
-        Mockito.eq(buildUpdateViewProposal(TEST_VIEW_URN, type, newName, newDescription, newDefinition, 0L, 1L)),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false)
-    );
+        TEST_VIEW_URN, newName, newDescription, newDefinition, mockAuthentication(), 1L);
+
+    Mockito.verify(mockClient, Mockito.times(1))
+        .ingestProposal(
+            Mockito.eq(
+                buildUpdateViewProposal(
+                    TEST_VIEW_URN, type, newName, newDescription, newDefinition, 0L, 1L)),
+            Mockito.any(Authentication.class),
+            Mockito.eq(false));
   }
 
   @Test
   private void testUpdateViewMissingView() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
+                Mockito.eq(TEST_VIEW_URN),
+                Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
         .thenReturn(null);
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     final String newName = "new name";
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.updateView(
-        TEST_VIEW_URN,
-        newName,
-        null,
-        null,
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.updateView(TEST_VIEW_URN, newName, null, null, mockAuthentication(), 1L));
   }
 
   @Test
   private void testUpdateViewError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
+            Mockito.eq(TEST_VIEW_URN),
+            Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.updateView(
-        TEST_VIEW_URN,
-        "new name",
-        null,
-        null,
-        mockAuthentication(),
-        1L
-    ));
+    Assert.assertThrows(
+        RuntimeException.class,
+        () -> service.updateView(TEST_VIEW_URN, "new name", null, null, mockAuthentication(), 1L));
   }
 
   @Test
   private void testDeleteViewSuccess() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     service.deleteView(TEST_VIEW_URN, mockAuthentication());
 
-    Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.verify(mockClient, Mockito.times(1))
+        .deleteEntity(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class));
   }
 
   @Test
   private void testDeleteViewError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).deleteEntity(
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.any(Authentication.class)
-    );
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .deleteEntity(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.deleteView(TEST_VIEW_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class, () -> service.deleteView(TEST_VIEW_URN, mockAuthentication()));
   }
 
   @Test
@@ -422,31 +426,28 @@ private void testGetViewInfoSuccess() throws Exception {
     final DataHubViewType type = DataHubViewType.PERSONAL;
     final String name = "name";
     final String description = "description";
-    final DataHubViewDefinition definition = new DataHubViewDefinition()
-        .setEntityTypes(new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
-        .setFilter(new Filter()
-            .setOr(new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion()
-                .setAnd(new CriterionArray(ImmutableList.of(new Criterion()
-                    .setField("field")
-                    .setCondition(Condition.EQUAL)
-                    .setValue("value")
-                )))))));
+    final DataHubViewDefinition definition =
+        new DataHubViewDefinition()
+            .setEntityTypes(
+                new StringArray(ImmutableList.of(DATASET_ENTITY_NAME, DASHBOARD_ENTITY_NAME)))
+            .setFilter(
+                new Filter()
+                    .setOr(
+                        new ConjunctiveCriterionArray(
+                            ImmutableList.of(
+                                new ConjunctiveCriterion()
+                                    .setAnd(
+                                        new CriterionArray(
+                                            ImmutableList.of(
+                                                new Criterion()
+                                                    .setField("field")
+                                                    .setCondition(Condition.EQUAL)
+                                                    .setValue("value"))))))));
 
     resetGetViewInfoMockEntityClient(
-        mockClient,
-        TEST_VIEW_URN,
-        type,
-        name,
-        description,
-        definition,
-        TEST_USER_URN,
-        0L,
-        1L
-    );
+        mockClient, TEST_VIEW_URN, type, name, description, definition, TEST_USER_URN, 0L, 1L);
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     final DataHubViewInfo info = service.getViewInfo(TEST_VIEW_URN, mockAuthentication());
 
@@ -464,37 +465,36 @@ private void testGetViewInfoSuccess() throws Exception {
   private void testGetViewInfoNoViewExists() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
 
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
+                Mockito.eq(TEST_VIEW_URN),
+                Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(null);
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     Assert.assertNull(service.getViewInfo(TEST_VIEW_URN, mockAuthentication()));
-
   }
 
   @Test
   private void testGetViewInfoError() throws Exception {
     final EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.doThrow(new RemoteInvocationException()).when(mockClient).getV2(
-        Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(TEST_VIEW_URN),
-        Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class));
+    Mockito.doThrow(new RemoteInvocationException())
+        .when(mockClient)
+        .getV2(
+            Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
+            Mockito.eq(TEST_VIEW_URN),
+            Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
+            Mockito.any(Authentication.class));
 
-    final ViewService service = new ViewService(
-        mockClient,
-        Mockito.mock(Authentication.class));
+    final ViewService service = new ViewService(mockClient, Mockito.mock(Authentication.class));
 
     // Throws wrapped exception
-    Assert.assertThrows(RuntimeException.class, () -> service.getViewInfo(TEST_VIEW_URN, mockAuthentication()));
+    Assert.assertThrows(
+        RuntimeException.class, () -> service.getViewInfo(TEST_VIEW_URN, mockAuthentication()));
   }
 
   private static MetadataChangeProposal buildUpdateViewProposal(
@@ -525,10 +525,12 @@ private static MetadataChangeProposal buildUpdateViewProposal(
   private static EntityClient createViewMockEntityClient() throws Exception {
     EntityClient mockClient = Mockito.mock(EntityClient.class);
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(TEST_VIEW_URN.toString());
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(TEST_VIEW_URN.toString());
     return mockClient;
   }
 
@@ -541,36 +543,42 @@ private static void resetUpdateViewMockEntityClient(
       final DataHubViewDefinition existingDefinition,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    Mockito.when(mockClient.ingestProposal(
-        Mockito.any(MetadataChangeProposal.class),
-        Mockito.any(Authentication.class),
-        Mockito.eq(false))).thenReturn(viewUrn.toString());
-
-    final DataHubViewInfo existingInfo = new DataHubViewInfo()
-        .setType(existingType)
-        .setName(existingName)
-        .setDescription(existingDescription)
-        .setDefinition(existingDefinition)
-        .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
-        .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(viewUrn),
-        Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    Mockito.when(
+            mockClient.ingestProposal(
+                Mockito.any(MetadataChangeProposal.class),
+                Mockito.any(Authentication.class),
+                Mockito.eq(false)))
+        .thenReturn(viewUrn.toString());
+
+    final DataHubViewInfo existingInfo =
+        new DataHubViewInfo()
+            .setType(existingType)
+            .setName(existingName)
+            .setDescription(existingDescription)
+            .setDefinition(existingDefinition)
+            .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
+            .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
+                Mockito.eq(viewUrn),
+                Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(
-            new EntityResponse()
-                .setUrn(viewUrn)
-                .setEntityName(DATAHUB_VIEW_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    DATAHUB_VIEW_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(existingInfo.data()))
-                )))));
+            new EntityResponse()
+                .setUrn(viewUrn)
+                .setEntityName(DATAHUB_VIEW_ENTITY_NAME)
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            DATAHUB_VIEW_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))));
  }
 
   private static void resetGetViewInfoMockEntityClient(
@@ -582,31 +590,35 @@ private static void resetGetViewInfoMockEntityClient(
       final DataHubViewDefinition existingDefinition,
       final Urn existingOwner,
       final long existingCreatedAt,
-      final long existingUpdatedAt) throws Exception {
+      final long existingUpdatedAt)
+      throws Exception {
     Mockito.reset(mockClient);
 
-    final DataHubViewInfo existingInfo = new DataHubViewInfo()
-        .setType(existingType)
-        .setName(existingName)
-        .setDescription(existingDescription)
-        .setDefinition(existingDefinition)
-        .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
-        .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
-
-    Mockito.when(mockClient.getV2(
-        Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
-        Mockito.eq(viewUrn),
-        Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
-        Mockito.any(Authentication.class)))
+    final DataHubViewInfo existingInfo =
+        new DataHubViewInfo()
+            .setType(existingType)
+            .setName(existingName)
+            .setDescription(existingDescription)
+            .setDefinition(existingDefinition)
+            .setCreated(new AuditStamp().setActor(existingOwner).setTime(existingCreatedAt))
+            .setLastModified(new AuditStamp().setActor(existingOwner).setTime(existingUpdatedAt));
+
+    Mockito.when(
+            mockClient.getV2(
+                Mockito.eq(DATAHUB_VIEW_ENTITY_NAME),
+                Mockito.eq(viewUrn),
+                Mockito.eq(ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME)),
+                Mockito.any(Authentication.class)))
        .thenReturn(
            new EntityResponse()
                .setUrn(viewUrn)
                .setEntityName(DATAHUB_VIEW_ENTITY_NAME)
-                .setAspects(new EnvelopedAspectMap(ImmutableMap.of(
-                    DATAHUB_VIEW_INFO_ASPECT_NAME,
-                    new EnvelopedAspect().setValue(new Aspect(existingInfo.data()))
-                ))));
+                .setAspects(
+                    new EnvelopedAspectMap(
+                        ImmutableMap.of(
+                            DATAHUB_VIEW_INFO_ASPECT_NAME,
+                            new EnvelopedAspect().setValue(new Aspect(existingInfo.data())))))));
   }
 
   private static Authentication mockAuthentication() {
@@ -614,4 +626,4 @@ private static Authentication mockAuthentication() {
     Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, TEST_USER_URN.getId()));
     return mockAuth;
   }
-}
\ No newline at end of file
+}
diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java
index 3ea2b01c3e214..1b3ef20cff00a 100644
--- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java
+++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/Config.java
@@ -12,9 +12,9 @@
 import com.linkedin.util.Pair;
 import java.io.IOException;
 import java.io.PrintWriter;
+import java.time.ZoneId;
 import java.util.HashMap;
 import java.util.Map;
-import java.time.ZoneId;
 import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
@@ -27,30 +27,46 @@
 
 public class Config extends HttpServlet {
 
-  Map<String, Object> config = new HashMap<String, Object>() {{
-    put("noCode", "true");
-    put("retention", "true");
-    put("statefulIngestionCapable", true);
-    put("patchCapable", true);
-    put("timeZone", ZoneId.systemDefault().toString());
-  }};
-  ObjectMapper objectMapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);
-
-  private Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> getPluginModels(ServletContext servletContext) {
-    WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
+  Map<String, Object> config =
+      new HashMap<String, Object>() {
+        {
+          put("noCode", "true");
+          put("retention", "true");
+          put("statefulIngestionCapable", true);
+          put("patchCapable", true);
+          put("timeZone", ZoneId.systemDefault().toString());
+        }
+      };
+  ObjectMapper objectMapper =
+      new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);
+
+  private Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> getPluginModels(
+      ServletContext servletContext) {
+    WebApplicationContext ctx =
+        WebApplicationContextUtils.getRequiredWebApplicationContext(servletContext);
     PluginEntityRegistryLoader pluginEntityRegistryLoader =
         (PluginEntityRegistryLoader) ctx.getBean("pluginEntityRegistry");
-    Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>> patchRegistries =
-        pluginEntityRegistryLoader.getPatchRegistries();
-    Map<String, Map<ComparableVersion, EntityRegistryLoadResult>> patchDiagnostics = new HashMap<>();
+    Map<String, Map<ComparableVersion, Pair<EntityRegistry, EntityRegistryLoadResult>>>
+        patchRegistries = pluginEntityRegistryLoader.getPatchRegistries();
+    Map<String, Map<ComparableVersion, EntityRegistryLoadResult>>
+        patchDiagnostics =
+        new HashMap<>();
     patchRegistries.keySet().forEach(name -> patchDiagnostics.put(name, new HashMap<>()));
-    patchRegistries.entrySet().forEach(entry -> {
-      entry.getValue()
-          .entrySet()
-          .forEach(versionLoadEntry -> patchDiagnostics.get(entry.getKey())
-              .put(versionLoadEntry.getKey(), versionLoadEntry.getValue().getSecond()));
-    });
+    patchRegistries
+        .entrySet()
+        .forEach(
+            entry -> {
+              entry
+                  .getValue()
+                  .entrySet()
+                  .forEach(
+                      versionLoadEntry ->
+                          patchDiagnostics
+                              .get(entry.getKey())
+                              .put(
+                                  versionLoadEntry.getKey(),
+                                  versionLoadEntry.getValue().getSecond()));
+            });
     return patchDiagnostics;
   }
 
@@ -74,7 +90,8 @@ private boolean checkImpactAnalysisSupport(WebApplicationContext ctx) {
   protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
     config.put("noCode", "true");
 
-    WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext());
+    WebApplicationContext ctx =
+        WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext());
 
     config.put("supportsImpactAnalysis", checkImpactAnalysisSupport(ctx));
 
@@ -85,21 +102,30 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IO
     ConfigurationProvider configProvider = getConfigProvider(ctx);
 
-    Map<String, Object> telemetryConfig = new HashMap<String, Object>() {{
-      put("enabledCli", configProvider.getTelemetry().enabledCli);
-      put("enabledIngestion", configProvider.getTelemetry().enabledIngestion);
-    }};
+    Map<String, Object> telemetryConfig =
+        new HashMap<String, Object>() {
+          {
+            put("enabledCli", configProvider.getTelemetry().enabledCli);
+            put("enabledIngestion", configProvider.getTelemetry().enabledIngestion);
+          }
+        };
     config.put("telemetry", telemetryConfig);
 
-    Map<String, Object> ingestionConfig = new HashMap<String, Object>() {{
-      put("enabled", configProvider.getIngestion().enabled);
-      put("defaultCliVersion", configProvider.getIngestion().defaultCliVersion);
-    }};
+    Map<String, Object> ingestionConfig =
+        new HashMap<String, Object>() {
+          {
+            put("enabled", configProvider.getIngestion().enabled);
+            put("defaultCliVersion", configProvider.getIngestion().defaultCliVersion);
+          }
+        };
     config.put("managedIngestion", ingestionConfig);
 
-    Map<String, Object> datahubConfig = new HashMap<String, Object>() {{
-      put("serverType", configProvider.getDatahub().serverType);
-    }};
+    Map<String, Object> datahubConfig =
+        new HashMap<String, Object>() {
+          {
+            put("serverType", configProvider.getDatahub().serverType);
+          }
+        };
     config.put("datahub", datahubConfig);
 
     resp.setContentType("application/json");
diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
index d788222c5d87b..ebcfaeca7059e 100644
--- a/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
+++ b/metadata-service/servlet/src/main/java/com/datahub/gms/servlet/ConfigSearchExport.java
@@ -1,13 +1,22 @@
 package com.datahub.gms.servlet;
 
-import com.linkedin.metadata.config.search.SearchConfiguration;
+import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
+import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER;
+
 import com.datahub.gms.util.CSVWriter;
 import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
 import com.linkedin.gms.factory.config.ConfigurationProvider;
+import com.linkedin.metadata.config.search.SearchConfiguration;
 import com.linkedin.metadata.models.EntitySpec;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.query.SearchFlags;
 import com.linkedin.metadata.search.elasticsearch.query.request.SearchRequestHandler;
+import java.io.PrintWriter;
+import java.util.Map;
+import java.util.Optional;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
 import lombok.extern.slf4j.Slf4j;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.index.query.BoolQueryBuilder;
@@ -22,16 +31,6 @@
 import org.springframework.web.context.WebApplicationContext;
 import org.springframework.web.context.support.WebApplicationContextUtils;
 
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.PrintWriter;
-import java.util.Map;
-import java.util.Optional;
-
-import static com.linkedin.metadata.search.elasticsearch.indexbuilder.SettingsBuilder.KEYWORD_ANALYZER;
-import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
-
 @Slf4j
 public class ConfigSearchExport extends HttpServlet {
@@ -49,40 +48,73 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) {
 
     CSVWriter writer = CSVWriter.builder().printWriter(pw).build();
 
-    String[] header = {"entity", "query_category", "match_category", "query_type", "field_name",
-        "field_weight", "search_analyzer", "case_insensitive", "query_boost", "raw"};
+    String[] header = {
+      "entity",
+      "query_category",
+      "match_category",
+      "query_type",
+      "field_name",
+      "field_weight",
+      "search_analyzer",
+      "case_insensitive",
+      "query_boost",
+      "raw"
+    };
     writer.println(header);
 
     SEARCHABLE_ENTITY_TYPES.stream()
-        .map(entityType -> {
+        .map(
+            entityType -> {
              try {
-                EntitySpec entitySpec = entityRegistry.getEntitySpec(EntityTypeMapper.getName(entityType));
+                EntitySpec entitySpec =
+                    entityRegistry.getEntitySpec(EntityTypeMapper.getName(entityType));
                return Optional.of(entitySpec);
              } catch (IllegalArgumentException e) {
                log.warn("Failed to resolve entity `{}`", entityType.name());
                return Optional.empty();
              }
            })
-        .filter(Optional::isPresent)
-        .forEach(entitySpecOpt -> {
+        .filter(Optional::isPresent)
+        .forEach(
+            entitySpecOpt -> {
              EntitySpec entitySpec = entitySpecOpt.get();
-          SearchRequest searchRequest = SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, null)
-              .getSearchRequest("*", null, null, 0, 0, new SearchFlags()
-                  .setFulltext(true).setSkipHighlighting(true).setSkipAggregates(true), null);
+              SearchRequest searchRequest =
+                  SearchRequestHandler.getBuilder(entitySpec, searchConfiguration, null)
+                      .getSearchRequest(
+                          "*",
+                          null,
+                          null,
+                          0,
+                          0,
+                          new SearchFlags()
+                              .setFulltext(true)
+                              .setSkipHighlighting(true)
+                              .setSkipAggregates(true),
+                          null);
 
-          FunctionScoreQueryBuilder rankingQuery = ((FunctionScoreQueryBuilder) ((BoolQueryBuilder)
-              searchRequest.source().query()).must().get(0));
+              FunctionScoreQueryBuilder rankingQuery =
+                  ((FunctionScoreQueryBuilder)
+                      ((BoolQueryBuilder) searchRequest.source().query()).must().get(0));
              BoolQueryBuilder relevancyQuery = (BoolQueryBuilder) rankingQuery.query();
-          BoolQueryBuilder simpleQueryString = (BoolQueryBuilder) relevancyQuery.should().get(0);
+              BoolQueryBuilder simpleQueryString =
+                  (BoolQueryBuilder) relevancyQuery.should().get(0);
              BoolQueryBuilder exactPrefixMatch = (BoolQueryBuilder) relevancyQuery.should().get(1);
 
              for (QueryBuilder simpBuilder : simpleQueryString.should()) {
                SimpleQueryStringBuilder sqsb = (SimpleQueryStringBuilder) simpBuilder;
                for (Map.Entry<String, Float> fieldWeight : sqsb.fields().entrySet()) {
-              String[] row = {entitySpec.getName(), "relevancy", "fulltext", sqsb.getClass().getSimpleName(),
-                  fieldWeight.getKey(),
-                  fieldWeight.getValue().toString(), sqsb.analyzer(), "true", String.valueOf(sqsb.boost()),
-                  sqsb.toString().replaceAll("\n", "")};
+                  String[] row = {
+                    entitySpec.getName(),
+                    "relevancy",
+                    "fulltext",
+                    sqsb.getClass().getSimpleName(),
+                    fieldWeight.getKey(),
+                    fieldWeight.getValue().toString(),
+                    sqsb.analyzer(),
+                    "true",
+                    String.valueOf(sqsb.boost()),
+                    sqsb.toString().replaceAll("\n", "")
+                  };
                  writer.println(row);
                }
              }
@@ -90,66 +122,119 @@ private void writeSearchCsv(WebApplicationContext ctx, PrintWriter pw) {
              for (QueryBuilder builder : exactPrefixMatch.should()) {
                if (builder instanceof TermQueryBuilder) {
                  TermQueryBuilder tqb = (TermQueryBuilder) builder;
-                String[] row = {entitySpec.getName(), "relevancy", "exact_match", tqb.getClass().getSimpleName(),
-                    tqb.fieldName(),
-                    String.valueOf(tqb.boost()), KEYWORD_ANALYZER, String.valueOf(tqb.caseInsensitive()), "",
-                    tqb.toString().replaceAll("\n", "")};
+                  String[] row = {
+                    entitySpec.getName(),
+                    "relevancy",
+                    "exact_match",
+                    tqb.getClass().getSimpleName(),
+                    tqb.fieldName(),
+                    String.valueOf(tqb.boost()),
+                    KEYWORD_ANALYZER,
+                    String.valueOf(tqb.caseInsensitive()),
+                    "",
+                    tqb.toString().replaceAll("\n", "")
+                  };
                  writer.println(row);
                } else if (builder instanceof MatchPhrasePrefixQueryBuilder) {
                  MatchPhrasePrefixQueryBuilder mppqb = (MatchPhrasePrefixQueryBuilder) builder;
-                String[] row = {entitySpec.getName(), "relevancy", "prefix_match", mppqb.getClass().getSimpleName(),
-                    mppqb.fieldName(),
-                    String.valueOf(mppqb.boost()), "", "true", "", mppqb.toString().replaceAll("\n", "")};
+                  String[] row = {
+                    entitySpec.getName(),
+                    "relevancy",
+                    "prefix_match",
+                    mppqb.getClass().getSimpleName(),
+                    mppqb.fieldName(),
+                    String.valueOf(mppqb.boost()),
+                    "",
+                    "true",
+                    "",
+                    mppqb.toString().replaceAll("\n", "")
+                  };
                  writer.println(row);
                } else {
-                throw new IllegalStateException("Unhandled exact prefix builder: " + builder.getClass().getName());
+                  throw new IllegalStateException(
+                      "Unhandled exact prefix builder: " + builder.getClass().getName());
                }
              }
 
-            for (FunctionScoreQueryBuilder.FilterFunctionBuilder ffb : rankingQuery.filterFunctionBuilders()) {
+              for (FunctionScoreQueryBuilder.FilterFunctionBuilder ffb :
+                  rankingQuery.filterFunctionBuilders()) {
                if (ffb.getFilter() instanceof MatchAllQueryBuilder) {
                  MatchAllQueryBuilder filter = (MatchAllQueryBuilder) ffb.getFilter();
                  if (ffb.getScoreFunction() instanceof WeightBuilder) {
                    WeightBuilder scoreFunction = (WeightBuilder) ffb.getScoreFunction();
-                  String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(),
-                      scoreFunction.getClass().getSimpleName(), "*",
-                      String.valueOf(scoreFunction.getWeight()), "", "true", String.valueOf(filter.boost()),
-                      String.format("{\"filter\":%s,\"scoreFunction\":%s", filter,
-                          CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")};
+                    String[] row = {
+                      entitySpec.getName(),
+                      "score",
+                      filter.getClass().getSimpleName(),
+                      scoreFunction.getClass().getSimpleName(),
+                      "*",
+                      String.valueOf(scoreFunction.getWeight()),
+                      "",
+                      "true",
+                      String.valueOf(filter.boost()),
+                      String.format(
+                              "{\"filter\":%s,\"scoreFunction\":%s",
+                              filter, CSVWriter.builderToString(scoreFunction))
+                          .replaceAll("\n", "")
+                    };
                    writer.println(row);
                  } else if (ffb.getScoreFunction() instanceof FieldValueFactorFunctionBuilder) {
-                  FieldValueFactorFunctionBuilder scoreFunction = (FieldValueFactorFunctionBuilder) ffb.getScoreFunction();
-                  String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(),
-                      scoreFunction.getClass().getSimpleName(), scoreFunction.fieldName(),
-                      String.valueOf(scoreFunction.factor()), "", "true", String.valueOf(filter.boost()),
-                      String.format("{\"filter\":%s,\"scoreFunction\":%s", filter, CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")};
+                    FieldValueFactorFunctionBuilder scoreFunction =
+                        (FieldValueFactorFunctionBuilder) ffb.getScoreFunction();
+                    String[] row = {
+                      entitySpec.getName(),
+                      "score",
+                      filter.getClass().getSimpleName(),
+                      scoreFunction.getClass().getSimpleName(),
+                      scoreFunction.fieldName(),
+                      String.valueOf(scoreFunction.factor()),
+                      "",
+                      "true",
+                      String.valueOf(filter.boost()),
+                      String.format(
+                              "{\"filter\":%s,\"scoreFunction\":%s",
+                              filter, CSVWriter.builderToString(scoreFunction))
+                          .replaceAll("\n", "")
+                    };
                    writer.println(row);
                  } else {
-                    throw new IllegalStateException("Unhandled score function: " + ffb.getScoreFunction());
+                    throw new IllegalStateException(
+                        "Unhandled score function: " + ffb.getScoreFunction());
                  }
                } else if (ffb.getFilter() instanceof TermQueryBuilder) {
                  TermQueryBuilder filter = (TermQueryBuilder) ffb.getFilter();
                  if (ffb.getScoreFunction() instanceof WeightBuilder) {
                    WeightBuilder scoreFunction = (WeightBuilder) ffb.getScoreFunction();
-                  String[] row = {entitySpec.getName(), "score", filter.getClass().getSimpleName(),
-                      scoreFunction.getClass().getSimpleName(), filter.fieldName() + "=" + filter.value().toString(),
-                      String.valueOf(scoreFunction.getWeight()), KEYWORD_ANALYZER, String.valueOf(filter.caseInsensitive()),
-                      String.valueOf(filter.boost()), String.format("{\"filter\":%s,\"scoreFunction\":%s", filter,
-                          CSVWriter.builderToString(scoreFunction)).replaceAll("\n", "")};
+                    String[] row = {
+                      entitySpec.getName(),
+                      "score",
+                      filter.getClass().getSimpleName(),
+                      scoreFunction.getClass().getSimpleName(),
+                      filter.fieldName() + "=" + filter.value().toString(),
+                      String.valueOf(scoreFunction.getWeight()),
+                      KEYWORD_ANALYZER,
+                      String.valueOf(filter.caseInsensitive()),
+                      String.valueOf(filter.boost()),
+                      String.format(
+                              "{\"filter\":%s,\"scoreFunction\":%s",
+                              filter, CSVWriter.builderToString(scoreFunction))
+                          .replaceAll("\n", "")
+                    };
                    writer.println(row);
                  } else {
-                    throw new IllegalStateException("Unhandled score function: " + ffb.getScoreFunction());
+                    throw new IllegalStateException(
+                        "Unhandled score function: " + ffb.getScoreFunction());
                  }
                } else {
-                  throw new IllegalStateException("Unhandled function score filter: " + ffb.getFilter());
+                  throw new IllegalStateException(
+                      "Unhandled function score filter: " + ffb.getFilter());
                }
              }
            });
  }
 
-
  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
    if (!"csv".equals(req.getParameter("format"))) {
@@ -157,7 +242,8 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
      return;
    }
 
-    WebApplicationContext ctx = WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext());
+    WebApplicationContext ctx =
+        WebApplicationContextUtils.getRequiredWebApplicationContext(req.getServletContext());
 
    try {
      resp.setContentType("text/csv");
diff --git a/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java b/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java
index 79d4f7077b797..da5f0b75efdae 100644
--- a/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java
+++ b/metadata-service/servlet/src/main/java/com/datahub/gms/util/CSVWriter.java
@@ -1,45 +1,41 @@
 package com.datahub.gms.util;
 
-
-import lombok.Builder;
-import org.opensearch.index.query.functionscore.FieldValueFactorFunctionBuilder;
-import org.opensearch.index.query.functionscore.WeightBuilder;
-
 import java.io.PrintWriter;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import lombok.Builder;
+import org.opensearch.index.query.functionscore.FieldValueFactorFunctionBuilder;
+import org.opensearch.index.query.functionscore.WeightBuilder;
 
 @Builder
 public class CSVWriter {
-    private PrintWriter printWriter;
-
-    public CSVWriter println(String[] data) {
-        printWriter.println(convertToCSV(data));
-        return this;
-    }
-
-    private static String convertToCSV(String[] data) {
-        return Stream.of(data)
-            .map(CSVWriter::escapeSpecialCharacters)
-            .collect(Collectors.joining(","));
-    }
-
-    private static String escapeSpecialCharacters(String data) {
-        String escapedData = data.replaceAll("\\R", " ");
-        if (data.contains(",") || data.contains("\"") || data.contains("'")) {
-            data = data.replace("\"", "\"\"");
-            escapedData = "\"" + data + "\"";
-        }
-        return escapedData;
-    }
-
-    public static String builderToString(FieldValueFactorFunctionBuilder in) {
-        return String.format("{\"field\":\"%s\",\"factor\":%s,\"missing\":%s,\"modifier\":\"%s\"}",
-            in.fieldName(), in.factor(), in.missing(), in.modifier());
-    }
-
-    public static String builderToString(WeightBuilder in) {
-        return String.format("{\"weight\":%s}", in.getWeight());
+  private PrintWriter printWriter;
+
+  public CSVWriter println(String[] data) {
+    printWriter.println(convertToCSV(data));
+    return this;
+  }
+
+  private static String convertToCSV(String[] data) {
+    return Stream.of(data).map(CSVWriter::escapeSpecialCharacters).collect(Collectors.joining(","));
+  }
+
+  private static String escapeSpecialCharacters(String data) {
+    String escapedData = data.replaceAll("\\R", " ");
+    if (data.contains(",") || data.contains("\"") || data.contains("'")) {
+      data = data.replace("\"", "\"\"");
+      escapedData = "\"" + data + "\"";
    }
+    return escapedData;
+  }
+
+  public static String builderToString(FieldValueFactorFunctionBuilder in) {
+    return String.format(
+        "{\"field\":\"%s\",\"factor\":%s,\"missing\":%s,\"modifier\":\"%s\"}",
+        in.fieldName(), in.factor(), in.missing(), in.modifier());
+  }
+
+  public static String builderToString(WeightBuilder in) {
+    return String.format("{\"weight\":%s}", in.getWeight());
+  }
 }
-
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
index df960808d8a41..8258a7d226ed6 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java
@@ -9,10 +9,7 @@
 import lombok.Data;
 import lombok.Getter;
 
-
-/**
- * This policies config file defines the base set of privileges that DataHub supports.
- */
+/** This policies config file defines the base set of privileges that DataHub supports. */
 public class PoliciesConfig {
 
   public static final String PLATFORM_POLICY_TYPE = "PLATFORM";
@@ -22,547 +19,580 @@ public class PoliciesConfig {
   // Platform Privileges
   //
 
-  public static final Privilege MANAGE_POLICIES_PRIVILEGE = Privilege.of(
-      "MANAGE_POLICIES",
-      "Manage Policies",
-      "Create and remove access control policies. Be careful - Actors with this privilege are effectively super users.");
+  public static final Privilege MANAGE_POLICIES_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_POLICIES",
+          "Manage Policies",
+          "Create and remove access control policies. 
Be careful - Actors with this privilege are effectively super users."); + + public static final Privilege MANAGE_INGESTION_PRIVILEGE = + Privilege.of( + "MANAGE_INGESTION", + "Manage Metadata Ingestion", + "Create, remove, and update Metadata Ingestion sources."); + + public static final Privilege MANAGE_SECRETS_PRIVILEGE = + Privilege.of( + "MANAGE_SECRETS", "Manage Secrets", "Create & remove Secrets stored inside DataHub."); + + public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = + Privilege.of( + "MANAGE_USERS_AND_GROUPS", + "Manage Users & Groups", + "Create, remove, and update users and groups on DataHub."); + + public static final Privilege VIEW_ANALYTICS_PRIVILEGE = + Privilege.of("VIEW_ANALYTICS", "View Analytics", "View the DataHub analytics dashboard."); + + public static final Privilege GET_ANALYTICS_PRIVILEGE = + Privilege.of( + "GET_ANALYTICS_PRIVILEGE", + "Analytics API access", + "API read access to raw analytics data."); - public static final Privilege MANAGE_INGESTION_PRIVILEGE = Privilege.of( - "MANAGE_INGESTION", - "Manage Metadata Ingestion", - "Create, remove, and update Metadata Ingestion sources."); + public static final Privilege GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE = + Privilege.of( + "GENERATE_PERSONAL_ACCESS_TOKENS", + "Generate Personal Access Tokens", + "Generate personal access tokens for use with DataHub APIs."); - public static final Privilege MANAGE_SECRETS_PRIVILEGE = Privilege.of( - "MANAGE_SECRETS", - "Manage Secrets", - "Create & remove Secrets stored inside DataHub."); + public static final Privilege MANAGE_ACCESS_TOKENS = + Privilege.of( + "MANAGE_ACCESS_TOKENS", + "Manage All Access Tokens", + "Create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this " + + "privilege are effectively super users that can impersonate other users."); - public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = Privilege.of( - "MANAGE_USERS_AND_GROUPS", - "Manage Users & Groups", - "Create, remove, and update users and groups on DataHub."); + public static final Privilege MANAGE_DOMAINS_PRIVILEGE = + Privilege.of("MANAGE_DOMAINS", "Manage Domains", "Create and remove Asset Domains."); - public static final Privilege VIEW_ANALYTICS_PRIVILEGE = Privilege.of( - "VIEW_ANALYTICS", - "View Analytics", - "View the DataHub analytics dashboard."); + public static final Privilege MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = + Privilege.of( + "MANAGE_GLOBAL_ANNOUNCEMENTS", + "Manage Home Page Posts", + "Create and delete home page posts"); - public static final Privilege GET_ANALYTICS_PRIVILEGE = Privilege.of( - "GET_ANALYTICS_PRIVILEGE", - "Analytics API access", - "API read access to raw analytics data."); + public static final Privilege MANAGE_TESTS_PRIVILEGE = + Privilege.of("MANAGE_TESTS", "Manage Tests", "Create and remove Asset Tests."); - public static final Privilege GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE = Privilege.of( - "GENERATE_PERSONAL_ACCESS_TOKENS", - "Generate Personal Access Tokens", - "Generate personal access tokens for use with DataHub APIs."); - - public static final Privilege MANAGE_ACCESS_TOKENS = Privilege.of( - "MANAGE_ACCESS_TOKENS", - "Manage All Access Tokens", - "Create, list and revoke access tokens on behalf of users in DataHub. Be careful - Actors with this " - + "privilege are effectively super users that can impersonate other users." 
- ); - - public static final Privilege MANAGE_DOMAINS_PRIVILEGE = Privilege.of( - "MANAGE_DOMAINS", - "Manage Domains", - "Create and remove Asset Domains."); - - public static final Privilege MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = Privilege.of( - "MANAGE_GLOBAL_ANNOUNCEMENTS", - "Manage Home Page Posts", - "Create and delete home page posts"); - - public static final Privilege MANAGE_TESTS_PRIVILEGE = Privilege.of( - "MANAGE_TESTS", - "Manage Tests", - "Create and remove Asset Tests."); - - public static final Privilege MANAGE_GLOSSARIES_PRIVILEGE = Privilege.of( - "MANAGE_GLOSSARIES", - "Manage Glossaries", - "Create, edit, and remove Glossary Entities"); + public static final Privilege MANAGE_GLOSSARIES_PRIVILEGE = + Privilege.of( + "MANAGE_GLOSSARIES", "Manage Glossaries", "Create, edit, and remove Glossary Entities"); public static final Privilege MANAGE_USER_CREDENTIALS_PRIVILEGE = - Privilege.of("MANAGE_USER_CREDENTIALS", "Manage User Credentials", + Privilege.of( + "MANAGE_USER_CREDENTIALS", + "Manage User Credentials", "Manage credentials for native DataHub users, including inviting new users and resetting passwords"); - public static final Privilege MANAGE_TAGS_PRIVILEGE = Privilege.of( - "MANAGE_TAGS", - "Manage Tags", - "Create and remove Tags."); - - public static final Privilege CREATE_TAGS_PRIVILEGE = Privilege.of( - "CREATE_TAGS", - "Create Tags", - "Create new Tags."); - - public static final Privilege CREATE_DOMAINS_PRIVILEGE = Privilege.of( - "CREATE_DOMAINS", - "Create Domains", - "Create new Domains."); - - public static final Privilege CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = Privilege.of( - "CREATE_GLOBAL_ANNOUNCEMENTS", - "Create Global Announcements", - "Create new Global Announcements."); - - public static final Privilege MANAGE_GLOBAL_VIEWS = Privilege.of( - "MANAGE_GLOBAL_VIEWS", - "Manage Public Views", - "Create, update, and delete any Public (shared) Views."); - - public static final Privilege MANAGE_GLOBAL_OWNERSHIP_TYPES = Privilege.of( - "MANAGE_GLOBAL_OWNERSHIP_TYPES", - "Manage Ownership Types", - "Create, update and delete Ownership Types."); - - public static final List PLATFORM_PRIVILEGES = ImmutableList.of( - MANAGE_POLICIES_PRIVILEGE, - MANAGE_USERS_AND_GROUPS_PRIVILEGE, - VIEW_ANALYTICS_PRIVILEGE, - GET_ANALYTICS_PRIVILEGE, - MANAGE_DOMAINS_PRIVILEGE, - MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, - MANAGE_INGESTION_PRIVILEGE, - MANAGE_SECRETS_PRIVILEGE, - GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE, - MANAGE_ACCESS_TOKENS, - MANAGE_TESTS_PRIVILEGE, - MANAGE_GLOSSARIES_PRIVILEGE, - MANAGE_USER_CREDENTIALS_PRIVILEGE, - MANAGE_TAGS_PRIVILEGE, - CREATE_TAGS_PRIVILEGE, - CREATE_DOMAINS_PRIVILEGE, - CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, - MANAGE_GLOBAL_VIEWS, - MANAGE_GLOBAL_OWNERSHIP_TYPES - ); + public static final Privilege MANAGE_TAGS_PRIVILEGE = + Privilege.of("MANAGE_TAGS", "Manage Tags", "Create and remove Tags."); + + public static final Privilege CREATE_TAGS_PRIVILEGE = + Privilege.of("CREATE_TAGS", "Create Tags", "Create new Tags."); + + public static final Privilege CREATE_DOMAINS_PRIVILEGE = + Privilege.of("CREATE_DOMAINS", "Create Domains", "Create new Domains."); + + public static final Privilege CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE = + Privilege.of( + "CREATE_GLOBAL_ANNOUNCEMENTS", + "Create Global Announcements", + "Create new Global Announcements."); + + public static final Privilege MANAGE_GLOBAL_VIEWS = + Privilege.of( + "MANAGE_GLOBAL_VIEWS", + "Manage Public Views", + "Create, update, and delete any Public (shared) Views."); + + public 
static final Privilege MANAGE_GLOBAL_OWNERSHIP_TYPES = + Privilege.of( + "MANAGE_GLOBAL_OWNERSHIP_TYPES", + "Manage Ownership Types", + "Create, update and delete Ownership Types."); + + public static final List PLATFORM_PRIVILEGES = + ImmutableList.of( + MANAGE_POLICIES_PRIVILEGE, + MANAGE_USERS_AND_GROUPS_PRIVILEGE, + VIEW_ANALYTICS_PRIVILEGE, + GET_ANALYTICS_PRIVILEGE, + MANAGE_DOMAINS_PRIVILEGE, + MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, + MANAGE_INGESTION_PRIVILEGE, + MANAGE_SECRETS_PRIVILEGE, + GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE, + MANAGE_ACCESS_TOKENS, + MANAGE_TESTS_PRIVILEGE, + MANAGE_GLOSSARIES_PRIVILEGE, + MANAGE_USER_CREDENTIALS_PRIVILEGE, + MANAGE_TAGS_PRIVILEGE, + CREATE_TAGS_PRIVILEGE, + CREATE_DOMAINS_PRIVILEGE, + CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE, + MANAGE_GLOBAL_VIEWS, + MANAGE_GLOBAL_OWNERSHIP_TYPES); // Resource Privileges // - public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = Privilege.of( - "VIEW_ENTITY_PAGE", - "View Entity Page", - "The ability to view the entity page."); - - public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_TAGS", - "Edit Tags", - "The ability to add and remove tags to an asset."); - - public static final Privilege EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_GLOSSARY_TERMS", - "Edit Glossary Terms", - "The ability to add and remove glossary terms to an asset."); - - public static final Privilege EDIT_ENTITY_OWNERS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_OWNERS", - "Edit Owners", - "The ability to add and remove owners of an entity."); - - public static final Privilege EDIT_ENTITY_DOCS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_DOCS", - "Edit Description", - "The ability to edit the description (documentation) of an entity."); - - public static final Privilege EDIT_ENTITY_DOC_LINKS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_DOC_LINKS", - "Edit Links", - "The ability to edit links associated with an entity."); - - public static final Privilege EDIT_ENTITY_STATUS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_STATUS", - "Edit Status", - "The ability to edit the status of an entity (soft deleted or not)."); - - public static final Privilege EDIT_ENTITY_DOMAINS_PRIVILEGE = Privilege.of( - "EDIT_DOMAINS_PRIVILEGE", - "Edit Domain", - "The ability to edit the Domain of an entity."); - - public static final Privilege EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_DATA_PRODUCTS", - "Edit Data Product", - "The ability to edit the Data Product of an entity."); - - public static final Privilege EDIT_ENTITY_DEPRECATION_PRIVILEGE = Privilege.of( - "EDIT_DEPRECATION_PRIVILEGE", - "Edit Deprecation", - "The ability to edit the Deprecation status of an entity."); - - public static final Privilege EDIT_ENTITY_ASSERTIONS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_ASSERTIONS", - "Edit Assertions", - "The ability to add and remove assertions from an entity."); - - public static final Privilege EDIT_ENTITY_OPERATIONS_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_OPERATIONS", - "Edit Operations", - "The ability to report or edit operations information about an entity."); - - public static final Privilege EDIT_ENTITY_PRIVILEGE = Privilege.of( - "EDIT_ENTITY", - "Edit Entity", - "The ability to edit any information about an entity. 
Super user privileges for the entity."); - - public static final Privilege DELETE_ENTITY_PRIVILEGE = Privilege.of( - "DELETE_ENTITY", - "Delete", - "The ability to delete the delete this entity."); - - public static final Privilege EDIT_LINEAGE_PRIVILEGE = Privilege.of( - "EDIT_LINEAGE", - "Edit Lineage", - "The ability to add and remove lineage edges for this entity."); - - public static final Privilege EDIT_ENTITY_EMBED_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_EMBED", - "Edit Embedded Content", - "The ability to edit the embedded content for an entity."); - - public static final List COMMON_ENTITY_PRIVILEGES = ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_TAGS_PRIVILEGE, - EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, - EDIT_ENTITY_STATUS_PRIVILEGE, - EDIT_ENTITY_DOMAINS_PRIVILEGE, - EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE, - EDIT_ENTITY_DEPRECATION_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE, - DELETE_ENTITY_PRIVILEGE - ); + public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = + Privilege.of("VIEW_ENTITY_PAGE", "View Entity Page", "The ability to view the entity page."); + + public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_TAGS", "Edit Tags", "The ability to add and remove tags to an asset."); + + public static final Privilege EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_GLOSSARY_TERMS", + "Edit Glossary Terms", + "The ability to add and remove glossary terms to an asset."); + + public static final Privilege EDIT_ENTITY_OWNERS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_OWNERS", + "Edit Owners", + "The ability to add and remove owners of an entity."); + + public static final Privilege EDIT_ENTITY_DOCS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_DOCS", + "Edit Description", + "The ability to edit the description (documentation) of an entity."); + + public static final Privilege EDIT_ENTITY_DOC_LINKS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_DOC_LINKS", + "Edit Links", + "The ability to edit links associated with an entity."); + + public static final Privilege EDIT_ENTITY_STATUS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_STATUS", + "Edit Status", + "The ability to edit the status of an entity (soft deleted or not)."); + + public static final Privilege EDIT_ENTITY_DOMAINS_PRIVILEGE = + Privilege.of( + "EDIT_DOMAINS_PRIVILEGE", "Edit Domain", "The ability to edit the Domain of an entity."); + + public static final Privilege EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_DATA_PRODUCTS", + "Edit Data Product", + "The ability to edit the Data Product of an entity."); + + public static final Privilege EDIT_ENTITY_DEPRECATION_PRIVILEGE = + Privilege.of( + "EDIT_DEPRECATION_PRIVILEGE", + "Edit Deprecation", + "The ability to edit the Deprecation status of an entity."); + + public static final Privilege EDIT_ENTITY_ASSERTIONS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_ASSERTIONS", + "Edit Assertions", + "The ability to add and remove assertions from an entity."); + + public static final Privilege EDIT_ENTITY_OPERATIONS_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_OPERATIONS", + "Edit Operations", + "The ability to report or edit operations information about an entity."); + + public static final Privilege EDIT_ENTITY_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY", + "Edit Entity", + "The ability to edit any information about an entity. 
Super user privileges for the entity.");
+
+  public static final Privilege DELETE_ENTITY_PRIVILEGE =
+      Privilege.of("DELETE_ENTITY", "Delete", "The ability to delete this entity.");
+
+  public static final Privilege EDIT_LINEAGE_PRIVILEGE =
+      Privilege.of(
+          "EDIT_LINEAGE",
+          "Edit Lineage",
+          "The ability to add and remove lineage edges for this entity.");
+
+  public static final Privilege EDIT_ENTITY_EMBED_PRIVILEGE =
+      Privilege.of(
+          "EDIT_ENTITY_EMBED",
+          "Edit Embedded Content",
+          "The ability to edit the embedded content for an entity.");
+
+  public static final List COMMON_ENTITY_PRIVILEGES =
+      ImmutableList.of(
+          VIEW_ENTITY_PAGE_PRIVILEGE,
+          EDIT_ENTITY_TAGS_PRIVILEGE,
+          EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE,
+          EDIT_ENTITY_OWNERS_PRIVILEGE,
+          EDIT_ENTITY_DOCS_PRIVILEGE,
+          EDIT_ENTITY_DOC_LINKS_PRIVILEGE,
+          EDIT_ENTITY_STATUS_PRIVILEGE,
+          EDIT_ENTITY_DOMAINS_PRIVILEGE,
+          EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE,
+          EDIT_ENTITY_DEPRECATION_PRIVILEGE,
+          EDIT_ENTITY_PRIVILEGE,
+          DELETE_ENTITY_PRIVILEGE);
 
   // Dataset Privileges
-  public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = Privilege.of(
-      "EDIT_DATASET_COL_TAGS",
-      "Edit Dataset Column Tags",
-      "The ability to edit the column (field) tags associated with a dataset schema."
-  );
-
-  public static final Privilege EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE = Privilege.of(
-      "EDIT_DATASET_COL_GLOSSARY_TERMS",
-      "Edit Dataset Column Glossary Terms",
-      "The ability to edit the column (field) glossary terms associated with a dataset schema."
-  );
-
-  public static final Privilege EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE = Privilege.of(
-      "EDIT_DATASET_COL_DESCRIPTION",
-      "Edit Dataset Column Descriptions",
-      "The ability to edit the column (field) descriptions associated with a dataset schema."
- ); - - public static final Privilege VIEW_DATASET_USAGE_PRIVILEGE = Privilege.of( - "VIEW_DATASET_USAGE", - "View Dataset Usage", - "The ability to access dataset usage information (includes usage statistics and queries)."); - - public static final Privilege VIEW_DATASET_PROFILE_PRIVILEGE = Privilege.of( - "VIEW_DATASET_PROFILE", - "View Dataset Profile", - "The ability to access dataset profile (snapshot statistics)"); - - public static final Privilege EDIT_QUERIES_PRIVILEGE = Privilege.of( - "EDIT_ENTITY_QUERIES", - "Edit Dataset Queries", - "The ability to edit the Queries for a Dataset."); + public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = + Privilege.of( + "EDIT_DATASET_COL_TAGS", + "Edit Dataset Column Tags", + "The ability to edit the column (field) tags associated with a dataset schema."); + + public static final Privilege EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE = + Privilege.of( + "EDIT_DATASET_COL_GLOSSARY_TERMS", + "Edit Dataset Column Glossary Terms", + "The ability to edit the column (field) glossary terms associated with a dataset schema."); + + public static final Privilege EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE = + Privilege.of( + "EDIT_DATASET_COL_DESCRIPTION", + "Edit Dataset Column Descriptions", + "The ability to edit the column (field) descriptions associated with a dataset schema."); + + public static final Privilege VIEW_DATASET_USAGE_PRIVILEGE = + Privilege.of( + "VIEW_DATASET_USAGE", + "View Dataset Usage", + "The ability to access dataset usage information (includes usage statistics and queries)."); + + public static final Privilege VIEW_DATASET_PROFILE_PRIVILEGE = + Privilege.of( + "VIEW_DATASET_PROFILE", + "View Dataset Profile", + "The ability to access dataset profile (snapshot statistics)"); + + public static final Privilege EDIT_QUERIES_PRIVILEGE = + Privilege.of( + "EDIT_ENTITY_QUERIES", + "Edit Dataset Queries", + "The ability to edit the Queries for a Dataset."); // Tag Privileges - public static final Privilege EDIT_TAG_COLOR_PRIVILEGE = Privilege.of( - "EDIT_TAG_COLOR", - "Edit Tag Color", - "The ability to change the color of a Tag."); + public static final Privilege EDIT_TAG_COLOR_PRIVILEGE = + Privilege.of("EDIT_TAG_COLOR", "Edit Tag Color", "The ability to change the color of a Tag."); // Group Privileges - public static final Privilege EDIT_GROUP_MEMBERS_PRIVILEGE = Privilege.of( - "EDIT_GROUP_MEMBERS", - "Edit Group Members", - "The ability to add and remove members to a group."); + public static final Privilege EDIT_GROUP_MEMBERS_PRIVILEGE = + Privilege.of( + "EDIT_GROUP_MEMBERS", + "Edit Group Members", + "The ability to add and remove members to a group."); // User Privileges - public static final Privilege EDIT_USER_PROFILE_PRIVILEGE = Privilege.of( - "EDIT_USER_PROFILE", - "Edit User Profile", - "The ability to change the user's profile including display name, bio, title, profile image, etc."); + public static final Privilege EDIT_USER_PROFILE_PRIVILEGE = + Privilege.of( + "EDIT_USER_PROFILE", + "Edit User Profile", + "The ability to change the user's profile including display name, bio, title, profile image, etc."); // User + Group Privileges - public static final Privilege EDIT_CONTACT_INFO_PRIVILEGE = Privilege.of( - "EDIT_CONTACT_INFO", - "Edit Contact Information", - "The ability to change the contact information such as email & chat handles."); + public static final Privilege EDIT_CONTACT_INFO_PRIVILEGE = + Privilege.of( + "EDIT_CONTACT_INFO", + "Edit Contact Information", + "The ability to change the contact 
information such as email & chat handles."); // Glossary Node Privileges - public static final Privilege MANAGE_GLOSSARY_CHILDREN_PRIVILEGE = Privilege.of( - "MANAGE_GLOSSARY_CHILDREN", - "Manage Direct Glossary Children", - "The ability to create and delete the direct children of this entity."); + public static final Privilege MANAGE_GLOSSARY_CHILDREN_PRIVILEGE = + Privilege.of( + "MANAGE_GLOSSARY_CHILDREN", + "Manage Direct Glossary Children", + "The ability to create and delete the direct children of this entity."); // Glossary Node Privileges - public static final Privilege MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE = Privilege.of( - "MANAGE_ALL_GLOSSARY_CHILDREN", - "Manage All Glossary Children", - "The ability to create and delete everything underneath this entity."); - - // REST API Specific Privileges (not adding to lists of privileges above as those affect GraphQL as well) - public static final Privilege GET_TIMELINE_PRIVILEGE = Privilege.of( - "GET_TIMELINE_PRIVILEGE", - "Get Timeline API", - "The ability to use the GET Timeline API."); - - public static final Privilege GET_ENTITY_PRIVILEGE = Privilege.of( - "GET_ENTITY_PRIVILEGE", - "Get Entity + Relationships API", - "The ability to use the GET Entity and Relationships API."); - - public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE = Privilege.of( - "GET_TIMESERIES_ASPECT_PRIVILEGE", - "Get Timeseries Aspect API", - "The ability to use the GET Timeseries Aspect API."); - - public static final Privilege GET_COUNTS_PRIVILEGE = Privilege.of( - "GET_COUNTS_PRIVILEGE", - "Get Aspect/Entity Count APIs", - "The ability to use the GET Aspect/Entity Count APIs."); - - public static final Privilege RESTORE_INDICES_PRIVILEGE = Privilege.of( - "RESTORE_INDICES_PRIVILEGE", - "Restore Indicies API", - "The ability to use the Restore Indices API."); - - public static final Privilege GET_TIMESERIES_INDEX_SIZES_PRIVILEGE = Privilege.of( - "GET_TIMESERIES_INDEX_SIZES_PRIVILEGE", - "Get Timeseries index sizes API", - "The ability to use the get Timeseries indices size API."); - - public static final Privilege TRUNCATE_TIMESERIES_INDEX_PRIVILEGE = Privilege.of( - "TRUNCATE_TIMESERIES_INDEX_PRIVILEGE", - "Truncate timeseries aspect index size API", - "The ability to use the API to truncate a timeseries index."); - - public static final Privilege GET_ES_TASK_STATUS_PRIVILEGE = Privilege.of( - "GET_ES_TASK_STATUS_PRIVILEGE", - "Get ES task status API", - "The ability to use the get task status API for an ElasticSearch task."); - - public static final Privilege SEARCH_PRIVILEGE = Privilege.of( - "SEARCH_PRIVILEGE", - "Search API", - "The ability to access search APIs."); - - public static final Privilege SET_WRITEABLE_PRIVILEGE = Privilege.of( - "SET_WRITEABLE_PRIVILEGE", - "Enable/Disable Writeability API", - "The ability to enable or disable GMS writeability for data migrations."); - - public static final Privilege APPLY_RETENTION_PRIVILEGE = Privilege.of( - "APPLY_RETENTION_PRIVILEGE", - "Apply Retention API", - "The ability to apply retention using the API."); - - public static final Privilege PRODUCE_PLATFORM_EVENT_PRIVILEGE = Privilege.of( - "PRODUCE_PLATFORM_EVENT_PRIVILEGE", - "Produce Platform Event API", - "The ability to produce Platform Events using the API."); - - public static final ResourcePrivileges DATASET_PRIVILEGES = ResourcePrivileges.of( - "dataset", - "Datasets", - "Datasets indexed by DataHub", Stream.of( - COMMON_ENTITY_PRIVILEGES, - ImmutableList.of( - VIEW_DATASET_USAGE_PRIVILEGE, - VIEW_DATASET_PROFILE_PRIVILEGE, - 
EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE,
-              EDIT_DATASET_COL_TAGS_PRIVILEGE,
-              EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE,
-              EDIT_ENTITY_ASSERTIONS_PRIVILEGE,
-              EDIT_LINEAGE_PRIVILEGE,
-              EDIT_ENTITY_EMBED_PRIVILEGE,
-              EDIT_QUERIES_PRIVILEGE))
-          .flatMap(Collection::stream)
-          .collect(Collectors.toList())
-  );
+  public static final Privilege MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE =
+      Privilege.of(
+          "MANAGE_ALL_GLOSSARY_CHILDREN",
+          "Manage All Glossary Children",
+          "The ability to create and delete everything underneath this entity.");
+
+  // REST API Specific Privileges (not adding to lists of privileges above as those affect GraphQL
+  // as well)
+  public static final Privilege GET_TIMELINE_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMELINE_PRIVILEGE", "Get Timeline API", "The ability to use the GET Timeline API.");
+
+  public static final Privilege GET_ENTITY_PRIVILEGE =
+      Privilege.of(
+          "GET_ENTITY_PRIVILEGE",
+          "Get Entity + Relationships API",
+          "The ability to use the GET Entity and Relationships API.");
+
+  public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMESERIES_ASPECT_PRIVILEGE",
+          "Get Timeseries Aspect API",
+          "The ability to use the GET Timeseries Aspect API.");
+
+  public static final Privilege GET_COUNTS_PRIVILEGE =
+      Privilege.of(
+          "GET_COUNTS_PRIVILEGE",
+          "Get Aspect/Entity Count APIs",
+          "The ability to use the GET Aspect/Entity Count APIs.");
+
+  public static final Privilege RESTORE_INDICES_PRIVILEGE =
+      Privilege.of(
+          "RESTORE_INDICES_PRIVILEGE",
+          "Restore Indices API",
+          "The ability to use the Restore Indices API.");
+
+  public static final Privilege GET_TIMESERIES_INDEX_SIZES_PRIVILEGE =
+      Privilege.of(
+          "GET_TIMESERIES_INDEX_SIZES_PRIVILEGE",
+          "Get Timeseries index sizes API",
+          "The ability to use the get Timeseries indices size API.");
+
+  public static final Privilege TRUNCATE_TIMESERIES_INDEX_PRIVILEGE =
+      Privilege.of(
+          "TRUNCATE_TIMESERIES_INDEX_PRIVILEGE",
+          "Truncate timeseries aspect index size API",
+          "The ability to use the API to truncate a timeseries index.");
+
+  public static final Privilege GET_ES_TASK_STATUS_PRIVILEGE =
+      Privilege.of(
+          "GET_ES_TASK_STATUS_PRIVILEGE",
+          "Get ES task status API",
+          "The ability to use the get task status API for an ElasticSearch task.");
+
+  public static final Privilege SEARCH_PRIVILEGE =
+      Privilege.of("SEARCH_PRIVILEGE", "Search API", "The ability to access search APIs.");
+
+  public static final Privilege SET_WRITEABLE_PRIVILEGE =
+      Privilege.of(
+          "SET_WRITEABLE_PRIVILEGE",
+          "Enable/Disable Writeability API",
+          "The ability to enable or disable GMS writeability for data migrations.");
+
+  public static final Privilege APPLY_RETENTION_PRIVILEGE =
+      Privilege.of(
+          "APPLY_RETENTION_PRIVILEGE",
+          "Apply Retention API",
+          "The ability to apply retention using the API.");
+
+  public static final Privilege PRODUCE_PLATFORM_EVENT_PRIVILEGE =
+      Privilege.of(
+          "PRODUCE_PLATFORM_EVENT_PRIVILEGE",
+          "Produce Platform Event API",
+          "The ability to produce Platform Events using the API.");
+
+  public static final ResourcePrivileges DATASET_PRIVILEGES =
+      ResourcePrivileges.of(
+          "dataset",
+          "Datasets",
+          "Datasets indexed by DataHub",
+          Stream.of(
+                  COMMON_ENTITY_PRIVILEGES,
+                  ImmutableList.of(
+                      VIEW_DATASET_USAGE_PRIVILEGE,
+                      VIEW_DATASET_PROFILE_PRIVILEGE,
+                      EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE,
+                      EDIT_DATASET_COL_TAGS_PRIVILEGE,
+                      EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE,
+                      EDIT_ENTITY_ASSERTIONS_PRIVILEGE,
+                      EDIT_LINEAGE_PRIVILEGE,
+                      EDIT_ENTITY_EMBED_PRIVILEGE,
+
EDIT_QUERIES_PRIVILEGE)) + .flatMap(Collection::stream) + .collect(Collectors.toList())); // Charts Privileges - public static final ResourcePrivileges CHART_PRIVILEGES = ResourcePrivileges.of( - "chart", - "Charts", - "Charts indexed by DataHub", - Stream.concat( - COMMON_ENTITY_PRIVILEGES.stream(), - ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) - .collect(Collectors.toList()) - ); + public static final ResourcePrivileges CHART_PRIVILEGES = + ResourcePrivileges.of( + "chart", + "Charts", + "Charts indexed by DataHub", + Stream.concat( + COMMON_ENTITY_PRIVILEGES.stream(), + ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) + .collect(Collectors.toList())); // Dashboard Privileges - public static final ResourcePrivileges DASHBOARD_PRIVILEGES = ResourcePrivileges.of( - "dashboard", - "Dashboards", - "Dashboards indexed by DataHub", - Stream.concat( - COMMON_ENTITY_PRIVILEGES.stream(), - ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) - .collect(Collectors.toList()) - ); + public static final ResourcePrivileges DASHBOARD_PRIVILEGES = + ResourcePrivileges.of( + "dashboard", + "Dashboards", + "Dashboards indexed by DataHub", + Stream.concat( + COMMON_ENTITY_PRIVILEGES.stream(), + ImmutableList.of(EDIT_LINEAGE_PRIVILEGE, EDIT_ENTITY_EMBED_PRIVILEGE).stream()) + .collect(Collectors.toList())); // Data Doc Privileges - public static final ResourcePrivileges NOTEBOOK_PRIVILEGES = ResourcePrivileges.of( - "notebook", - "Notebook", - "Notebook indexed by DataHub", - COMMON_ENTITY_PRIVILEGES - ); + public static final ResourcePrivileges NOTEBOOK_PRIVILEGES = + ResourcePrivileges.of( + "notebook", "Notebook", "Notebook indexed by DataHub", COMMON_ENTITY_PRIVILEGES); // Data Flow Privileges - public static final ResourcePrivileges DATA_FLOW_PRIVILEGES = ResourcePrivileges.of( - "dataFlow", - "Data Pipelines", - "Data Pipelines indexed by DataHub", - COMMON_ENTITY_PRIVILEGES - ); + public static final ResourcePrivileges DATA_FLOW_PRIVILEGES = + ResourcePrivileges.of( + "dataFlow", + "Data Pipelines", + "Data Pipelines indexed by DataHub", + COMMON_ENTITY_PRIVILEGES); // Data Job Privileges - public static final ResourcePrivileges DATA_JOB_PRIVILEGES = ResourcePrivileges.of( - "dataJob", - "Data Tasks", - "Data Tasks indexed by DataHub", - Stream.concat( - COMMON_ENTITY_PRIVILEGES.stream(), - ImmutableList.of(EDIT_LINEAGE_PRIVILEGE).stream()) - .collect(Collectors.toList()) - ); + public static final ResourcePrivileges DATA_JOB_PRIVILEGES = + ResourcePrivileges.of( + "dataJob", + "Data Tasks", + "Data Tasks indexed by DataHub", + Stream.concat( + COMMON_ENTITY_PRIVILEGES.stream(), + ImmutableList.of(EDIT_LINEAGE_PRIVILEGE).stream()) + .collect(Collectors.toList())); // Tag Privileges - public static final ResourcePrivileges TAG_PRIVILEGES = ResourcePrivileges.of( - "tag", - "Tags", - "Tags indexed by DataHub", - ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_TAG_COLOR_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE) - ); + public static final ResourcePrivileges TAG_PRIVILEGES = + ResourcePrivileges.of( + "tag", + "Tags", + "Tags indexed by DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_TAG_COLOR_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE)); // Container Privileges - public static final ResourcePrivileges CONTAINER_PRIVILEGES = 
ResourcePrivileges.of( - "container", - "Containers", - "Containers indexed by DataHub", - COMMON_ENTITY_PRIVILEGES - ); + public static final ResourcePrivileges CONTAINER_PRIVILEGES = + ResourcePrivileges.of( + "container", "Containers", "Containers indexed by DataHub", COMMON_ENTITY_PRIVILEGES); // Domain Privileges - public static final Privilege MANAGE_DATA_PRODUCTS_PRIVILEGE = Privilege.of( - "MANAGE_DATA_PRODUCTS", - "Manage Data Products", - "The ability to create, edit, and delete Data Products within a Domain"); - + public static final Privilege MANAGE_DATA_PRODUCTS_PRIVILEGE = + Privilege.of( + "MANAGE_DATA_PRODUCTS", + "Manage Data Products", + "The ability to create, edit, and delete Data Products within a Domain"); // Domain Privileges - public static final ResourcePrivileges DOMAIN_PRIVILEGES = ResourcePrivileges.of( - "domain", - "Domains", - "Domains created on DataHub", - ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, MANAGE_DATA_PRODUCTS_PRIVILEGE) - ); + public static final ResourcePrivileges DOMAIN_PRIVILEGES = + ResourcePrivileges.of( + "domain", + "Domains", + "Domains created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + MANAGE_DATA_PRODUCTS_PRIVILEGE)); // Data Product Privileges - public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES = ResourcePrivileges.of( - "dataProduct", - "Data Products", - "Data Products created on DataHub", - ImmutableList.of(VIEW_ENTITY_PAGE_PRIVILEGE, EDIT_ENTITY_OWNERS_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, EDIT_ENTITY_TAGS_PRIVILEGE, - EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, EDIT_ENTITY_DOMAINS_PRIVILEGE) - ); + public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES = + ResourcePrivileges.of( + "dataProduct", + "Data Products", + "Data Products created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + EDIT_ENTITY_TAGS_PRIVILEGE, + EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, + EDIT_ENTITY_DOMAINS_PRIVILEGE)); // Glossary Term Privileges - public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES = ResourcePrivileges.of( - "glossaryTerm", - "Glossary Terms", - "Glossary Terms created on DataHub", - ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, - EDIT_ENTITY_DEPRECATION_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE) - ); + public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES = + ResourcePrivileges.of( + "glossaryTerm", + "Glossary Terms", + "Glossary Terms created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_DEPRECATION_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)); // Glossary Node Privileges - public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES = ResourcePrivileges.of( - "glossaryNode", - "Glossary Term Groups", - "Glossary Term Groups created on DataHub", - ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - 
EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_DOC_LINKS_PRIVILEGE, - EDIT_ENTITY_DEPRECATION_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE, - MANAGE_GLOSSARY_CHILDREN_PRIVILEGE, - MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE) - ); + public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES = + ResourcePrivileges.of( + "glossaryNode", + "Glossary Term Groups", + "Glossary Term Groups created on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_DOC_LINKS_PRIVILEGE, + EDIT_ENTITY_DEPRECATION_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + MANAGE_GLOSSARY_CHILDREN_PRIVILEGE, + MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)); // Group Privileges - public static final ResourcePrivileges CORP_GROUP_PRIVILEGES = ResourcePrivileges.of( - "corpGroup", - "Groups", - "Groups on DataHub", - ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_ENTITY_OWNERS_PRIVILEGE, - EDIT_GROUP_MEMBERS_PRIVILEGE, - EDIT_CONTACT_INFO_PRIVILEGE, - EDIT_ENTITY_DOCS_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE) - ); + public static final ResourcePrivileges CORP_GROUP_PRIVILEGES = + ResourcePrivileges.of( + "corpGroup", + "Groups", + "Groups on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_ENTITY_OWNERS_PRIVILEGE, + EDIT_GROUP_MEMBERS_PRIVILEGE, + EDIT_CONTACT_INFO_PRIVILEGE, + EDIT_ENTITY_DOCS_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)); // User Privileges - public static final ResourcePrivileges CORP_USER_PRIVILEGES = ResourcePrivileges.of( - "corpuser", - "Users", - "Users on DataHub", + public static final ResourcePrivileges CORP_USER_PRIVILEGES = + ResourcePrivileges.of( + "corpuser", + "Users", + "Users on DataHub", + ImmutableList.of( + VIEW_ENTITY_PAGE_PRIVILEGE, + EDIT_CONTACT_INFO_PRIVILEGE, + EDIT_USER_PROFILE_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)); + + public static final List ENTITY_RESOURCE_PRIVILEGES = ImmutableList.of( - VIEW_ENTITY_PAGE_PRIVILEGE, - EDIT_CONTACT_INFO_PRIVILEGE, - EDIT_USER_PROFILE_PRIVILEGE, - EDIT_ENTITY_PRIVILEGE) - ); - - public static final List ENTITY_RESOURCE_PRIVILEGES = ImmutableList.of( - DATASET_PRIVILEGES, - DASHBOARD_PRIVILEGES, - CHART_PRIVILEGES, - DATA_FLOW_PRIVILEGES, - DATA_JOB_PRIVILEGES, - TAG_PRIVILEGES, - CONTAINER_PRIVILEGES, - DOMAIN_PRIVILEGES, - GLOSSARY_TERM_PRIVILEGES, - GLOSSARY_NODE_PRIVILEGES, - CORP_GROUP_PRIVILEGES, - CORP_USER_PRIVILEGES, - NOTEBOOK_PRIVILEGES, - DATA_PRODUCT_PRIVILEGES - ); + DATASET_PRIVILEGES, + DASHBOARD_PRIVILEGES, + CHART_PRIVILEGES, + DATA_FLOW_PRIVILEGES, + DATA_JOB_PRIVILEGES, + TAG_PRIVILEGES, + CONTAINER_PRIVILEGES, + DOMAIN_PRIVILEGES, + GLOSSARY_TERM_PRIVILEGES, + GLOSSARY_NODE_PRIVILEGES, + CORP_GROUP_PRIVILEGES, + CORP_USER_PRIVILEGES, + NOTEBOOK_PRIVILEGES, + DATA_PRODUCT_PRIVILEGES); // Merge all entity specific resource privileges to create a superset of all resource privileges - public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES = ResourcePrivileges.of( - "all", - "All Types", - "All Types", - ENTITY_RESOURCE_PRIVILEGES.stream().flatMap(resourcePrivileges -> resourcePrivileges.getPrivileges().stream()).distinct().collect( - Collectors.toList()) - ); + public static final ResourcePrivileges ALL_RESOURCE_PRIVILEGES = + ResourcePrivileges.of( + "all", + "All Types", + "All Types", + ENTITY_RESOURCE_PRIVILEGES.stream() + .flatMap(resourcePrivileges -> resourcePrivileges.getPrivileges().stream()) + .distinct() + .collect(Collectors.toList())); public static final List RESOURCE_PRIVILEGES = - ImmutableList.builder().addAll(ENTITY_RESOURCE_PRIVILEGES) + 
ImmutableList.builder() + .addAll(ENTITY_RESOURCE_PRIVILEGES) .add(ALL_RESOURCE_PRIVILEGES) .build(); @@ -593,9 +623,10 @@ static ResourcePrivileges of( String resourceTypeDisplayName, String resourceTypeDescription, List privileges) { - return new ResourcePrivileges(resourceType, resourceTypeDisplayName, resourceTypeDescription, privileges); + return new ResourcePrivileges( + resourceType, resourceTypeDisplayName, resourceTypeDescription, privileges); } } - private PoliciesConfig() { } + private PoliciesConfig() {} } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java b/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java index 2d4e355a93e53..00342ff2afbe2 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/restli/DefaultRestliClientFactory.java @@ -9,31 +9,29 @@ import com.linkedin.r2.transport.common.bridge.client.TransportClientAdapter; import com.linkedin.r2.transport.http.client.HttpClientFactory; import com.linkedin.restli.client.RestClient; -import org.apache.commons.lang.StringUtils; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLParameters; import java.net.URI; import java.security.InvalidParameterException; import java.security.NoSuchAlgorithmException; import java.util.Collections; import java.util.HashMap; import java.util.Map; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLParameters; +import org.apache.commons.lang.StringUtils; public class DefaultRestliClientFactory { private static final String DEFAULT_REQUEST_TIMEOUT_IN_MS = "10000"; - private DefaultRestliClientFactory() { - } + private DefaultRestliClientFactory() {} @Nonnull - public static RestClient getRestLiD2Client(@Nonnull String restLiClientD2ZkHost, - @Nonnull String restLiClientD2ZkPath) { - final D2Client d2Client = new D2ClientBuilder() + public static RestClient getRestLiD2Client( + @Nonnull String restLiClientD2ZkHost, @Nonnull String restLiClientD2ZkPath) { + final D2Client d2Client = + new D2ClientBuilder() .setZkHosts(restLiClientD2ZkHost) .setBasePath(restLiClientD2ZkPath) .build(); @@ -42,18 +40,27 @@ public static RestClient getRestLiD2Client(@Nonnull String restLiClientD2ZkHost, } @Nonnull - public static RestClient getRestLiClient(@Nonnull String restLiServerHost, int restLiServerPort, boolean useSSL, - @Nullable String sslProtocol) { + public static RestClient getRestLiClient( + @Nonnull String restLiServerHost, + int restLiServerPort, + boolean useSSL, + @Nullable String sslProtocol) { return getRestLiClient(restLiServerHost, restLiServerPort, useSSL, sslProtocol, null); } @Nonnull - public static RestClient getRestLiClient(@Nonnull String restLiServerHost, int restLiServerPort, boolean useSSL, - @Nullable String sslProtocol, @Nullable Map params) { + public static RestClient getRestLiClient( + @Nonnull String restLiServerHost, + int restLiServerPort, + boolean useSSL, + @Nullable String sslProtocol, + @Nullable Map params) { return getRestLiClient( - URI.create(String.format("%s://%s:%s", useSSL ? "https" : "http", restLiServerHost, restLiServerPort)), - sslProtocol, - params); + URI.create( + String.format( + "%s://%s:%s", useSSL ? 
"https" : "http", restLiServerHost, restLiServerPort)), + sslProtocol, + params); } @Nonnull @@ -62,8 +69,10 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s } @Nonnull - public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String sslProtocol, - @Nullable Map inputParams) { + public static RestClient getRestLiClient( + @Nonnull URI gmsUri, + @Nullable String sslProtocol, + @Nullable Map inputParams) { if (StringUtils.isBlank(gmsUri.getHost()) || gmsUri.getPort() <= 0) { throw new InvalidParameterException("Invalid restli server host name or port!"); } @@ -82,7 +91,7 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s SSLParameters sslParameters = new SSLParameters(); if (sslProtocol != null) { - sslParameters.setProtocols(new String[]{sslProtocol}); + sslParameters.setProtocols(new String[] {sslProtocol}); } params.put(HttpClientFactory.HTTP_SSL_PARAMS, sslParameters); } @@ -90,7 +99,8 @@ public static RestClient getRestLiClient(@Nonnull URI gmsUri, @Nullable String s return getHttpRestClient(gmsUri, params); } - private static RestClient getHttpRestClient(@Nonnull URI gmsUri, @Nonnull Map params) { + private static RestClient getHttpRestClient( + @Nonnull URI gmsUri, @Nonnull Map params) { Map finalParams = new HashMap<>(); finalParams.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, DEFAULT_REQUEST_TIMEOUT_IN_MS); finalParams.putAll(params); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java index 09220bb481a03..737f79dc1c441 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/restli/RestliUtil.java @@ -11,7 +11,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class RestliUtil { private RestliUtil() { @@ -19,8 +18,9 @@ private RestliUtil() { } /** - * Executes the provided supplier and convert the results to a {@link Task}. - * Exceptions thrown during the execution will be properly wrapped in {@link RestLiServiceException}. + * Executes the provided supplier and convert the results to a {@link Task}. Exceptions thrown + * during the execution will be properly wrapped in {@link RestLiServiceException}. 
@@ -31,7 +31,8 @@ public static Task toTask(@Nonnull Supplier supplier) {
     } catch (Throwable throwable) {
 
       // Convert IllegalArgumentException to BAD REQUEST
-      if (throwable instanceof IllegalArgumentException || throwable.getCause() instanceof IllegalArgumentException) {
+      if (throwable instanceof IllegalArgumentException
+          || throwable.getCause() instanceof IllegalArgumentException) {
         throwable = badRequestException(throwable.getMessage());
       }
 
@@ -47,20 +48,24 @@ public static Task toTask(@Nonnull Supplier supplier) {
   public static Task toTask(@Nonnull Supplier supplier, String metricName) {
     Timer.Context context = MetricUtils.timer(metricName).time();
     // Stop timer on success and failure
-    return toTask(supplier).transform(orig -> {
-      context.stop();
-      if (orig.isFailed()) {
-        MetricUtils.counter(MetricRegistry.name(metricName, "failed")).inc();
-      } else {
-        MetricUtils.counter(MetricRegistry.name(metricName, "success")).inc();
-      }
-      return orig;
-    });
+    return toTask(supplier)
+        .transform(
+            orig -> {
+              context.stop();
+              if (orig.isFailed()) {
+                MetricUtils.counter(MetricRegistry.name(metricName, "failed")).inc();
+              } else {
+                MetricUtils.counter(MetricRegistry.name(metricName, "success")).inc();
+              }
+              return orig;
+            });
   }
 
   /**
-   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional} instead.
-   * A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the optional is emtpy.
+   * Similar to {@link #toTask(Supplier)} but the supplier is expected to return an {@link Optional}
+   * instead. A {@link RestLiServiceException} with 404 HTTP status code will be thrown if the
+   * optional is empty.
+   *
    * @param supplier The supplier to execute
    * @return A parseq {@link Task}
    */
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java
index 913def2a040f4..5f3975b066fde 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/AuditStampUtils.java
@@ -1,21 +1,19 @@
 package com.linkedin.metadata.utils;
 
+import static com.linkedin.metadata.Constants.SYSTEM_ACTOR;
+
 import com.linkedin.common.AuditStamp;
 import com.linkedin.common.urn.UrnUtils;
-import lombok.extern.slf4j.Slf4j;
-
 import java.time.Clock;
-
-import static com.linkedin.metadata.Constants.SYSTEM_ACTOR;
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class AuditStampUtils {
-  private AuditStampUtils() {
-  }
+  private AuditStampUtils() {}
 
-  public static AuditStamp createDefaultAuditStamp() {
-    return new AuditStamp()
-        .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
-        .setTime(Clock.systemUTC().millis());
-  }
+  public static AuditStamp createDefaultAuditStamp() {
+    return new AuditStamp()
+        .setActor(UrnUtils.getUrn(SYSTEM_ACTOR))
+        .setTime(Clock.systemUTC().millis());
+  }
 }
diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java
index 3b2116fa65127..f03d4c76c70d8 100644
--- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java
+++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/BrowseUtil.java
@@ -8,27 +8,29 @@ import com.linkedin.metadata.query.BrowseResultMetadata;
 
 import java.util.stream.Collectors;
 
-
 public class BrowseUtil {
 
-  private BrowseUtil() {
-  }
+ 
private BrowseUtil() {} - public static com.linkedin.metadata.query.BrowseResult convertToLegacyResult(BrowseResult browseResult) { - com.linkedin.metadata.query.BrowseResult legacyResult = new com.linkedin.metadata.query.BrowseResult(); + public static com.linkedin.metadata.query.BrowseResult convertToLegacyResult( + BrowseResult browseResult) { + com.linkedin.metadata.query.BrowseResult legacyResult = + new com.linkedin.metadata.query.BrowseResult(); legacyResult.setFrom(browseResult.getFrom()); legacyResult.setPageSize(browseResult.getPageSize()); legacyResult.setNumEntities(browseResult.getNumEntities()); - legacyResult.setEntities(new BrowseResultEntityArray(browseResult.getEntities() - .stream() - .map(entity -> new BrowseResultEntity(entity.data())) - .collect(Collectors.toList()))); + legacyResult.setEntities( + new BrowseResultEntityArray( + browseResult.getEntities().stream() + .map(entity -> new BrowseResultEntity(entity.data())) + .collect(Collectors.toList()))); BrowseResultMetadata legacyMetadata = new BrowseResultMetadata(); - legacyMetadata.setGroups(new BrowseResultGroupArray(browseResult.getGroups() - .stream() - .map(group -> new BrowseResultGroup(group.data())) - .collect(Collectors.toList()))); + legacyMetadata.setGroups( + new BrowseResultGroupArray( + browseResult.getGroups().stream() + .map(group -> new BrowseResultGroup(group.data())) + .collect(Collectors.toList()))); legacyMetadata.setPath(browseResult.getMetadata().getPath()); legacyMetadata.setTotalNumEntities(browseResult.getMetadata().getTotalNumEntities()); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java index 551683153aadd..32422b2a2d4a8 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/ConcurrencyUtils.java @@ -11,62 +11,79 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class ConcurrencyUtils { - private ConcurrencyUtils() { - } + private ConcurrencyUtils() {} - public static List transformAndCollectAsync(List originalList, Function transformer) { + public static List transformAndCollectAsync( + List originalList, Function transformer) { return transformAndCollectAsync(originalList, transformer, Collectors.toList()); } /** - * Transforms original list into the final list using the function transformer in an asynchronous fashion - * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end + * Transforms original list into the final list using the function transformer in an asynchronous + * fashion i.e. 
each element transform is run as a separate CompletableFuture and then joined at
+   * the end
    */
-  public static OUTPUT transformAndCollectAsync(Collection originalCollection,
-      Function transformer, Collector collector) {
+  public static OUTPUT transformAndCollectAsync(
+      Collection originalCollection,
+      Function transformer,
+      Collector collector) {
     return originalCollection.stream()
         .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element)))
-        .collect(Collectors.collectingAndThen(Collectors.toList(),
-            completableFutureList -> completableFutureList.stream().map(CompletableFuture::join)))
+        .collect(
+            Collectors.collectingAndThen(
+                Collectors.toList(),
+                completableFutureList ->
+                    completableFutureList.stream().map(CompletableFuture::join)))
         .collect(collector);
   }
 
-
   /**
-   * Transforms original list into the final list using the function transformer in an asynchronous fashion
-   * with exceptions handled by the input exceptionHandler
-   * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end
+   * Transforms original list into the final list using the function transformer in an asynchronous
+   * fashion with exceptions handled by the input exceptionHandler i.e. each element transform is
+   * run as a separate CompletableFuture and then joined at the end
    */
-  public static List transformAndCollectAsync(List originalList, Function transformer,
+  public static List transformAndCollectAsync(
+      List originalList,
+      Function transformer,
       BiFunction exceptionHandler) {
-    return transformAndCollectAsync(originalList, transformer, exceptionHandler, Collectors.toList());
+    return transformAndCollectAsync(
+        originalList, transformer, exceptionHandler, Collectors.toList());
   }
 
   /**
-   * Transforms original list into the final list using the function transformer in an asynchronous fashion
-   * with exceptions handled by the input exceptionHandler
-   * i.e. each element transform is run as a separate CompleteableFuture and then joined at the end
+   * Transforms original list into the final list using the function transformer in an asynchronous
+   * fashion with exceptions handled by the input exceptionHandler i.e. each element transform is
+   * run as a separate CompletableFuture and then joined at the end
    */
-  public static OUTPUT transformAndCollectAsync(Collection originalCollection,
-      Function transformer, BiFunction exceptionHandler, Collector collector) {
+  public static OUTPUT transformAndCollectAsync(
+      Collection originalCollection,
+      Function transformer,
+      BiFunction exceptionHandler,
+      Collector collector) {
     return originalCollection.stream()
-        .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element))
-            .exceptionally(e -> exceptionHandler.apply(element, e)))
+        .map(
+            element ->
+                CompletableFuture.supplyAsync(() -> transformer.apply(element))
+                    .exceptionally(e -> exceptionHandler.apply(element, e)))
         .filter(Objects::nonNull)
-        .collect(Collectors.collectingAndThen(Collectors.toList(),
-            completableFutureList -> completableFutureList.stream().map(CompletableFuture::join)))
+        .collect(
+            Collectors.collectingAndThen(
+                Collectors.toList(),
+                completableFutureList ->
+                    completableFutureList.stream().map(CompletableFuture::join)))
         .collect(collector);
   }
 
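// Illustrative sketch, not part of this patch: what the transformAndCollectAsync flavors above do
// in practice. Each element is transformed in its own CompletableFuture via supplyAsync and the
// futures are joined in encounter order, so one slow element delays only the final join. The urns
// list, fetchAspect call, Aspect type, and log field below are hypothetical names.
List<Aspect> aspects =
    ConcurrencyUtils.transformAndCollectAsync(urns, urn -> fetchAspect(urn));

// With the BiFunction exceptionHandler variant, a failing element is replaced by whatever the
// handler returns (null here) instead of failing the whole batch.
List<Aspect> bestEffort =
    ConcurrencyUtils.transformAndCollectAsync(
        urns,
        urn -> fetchAspect(urn),
        (urn, t) -> {
          log.error("Failed to fetch aspect for {}", urn, t);
          return null;
        });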
each element transform is + * run as a separate CompleteableFuture and then joined at the end */ - public static OUTPUT transformAndCollectAsync(Collection originalCollection, - Function transformer, BiFunction exceptionHandler, Collector collector) { + public static OUTPUT transformAndCollectAsync( + Collection originalCollection, + Function transformer, + BiFunction exceptionHandler, + Collector collector) { return originalCollection.stream() - .map(element -> CompletableFuture.supplyAsync(() -> transformer.apply(element)) - .exceptionally(e -> exceptionHandler.apply(element, e))) + .map( + element -> + CompletableFuture.supplyAsync(() -> transformer.apply(element)) + .exceptionally(e -> exceptionHandler.apply(element, e))) .filter(Objects::nonNull) - .collect(Collectors.collectingAndThen(Collectors.toList(), - completableFutureList -> completableFutureList.stream().map(CompletableFuture::join))) + .collect( + Collectors.collectingAndThen( + Collectors.toList(), + completableFutureList -> + completableFutureList.stream().map(CompletableFuture::join))) .collect(collector); } /** - * Wait for a list of futures to end with a timeout and only return results that were returned before the timeout - * expired + * Wait for a list of futures to end with a timeout and only return results that were returned + * before the timeout expired */ - public static List getAllCompleted(List> futuresList, long timeout, TimeUnit unit) { - CompletableFuture allFuturesResult = CompletableFuture.allOf(futuresList.toArray(new CompletableFuture[0])); + public static List getAllCompleted( + List> futuresList, long timeout, TimeUnit unit) { + CompletableFuture allFuturesResult = + CompletableFuture.allOf(futuresList.toArray(new CompletableFuture[0])); try { allFuturesResult.get(timeout, unit); } catch (Exception e) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java index 441661497cadc..a40cf4da7abbc 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/DataPlatformInstanceUtils.java @@ -19,17 +19,16 @@ import java.util.Optional; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DataPlatformInstanceUtils { - private DataPlatformInstanceUtils() { - } + private DataPlatformInstanceUtils() {} private static DataPlatformUrn getPlatformUrn(String name) { return new DataPlatformUrn(name.toLowerCase()); } - private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyAspect) throws URISyntaxException { + private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyAspect) + throws URISyntaxException { switch (entityType) { case "dataset": return ((DatasetKey) keyAspect).getPlatform(); @@ -40,7 +39,8 @@ private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyA case "dataFlow": return getPlatformUrn(((DataFlowKey) keyAspect).getOrchestrator()); case "dataJob": - return getPlatformUrn(DataFlowUrn.createFromUrn(((DataJobKey) keyAspect).getFlow()).getOrchestratorEntity()); + return getPlatformUrn( + DataFlowUrn.createFromUrn(((DataJobKey) keyAspect).getFlow()).getOrchestratorEntity()); case "dataProcess": return getPlatformUrn(((DataProcessKey) keyAspect).getOrchestrator()); case "mlModel": @@ -52,17 +52,23 @@ private static Urn getDefaultDataPlatform(String entityType, RecordTemplate keyA case "mlModelGroup": return 
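// A JDK-only sketch of the getAllCompleted contract shown above: wait up to
// the timeout, then return only the results of futures that finished in time.
// Names are illustrative; only java.util.concurrent is assumed.
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

class GetAllCompletedSketch {
  static <T> List<T> getAllCompleted(
      List<CompletableFuture<T>> futures, long timeout, TimeUnit unit) {
    CompletableFuture<Void> all =
        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
    try {
      all.get(timeout, unit);
    } catch (Exception e) {
      // Timeout or failure: fall through and keep whatever already completed.
    }
    return futures.stream()
        .filter(f -> f.isDone() && !f.isCompletedExceptionally())
        .map(CompletableFuture::join)
        .collect(Collectors.toList());
  }

  public static void main(String[] args) {
    System.out.println(
        getAllCompleted(
            List.of(CompletableFuture.completedFuture(1), CompletableFuture.completedFuture(2)),
            1,
            TimeUnit.SECONDS)); // [1, 2]
  }
}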
((MLModelGroupKey) keyAspect).getPlatform(); default: - log.debug(String.format("Failed to generate default platform for unknown entity type %s", entityType)); + log.debug( + String.format( + "Failed to generate default platform for unknown entity type %s", entityType)); return null; } } - public static Optional buildDataPlatformInstance(String entityType, RecordTemplate keyAspect) { + public static Optional buildDataPlatformInstance( + String entityType, RecordTemplate keyAspect) { try { return Optional.ofNullable(getDefaultDataPlatform(entityType, keyAspect)) .map(platform -> new DataPlatformInstance().setPlatform(platform)); } catch (URISyntaxException e) { - log.error("Failed to generate data platform instance for entity {}, keyAspect {}", entityType, keyAspect); + log.error( + "Failed to generate data platform instance for entity {}, keyAspect {}", + entityType, + keyAspect); return Optional.empty(); } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java index 2ad2d5028ba7d..161b0f845f7e2 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/EntityKeyUtils.java @@ -16,17 +16,16 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - @Slf4j public class EntityKeyUtils { private static final Logger logger = LoggerFactory.getLogger(EntityKeyUtils.class); - private EntityKeyUtils() { - } + private EntityKeyUtils() {} @Nonnull - public static Urn getUrnFromProposal(MetadataChangeProposal metadataChangeProposal, AspectSpec keyAspectSpec) { + public static Urn getUrnFromProposal( + MetadataChangeProposal metadataChangeProposal, AspectSpec keyAspectSpec) { if (metadataChangeProposal.hasEntityUrn()) { Urn urn = metadataChangeProposal.getEntityUrn(); @@ -39,11 +38,13 @@ public static Urn getUrnFromProposal(MetadataChangeProposal metadataChangePropos return urn; } if (metadataChangeProposal.hasEntityKeyAspect()) { - RecordTemplate keyAspectRecord = GenericRecordUtils.deserializeAspect( + RecordTemplate keyAspectRecord = + GenericRecordUtils.deserializeAspect( metadataChangeProposal.getEntityKeyAspect().getValue(), metadataChangeProposal.getEntityKeyAspect().getContentType(), keyAspectSpec); - return EntityKeyUtils.convertEntityKeyToUrn(keyAspectRecord, metadataChangeProposal.getEntityType()); + return EntityKeyUtils.convertEntityKeyToUrn( + keyAspectRecord, metadataChangeProposal.getEntityType()); } throw new IllegalArgumentException("One of urn and keyAspect must be set"); } @@ -61,39 +62,46 @@ public static Urn getUrnFromLog(MetadataChangeLog metadataChangeLog, AspectSpec return urn; } if (metadataChangeLog.hasEntityKeyAspect()) { - RecordTemplate keyAspectRecord = GenericRecordUtils.deserializeAspect( - metadataChangeLog.getEntityKeyAspect().getValue(), - metadataChangeLog.getEntityKeyAspect().getContentType(), - keyAspectSpec); - return EntityKeyUtils.convertEntityKeyToUrn(keyAspectRecord, metadataChangeLog.getEntityType()); + RecordTemplate keyAspectRecord = + GenericRecordUtils.deserializeAspect( + metadataChangeLog.getEntityKeyAspect().getValue(), + metadataChangeLog.getEntityKeyAspect().getContentType(), + keyAspectSpec); + return EntityKeyUtils.convertEntityKeyToUrn( + keyAspectRecord, metadataChangeLog.getEntityType()); } throw new IllegalArgumentException("One of urn and keyAspect must be set"); } /** - * Implicitly converts a normal {@link Urn} into a {@link 
RecordTemplate} Entity Key given - * the urn & the {@link AspectSpec} of the key. + * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given the urn + * & the {@link AspectSpec} of the key. * - * Parts of the urn are bound into fields in the keySchema based on field index. If the - * number of urn key parts does not match the number of fields in the key schema, an {@link IllegalArgumentException} will be thrown. + *
<p>
Parts of the urn are bound into fields in the keySchema based on field index. If the + * number of urn key parts does not match the number of fields in the key schema, an {@link + * IllegalArgumentException} will be thrown. * * @param urn raw entity urn * @param keyAspectSpec key aspect spec - * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of - * the provided key schema in order. - * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema (field number or type mismatch) + * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of the + * provided key schema in order. + * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema + * (field number or type mismatch) */ @Nonnull - public static RecordTemplate convertUrnToEntityKey(@Nonnull final Urn urn, @Nonnull final AspectSpec keyAspectSpec) { + public static RecordTemplate convertUrnToEntityKey( + @Nonnull final Urn urn, @Nonnull final AspectSpec keyAspectSpec) { RecordDataSchema keySchema = keyAspectSpec.getPegasusSchema(); // #1. Ensure we have a class to bind into. - Class clazz = keyAspectSpec.getDataTemplateClass().asSubclass(RecordTemplate.class); + Class clazz = + keyAspectSpec.getDataTemplateClass().asSubclass(RecordTemplate.class); // #2. Bind fields into a DataMap if (urn.getEntityKey().getParts().size() != keySchema.getFields().size()) { throw new IllegalArgumentException( - "Failed to convert urn to entity key: urns parts and key fields do not have same length for " + urn); + "Failed to convert urn to entity key: urns parts and key fields do not have same length for " + + urn); } final DataMap dataMap = new DataMap(); for (int i = 0; i < urn.getEntityKey().getParts().size(); i++) { @@ -107,28 +115,35 @@ public static RecordTemplate convertUrnToEntityKey(@Nonnull final Urn urn, @Nonn try { constructor = clazz.getConstructor(DataMap.class); return constructor.newInstance(dataMap); - } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + } catch (NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new IllegalArgumentException( - String.format("Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", + String.format( + "Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", clazz.getName())); } } /** - * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given - * the urn & the {@link RecordDataSchema} of the key. + * Implicitly converts a normal {@link Urn} into a {@link RecordTemplate} Entity Key given the urn + * & the {@link RecordDataSchema} of the key. * - * Parts of the urn are bound into fields in the keySchema based on field index. If the - * number of urn key parts does not match the number of fields in the key schema, an {@link IllegalArgumentException} will be thrown. + *
<p>
Parts of the urn are bound into fields in the keySchema based on field index. If the + * number of urn key parts does not match the number of fields in the key schema, an {@link + * IllegalArgumentException} will be thrown. * * @param urn raw entity urn * @param keySchema schema of the entity key - * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of - * the provided key schema in order. - * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema (field number or type mismatch) + * @return a {@link RecordTemplate} created by mapping the fields of the urn to fields of the + * provided key schema in order. + * @throws {@link IllegalArgumentException} if the urn cannot be converted into the key schema + * (field number or type mismatch) */ @Nonnull - public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn urn, @Nonnull final RecordDataSchema keySchema) { + public static RecordTemplate convertUrnToEntityKeyInternal( + @Nonnull final Urn urn, @Nonnull final RecordDataSchema keySchema) { // #1. Ensure we have a class to bind into. Class clazz; @@ -136,8 +151,10 @@ public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn ur clazz = Class.forName(keySchema.getFullName()).asSubclass(RecordTemplate.class); } catch (ClassNotFoundException e) { throw new IllegalArgumentException( - String.format("Failed to find RecordTemplate class associated with provided RecordDataSchema named %s", - keySchema.getFullName()), e); + String.format( + "Failed to find RecordTemplate class associated with provided RecordDataSchema named %s", + keySchema.getFullName()), + e); } // #2. Bind fields into a DataMap @@ -157,29 +174,37 @@ public static RecordTemplate convertUrnToEntityKeyInternal(@Nonnull final Urn ur try { constructor = clazz.getConstructor(DataMap.class); return constructor.newInstance(dataMap); - } catch (NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e) { + } catch (NoSuchMethodException + | InstantiationException + | IllegalAccessException + | InvocationTargetException e) { throw new IllegalArgumentException( - String.format("Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", + String.format( + "Failed to instantiate RecordTemplate with name %s. Missing constructor taking DataMap as arg.", clazz.getName())); } } /** - * Implicitly converts an Entity Key {@link RecordTemplate} into the corresponding {@link Urn} string. + * Implicitly converts an Entity Key {@link RecordTemplate} into the corresponding {@link Urn} + * string. * - * Parts of the key record are bound into fields in the urn based on field index. + *
<p>
Parts of the key record are bound into fields in the urn based on field index. * * @param keyAspect a {@link RecordTemplate} representing the key. * @param entityName name of the entity to use during Urn construction * @return an {@link Urn} created by binding the fields of the key aspect to an Urn. */ @Nonnull - public static Urn convertEntityKeyToUrn(@Nonnull final RecordTemplate keyAspect, @Nonnull final String entityName) { + public static Urn convertEntityKeyToUrn( + @Nonnull final RecordTemplate keyAspect, @Nonnull final String entityName) { final List urnParts = new ArrayList<>(); for (RecordDataSchema.Field field : keyAspect.schema().getFields()) { Object value = keyAspect.data().get(field.getName()); String valueString = value == null ? "" : value.toString(); - urnParts.add(valueString); // TODO: Determine whether all fields, including urns, should be URL encoded. + urnParts.add( + valueString); // TODO: Determine whether all fields, including urns, should be URL + // encoded. } return Urn.createFromTuple(entityName, urnParts); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java index 3ef415b4d31be..fc28367e6c7ee 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/GenericRecordUtils.java @@ -1,54 +1,51 @@ package com.linkedin.metadata.utils; +import com.datahub.util.RecordUtils; import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.GenericPayload; import java.nio.charset.StandardCharsets; import javax.annotation.Nonnull; - public class GenericRecordUtils { public static final String JSON = "application/json"; - private GenericRecordUtils() { - } + private GenericRecordUtils() {} - /** - * Deserialize the given value into the aspect based on the input aspectSpec - */ + /** Deserialize the given value into the aspect based on the input aspectSpec */ @Nonnull - public static RecordTemplate deserializeAspect(@Nonnull ByteString aspectValue, @Nonnull String contentType, + public static RecordTemplate deserializeAspect( + @Nonnull ByteString aspectValue, + @Nonnull String contentType, @Nonnull AspectSpec aspectSpec) { return deserializeAspect(aspectValue, contentType, aspectSpec.getDataTemplateClass()); } @Nonnull - public static T deserializeAspect(@Nonnull ByteString aspectValue, - @Nonnull String contentType, @Nonnull Class clazz) { + public static T deserializeAspect( + @Nonnull ByteString aspectValue, @Nonnull String contentType, @Nonnull Class clazz) { if (!contentType.equals(JSON)) { - throw new IllegalArgumentException(String.format("%s content type is not supported", contentType)); + throw new IllegalArgumentException( + String.format("%s content type is not supported", contentType)); } return RecordUtils.toRecordTemplate(clazz, aspectValue.asString(StandardCharsets.UTF_8)); } @Nonnull public static T deserializePayload( - @Nonnull ByteString payloadValue, - @Nonnull String contentType, - @Nonnull Class clazz) { + @Nonnull ByteString payloadValue, @Nonnull String contentType, @Nonnull Class clazz) { if (!contentType.equals(JSON)) { - throw new IllegalArgumentException(String.format("%s content type is not supported", contentType)); + throw new IllegalArgumentException( + 
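// A simplified model of the positional urn-to-key binding described above:
// urn parts map onto key-schema fields by index, and a length mismatch is
// rejected, as in convertUrnToEntityKey. Plain Maps stand in for Pegasus
// DataMap/RecordTemplate, so this sketches the convention only.
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class UrnKeyBindingSketch {
  static Map<String, String> bindParts(List<String> urnParts, List<String> keyFields) {
    if (urnParts.size() != keyFields.size()) {
      throw new IllegalArgumentException("urn parts and key fields do not have the same length");
    }
    Map<String, String> dataMap = new LinkedHashMap<>();
    for (int i = 0; i < keyFields.size(); i++) {
      dataMap.put(keyFields.get(i), urnParts.get(i)); // bind by field index
    }
    return dataMap;
  }

  public static void main(String[] args) {
    System.out.println(
        bindParts(
            List.of("part1", "urn:li:testEntity2:part2", "VALUE_1"),
            List.of("keyPart1", "keyPart2", "keyPart3")));
  }
}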
String.format("%s content type is not supported", contentType)); } return RecordUtils.toRecordTemplate(clazz, payloadValue.asString(StandardCharsets.UTF_8)); } @Nonnull public static T deserializePayload( - @Nonnull ByteString payloadValue, - @Nonnull Class clazz) { + @Nonnull ByteString payloadValue, @Nonnull Class clazz) { return deserializePayload(payloadValue, JSON, clazz); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java index d923005c8c023..d9a4768ada05f 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/IngestionUtils.java @@ -1,36 +1,38 @@ package com.linkedin.metadata.utils; +import javax.annotation.Nonnull; import org.json.JSONException; import org.json.JSONObject; -import javax.annotation.Nonnull; - - public class IngestionUtils { private static final String PIPELINE_NAME = "pipeline_name"; - private IngestionUtils() { - } + private IngestionUtils() {} /** - * Injects a pipeline_name into a recipe if there isn't a pipeline_name already there. - * The pipeline_name will be the urn of the ingestion source. + * Injects a pipeline_name into a recipe if there isn't a pipeline_name already there. The + * pipeline_name will be the urn of the ingestion source. * * @param pipelineName the new pipeline name in the recipe. * @return a modified recipe JSON string */ - public static String injectPipelineName(@Nonnull String originalJson, @Nonnull final String pipelineName) { + public static String injectPipelineName( + @Nonnull String originalJson, @Nonnull final String pipelineName) { try { final JSONObject jsonRecipe = new JSONObject(originalJson); - boolean hasPipelineName = jsonRecipe.has(PIPELINE_NAME) && jsonRecipe.get(PIPELINE_NAME) != null && !jsonRecipe.get(PIPELINE_NAME).equals(""); + boolean hasPipelineName = + jsonRecipe.has(PIPELINE_NAME) + && jsonRecipe.get(PIPELINE_NAME) != null + && !jsonRecipe.get(PIPELINE_NAME).equals(""); if (!hasPipelineName) { jsonRecipe.put(PIPELINE_NAME, pipelineName); return jsonRecipe.toString(); } } catch (JSONException e) { - throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided.", e); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided.", e); } return originalJson; } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java index 9794d101ecda9..cde83c1382283 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/PegasusUtils.java @@ -17,49 +17,66 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * Static utility class providing methods for extracting entity metadata from Pegasus models. - */ +/** Static utility class providing methods for extracting entity metadata from Pegasus models. 
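// A runnable sketch of the pipeline_name injection implemented above in
// IngestionUtils, assuming only the org.json classes the utility itself
// imports. The recipe string is a made-up example.
import org.json.JSONObject;

class InjectPipelineNameSketch {
  static String inject(String recipeJson, String pipelineName) {
    JSONObject recipe = new JSONObject(recipeJson);
    // Only inject when no non-empty pipeline_name is already present, as above.
    boolean hasName = recipe.has("pipeline_name") && !recipe.get("pipeline_name").equals("");
    if (!hasName) {
      recipe.put("pipeline_name", pipelineName);
    }
    return recipe.toString();
  }

  public static void main(String[] args) {
    System.out.println(
        inject("{\"source\":{\"type\":\"snowflake\"}}", "urn:li:ingestionSource:12345"));
  }
}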
*/ @Slf4j public class PegasusUtils { - private PegasusUtils() { - } + private PegasusUtils() {} public static String getEntityNameFromSchema(final RecordDataSchema entitySnapshotSchema) { - final Object entityAnnotationObj = entitySnapshotSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); + final Object entityAnnotationObj = + entitySnapshotSchema.getProperties().get(EntityAnnotation.ANNOTATION_NAME); if (entityAnnotationObj != null) { - return EntityAnnotation.fromSchemaProperty(entityAnnotationObj, entitySnapshotSchema.getFullName()).getName(); + return EntityAnnotation.fromSchemaProperty( + entityAnnotationObj, entitySnapshotSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract entity name from provided schema %s", entitySnapshotSchema.getName())); + log.error( + String.format( + "Failed to extract entity name from provided schema %s", + entitySnapshotSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract entity name from provided schema %s", entitySnapshotSchema.getName())); + String.format( + "Failed to extract entity name from provided schema %s", + entitySnapshotSchema.getName())); } // TODO: Figure out a better iteration strategy. - public static String getAspectNameFromFullyQualifiedName(final String fullyQualifiedRecordTemplateName) { - final RecordTemplate template = RecordUtils.toRecordTemplate(fullyQualifiedRecordTemplateName, new DataMap()); + public static String getAspectNameFromFullyQualifiedName( + final String fullyQualifiedRecordTemplateName) { + final RecordTemplate template = + RecordUtils.toRecordTemplate(fullyQualifiedRecordTemplateName, new DataMap()); final RecordDataSchema aspectSchema = template.schema(); return getAspectNameFromSchema(aspectSchema); } public static String getAspectNameFromSchema(final RecordDataSchema aspectSchema) { - final Object aspectAnnotationObj = aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); + final Object aspectAnnotationObj = + aspectSchema.getProperties().get(AspectAnnotation.ANNOTATION_NAME); if (aspectAnnotationObj != null) { - return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()).getName(); + return AspectAnnotation.fromSchemaProperty(aspectAnnotationObj, aspectSchema.getFullName()) + .getName(); } - log.error(String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + log.error( + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); throw new IllegalArgumentException( - String.format("Failed to extract aspect name from provided schema %s", aspectSchema.getName())); + String.format( + "Failed to extract aspect name from provided schema %s", aspectSchema.getName())); } - public static Class getDataTemplateClassFromSchema(final NamedDataSchema schema, final Class clazz) { + public static Class getDataTemplateClassFromSchema( + final NamedDataSchema schema, final Class clazz) { try { - return Class.forName(schema.getFullName()).asSubclass(clazz); + return Class.forName(schema.getFullName()).asSubclass(clazz); } catch (ClassNotFoundException e) { - log.error("Unable to find class for RecordDataSchema named " + schema.getFullName() + " " + e.getMessage()); - throw new ModelConversionException("Unable to find class for RecordDataSchema named " + schema.getFullName(), e); + log.error( + "Unable to find class for RecordDataSchema named " + + schema.getFullName() + + " " + + e.getMessage()); + throw new 
ModelConversionException( + "Unable to find class for RecordDataSchema named " + schema.getFullName(), e); } } @@ -67,9 +84,17 @@ public static String urnToEntityName(final Urn urn) { return urn.getEntityType(); } - public static MetadataChangeLog constructMCL(@Nullable MetadataChangeProposal base, String entityName, Urn urn, ChangeType changeType, - String aspectName, AuditStamp auditStamp, RecordTemplate newAspectValue, SystemMetadata newSystemMetadata, - RecordTemplate oldAspectValue, SystemMetadata oldSystemMetadata) { + public static MetadataChangeLog constructMCL( + @Nullable MetadataChangeProposal base, + String entityName, + Urn urn, + ChangeType changeType, + String aspectName, + AuditStamp auditStamp, + RecordTemplate newAspectValue, + SystemMetadata newSystemMetadata, + RecordTemplate oldAspectValue, + SystemMetadata oldSystemMetadata) { final MetadataChangeLog metadataChangeLog; if (base != null) { metadataChangeLog = new MetadataChangeLog(new DataMap(base.data())); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java index 69bd3b461eb12..35e15c1e5b693 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SearchUtil.java @@ -8,23 +8,19 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.FilterValue; - +import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.net.URISyntaxException; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; - -import com.linkedin.metadata.utils.elasticsearch.IndexConvention; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilders; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - - @Slf4j public class SearchUtil { public static final String AGGREGATION_SEPARATOR_CHAR = "␞"; @@ -33,23 +29,29 @@ public class SearchUtil { private static final String URN_PREFIX = "urn:"; private static final String REMOVED = "removed"; - private SearchUtil() { - } + private SearchUtil() {} /* * @param aggregations the aggregations coming back from elasticsearch combined with the filters from the search request * @param filteredValues the set of values provided by the search request */ - public static List convertToFilters(Map aggregations, Set filteredValues) { - return aggregations.entrySet().stream().map(entry -> { - return createFilterValue(entry.getKey(), entry.getValue(), filteredValues.contains(entry.getKey())); - }).sorted(Comparator.comparingLong(value -> -value.getFacetCount())).collect(Collectors.toList()); + public static List convertToFilters( + Map aggregations, Set filteredValues) { + return aggregations.entrySet().stream() + .map( + entry -> { + return createFilterValue( + entry.getKey(), entry.getValue(), filteredValues.contains(entry.getKey())); + }) + .sorted(Comparator.comparingLong(value -> -value.getFacetCount())) + .collect(Collectors.toList()); } public static FilterValue createFilterValue(String value, Long facetCount, Boolean isFilteredOn) { // TODO(indy): test this String[] aggregationTokens = value.split(AGGREGATION_SEPARATOR_CHAR); - FilterValue result = new 
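// A sketch of the reflective loading pattern used above: resolve a class by
// the schema's fully-qualified name, then verify it is a subtype before use.
// java.util.ArrayList stands in for schema.getFullName() purely for illustration.
class ReflectiveLoadSketch {
  public static void main(String[] args) throws Exception {
    String fullName = "java.util.ArrayList";
    Class<? extends java.util.AbstractList> clazz =
        Class.forName(fullName).asSubclass(java.util.AbstractList.class);
    System.out.println(clazz.getName()); // java.util.ArrayList
  }
}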
FilterValue().setValue(value).setFacetCount(facetCount).setFiltered(isFilteredOn); + FilterValue result = + new FilterValue().setValue(value).setFacetCount(facetCount).setFiltered(isFilteredOn); String lastValue = aggregationTokens[aggregationTokens.length - 1]; if (lastValue.startsWith(URN_PREFIX)) { try { @@ -61,56 +63,77 @@ public static FilterValue createFilterValue(String value, Long facetCount, Boole return result; } - private static Criterion transformEntityTypeCriterion(Criterion criterion, IndexConvention indexConvention) { - return criterion.setField("_index").setValues( - new StringArray(criterion.getValues().stream().map(value -> String.join("", value.split("_"))) - .map(indexConvention::getEntityIndexName) - .collect(Collectors.toList()))) - .setValue(indexConvention.getEntityIndexName(String.join("", criterion.getValue().split("_")))); + private static Criterion transformEntityTypeCriterion( + Criterion criterion, IndexConvention indexConvention) { + return criterion + .setField("_index") + .setValues( + new StringArray( + criterion.getValues().stream() + .map(value -> String.join("", value.split("_"))) + .map(indexConvention::getEntityIndexName) + .collect(Collectors.toList()))) + .setValue( + indexConvention.getEntityIndexName(String.join("", criterion.getValue().split("_")))); } - private static ConjunctiveCriterion transformConjunctiveCriterion(ConjunctiveCriterion conjunctiveCriterion, - IndexConvention indexConvention) { - return new ConjunctiveCriterion().setAnd( - conjunctiveCriterion.getAnd().stream().map( - criterion -> criterion.getField().equalsIgnoreCase(INDEX_VIRTUAL_FIELD) - ? transformEntityTypeCriterion(criterion, indexConvention) - : criterion) - .collect(Collectors.toCollection(CriterionArray::new))); + private static ConjunctiveCriterion transformConjunctiveCriterion( + ConjunctiveCriterion conjunctiveCriterion, IndexConvention indexConvention) { + return new ConjunctiveCriterion() + .setAnd( + conjunctiveCriterion.getAnd().stream() + .map( + criterion -> + criterion.getField().equalsIgnoreCase(INDEX_VIRTUAL_FIELD) + ? transformEntityTypeCriterion(criterion, indexConvention) + : criterion) + .collect(Collectors.toCollection(CriterionArray::new))); } - private static ConjunctiveCriterionArray transformConjunctiveCriterionArray(ConjunctiveCriterionArray criterionArray, - IndexConvention indexConvention) { + private static ConjunctiveCriterionArray transformConjunctiveCriterionArray( + ConjunctiveCriterionArray criterionArray, IndexConvention indexConvention) { return new ConjunctiveCriterionArray( - criterionArray.stream().map( - conjunctiveCriterion -> transformConjunctiveCriterion(conjunctiveCriterion, indexConvention)) + criterionArray.stream() + .map( + conjunctiveCriterion -> + transformConjunctiveCriterion(conjunctiveCriterion, indexConvention)) .collect(Collectors.toList())); } /** - * Allows filtering on entities which are stored as different indices under the hood by transforming the tag - * _entityType to _index and updating the type to the index name. + * Allows filtering on entities which are stored as different indices under the hood by + * transforming the tag _entityType to _index and updating the type to the index name. 
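// A tiny illustration of the _entityType-to-_index rewrite described above:
// underscores are stripped from the requested entity-type value, which is then
// mapped to its entity index name ("...index_v2" under the default convention).
// The filter value here is hypothetical.
class EntityTypeToIndexSketch {
  public static void main(String[] args) {
    String requested = "data_job"; // hypothetical _entityType criterion value
    String entityName = String.join("", requested.split("_")); // "datajob"
    String indexName = entityName + "index_v2"; // simplified, no prefix
    System.out.println(indexName); // datajobindex_v2
  }
}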
* - * @param filter The filter to parse and transform if needed + * @param filter The filter to parse and transform if needed * @param indexConvention The index convention used to generate the index name for an entity * @return A filter, with the changes if necessary */ - public static Filter transformFilterForEntities(Filter filter, @Nonnull IndexConvention indexConvention) { + public static Filter transformFilterForEntities( + Filter filter, @Nonnull IndexConvention indexConvention) { if (filter != null && filter.getOr() != null) { - return new Filter().setOr(transformConjunctiveCriterionArray(filter.getOr(), indexConvention)); + return new Filter() + .setOr(transformConjunctiveCriterionArray(filter.getOr(), indexConvention)); } return filter; } /** - * Applies a default filter to remove entities that are soft deleted only if there isn't a filter for the REMOVED field already + * Applies a default filter to remove entities that are soft deleted only if there isn't a filter + * for the REMOVED field already */ - public static BoolQueryBuilder filterSoftDeletedByDefault(@Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { + public static BoolQueryBuilder filterSoftDeletedByDefault( + @Nullable Filter filter, @Nullable BoolQueryBuilder filterQuery) { boolean removedInOrFilter = false; if (filter != null) { - removedInOrFilter = filter.getOr().stream().anyMatch( - or -> or.getAnd().stream().anyMatch(criterion -> criterion.getField().equals(REMOVED) || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX)) - ); + removedInOrFilter = + filter.getOr().stream() + .anyMatch( + or -> + or.getAnd().stream() + .anyMatch( + criterion -> + criterion.getField().equals(REMOVED) + || criterion.getField().equals(REMOVED + KEYWORD_SUFFIX))); } if (!removedInOrFilter) { filterQuery.mustNot(QueryBuilders.matchQuery(REMOVED, true)); diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java index f4be950575624..b0f42231b27f3 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/SystemMetadataUtils.java @@ -7,12 +7,11 @@ @Slf4j public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} - public static SystemMetadata createDefaultSystemMetadata() { - return new SystemMetadata() - .setRunId(Constants.DEFAULT_RUN_ID) - .setLastObserved(System.currentTimeMillis()); - } + public static SystemMetadata createDefaultSystemMetadata() { + return new SystemMetadata() + .setRunId(Constants.DEFAULT_RUN_ID) + .setLastObserved(System.currentTimeMillis()); + } } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java index 4179345370007..4a3f78fcef7bd 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConvention.java @@ -6,10 +6,7 @@ import java.util.Optional; import javax.annotation.Nonnull; - -/** - * The convention for naming search indices - */ +/** The convention for naming search indices */ public interface IndexConvention { Optional getPrefix(); @@ -36,6 +33,7 @@ public interface IndexConvention { /** * Inverse of getEntityIndexName + * * @param indexName The 
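// A sketch of the filterSoftDeletedByDefault behavior above, assuming the
// OpenSearch query builders this class imports: unless the caller already
// filtered on "removed", documents with removed=true are excluded.
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;

class SoftDeleteDefaultSketch {
  public static void main(String[] args) {
    BoolQueryBuilder filterQuery = QueryBuilders.boolQuery();
    // No explicit "removed" criterion in the request, so apply the default.
    filterQuery.mustNot(QueryBuilders.matchQuery("removed", true));
    System.out.println(filterQuery); // prints the resulting query DSL as JSON
  }
}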
index name to parse * @return a string, the entity name that that index is for, or empty if one cannot be extracted */ @@ -43,9 +41,10 @@ public interface IndexConvention { /** * Inverse of getEntityIndexName + * * @param timeseriesAspectIndexName The index name to parse - * @return a pair of strings, the entity name and the aspect name that that index is for, - * or empty if one cannot be extracted + * @return a pair of strings, the entity name and the aspect name that that index is for, or empty + * if one cannot be extracted */ Optional> getEntityAndAspectName(String timeseriesAspectIndexName); } diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java index e607139203b57..764630eb73973 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImpl.java @@ -10,7 +10,6 @@ import javax.annotation.Nullable; import org.apache.commons.lang3.StringUtils; - // Default implementation of search index naming convention public class IndexConventionImpl implements IndexConvention { // Map from Entity name -> Index name @@ -19,17 +18,25 @@ public class IndexConventionImpl implements IndexConvention { private final String _getAllEntityIndicesPattern; private final String _getAllTimeseriesIndicesPattern; - private final static String ENTITY_INDEX_VERSION = "v2"; - private final static String ENTITY_INDEX_SUFFIX = "index"; - private final static String TIMESERIES_INDEX_VERSION = "v1"; - private final static String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; + private static final String ENTITY_INDEX_VERSION = "v2"; + private static final String ENTITY_INDEX_SUFFIX = "index"; + private static final String TIMESERIES_INDEX_VERSION = "v1"; + private static final String TIMESERIES_ENTITY_INDEX_SUFFIX = "aspect"; public IndexConventionImpl(@Nullable String prefix) { _prefix = StringUtils.isEmpty(prefix) ? 
Optional.empty() : Optional.of(prefix); _getAllEntityIndicesPattern = - _prefix.map(p -> p + "_").orElse("") + "*" + ENTITY_INDEX_SUFFIX + "_" + ENTITY_INDEX_VERSION; + _prefix.map(p -> p + "_").orElse("") + + "*" + + ENTITY_INDEX_SUFFIX + + "_" + + ENTITY_INDEX_VERSION; _getAllTimeseriesIndicesPattern = - _prefix.map(p -> p + "_").orElse("") + "*" + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + TIMESERIES_INDEX_VERSION; + _prefix.map(p -> p + "_").orElse("") + + "*" + + TIMESERIES_ENTITY_INDEX_SUFFIX + + "_" + + TIMESERIES_INDEX_VERSION; } private String createIndexName(String baseName) { @@ -85,7 +92,9 @@ public String getEntityIndexName(String entityName) { @Nonnull @Override public String getTimeseriesAspectIndexName(String entityName, String aspectName) { - return this.getIndexName(entityName + "_" + aspectName) + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + return this.getIndexName(entityName + "_" + aspectName) + + TIMESERIES_ENTITY_INDEX_SUFFIX + + "_" + TIMESERIES_INDEX_VERSION; } @@ -108,8 +117,10 @@ public Optional getEntityName(String indexName) { @Override public Optional> getEntityAndAspectName(String timeseriesAspectIndexName) { - Optional entityAndAspect = extractIndexBase(timeseriesAspectIndexName, TIMESERIES_ENTITY_INDEX_SUFFIX + "_" - + TIMESERIES_INDEX_VERSION); + Optional entityAndAspect = + extractIndexBase( + timeseriesAspectIndexName, + TIMESERIES_ENTITY_INDEX_SUFFIX + "_" + TIMESERIES_INDEX_VERSION); if (entityAndAspect.isPresent()) { String[] entityAndAspectTokens = entityAndAspect.get().split("_"); if (entityAndAspectTokens.length == 2) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java index 982557f2b5358..885ed74d11471 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/exception/UnsupportedGraphEntities.java @@ -1,8 +1,6 @@ package com.linkedin.metadata.utils.exception; -/** - * An exception to be thrown when certain graph entities are not supported. - */ +/** An exception to be thrown when certain graph entities are not supported. */ public class UnsupportedGraphEntities extends RuntimeException { public UnsupportedGraphEntities(String message) { diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java index d053272c19e7d..67f0ae4c77eaf 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/log/LogMessageFilter.java @@ -6,16 +6,13 @@ import java.util.ArrayList; import java.util.List; - /** - * A Log Filter that can be configured to omit logs containing a specific message string. - * Configured inside logback.xml. + * A Log Filter that can be configured to omit logs containing a specific message string. Configured + * inside logback.xml. */ public class LogMessageFilter extends AbstractMatcherFilter { - /** - * A set of messages to exclude. - */ + /** A set of messages to exclude. 
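// A standalone sketch of the naming convention IndexConventionImpl encodes
// above: entity indices end in "index_v2", timeseries aspect indices in
// "aspect_v1", each with an optional deployment prefix. Simplified relative
// to the real implementation.
import java.util.Optional;

class IndexNameSketch {
  static String entityIndex(Optional<String> prefix, String entityName) {
    return prefix.map(p -> p + "_").orElse("") + entityName.toLowerCase() + "index_v2";
  }

  static String timeseriesIndex(Optional<String> prefix, String entityName, String aspectName) {
    return prefix.map(p -> p + "_").orElse("")
        + entityName.toLowerCase() + "_" + aspectName.toLowerCase() + "aspect_v1";
  }

  public static void main(String[] args) {
    System.out.println(entityIndex(Optional.empty(), "dataset")); // datasetindex_v2
    System.out.println(timeseriesIndex(Optional.of("prefix"), "dataset", "datasetusagestatistics"));
    // prefix_dataset_datasetusagestatisticsaspect_v1
  }
}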
*/ private final List excluded = new ArrayList<>(); @Override @@ -33,4 +30,4 @@ public FilterReply decide(ILoggingEvent event) { public void addExcluded(String message) { this.excluded.add(message); } -} \ No newline at end of file +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java b/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java index 9a8848e090fb8..3a47c11f8d748 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/utils/metrics/MetricUtils.java @@ -7,10 +7,8 @@ import com.codahale.metrics.Timer; import com.codahale.metrics.jmx.JmxReporter; - public class MetricUtils { - private MetricUtils() { - } + private MetricUtils() {} public static final String DELIMITER = "_"; @@ -32,7 +30,8 @@ public static Counter counter(Class klass, String metricName) { public static void exceptionCounter(Class klass, String metricName, Throwable t) { String[] splitClassName = t.getClass().getName().split("[.]"); - String snakeCase = splitClassName[splitClassName.length - 1].replaceAll("([A-Z][a-z])", DELIMITER + "$1"); + String snakeCase = + splitClassName[splitClassName.length - 1].replaceAll("([A-Z][a-z])", DELIMITER + "$1"); counter(klass, metricName).inc(); counter(klass, metricName + DELIMITER + snakeCase).inc(); @@ -50,7 +49,8 @@ public static Timer timer(String metricName) { return REGISTRY.timer(MetricRegistry.name(metricName)); } - public static > T gauge(Class clazz, String metricName, MetricRegistry.MetricSupplier supplier) { + public static > T gauge( + Class clazz, String metricName, MetricRegistry.MetricSupplier supplier) { return REGISTRY.gauge(MetricRegistry.name(clazz, metricName), supplier); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java index e120fdb3b342f..7a6479a313244 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.utils; +import static org.testng.Assert.*; + import com.datahub.test.KeyPartEnum; import com.datahub.test.TestEntityKey; import com.linkedin.common.urn.Urn; @@ -8,11 +10,8 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; -/** - * Tests the capabilities of {@link EntityKeyUtils} - */ +/** Tests the capabilities of {@link EntityKeyUtils} */ public class EntityKeyUtilsTest { @Test @@ -22,36 +21,42 @@ public void testConvertEntityKeyToUrn() throws Exception { key.setKeyPart2(Urn.createFromString("urn:li:testEntity2:part2")); key.setKeyPart3(KeyPartEnum.VALUE_1); - final Urn expectedUrn = Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); + final Urn expectedUrn = + Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); final Urn actualUrn = EntityKeyUtils.convertEntityKeyToUrn(key, "testEntity1"); assertEquals(actualUrn.toString(), expectedUrn.toString()); } @Test public void testConvertEntityKeyToUrnInternal() throws Exception { - final Urn urn = Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); + final Urn urn = + 
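// A quick illustration of the exception-name snake_casing performed by
// MetricUtils.exceptionCounter above, using the same regex with the "_"
// delimiter; getSimpleName() stands in for the last segment of getName().
class ExceptionMetricNameSketch {
  public static void main(String[] args) {
    String simpleName = IllegalArgumentException.class.getSimpleName();
    String snakeCase = simpleName.replaceAll("([A-Z][a-z])", "_" + "$1");
    System.out.println(snakeCase); // _Illegal_Argument_Exception
  }
}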
Urn.createFromString("urn:li:testEntity1:(part1,urn:li:testEntity2:part2,VALUE_1)"); final TestEntityKey expectedKey = new TestEntityKey(); expectedKey.setKeyPart1("part1"); expectedKey.setKeyPart2(Urn.createFromString("urn:li:testEntity2:part2")); expectedKey.setKeyPart3(KeyPartEnum.VALUE_1); - final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKeyInternal(urn, expectedKey.schema()); + final RecordTemplate actualKey = + EntityKeyUtils.convertUrnToEntityKeyInternal(urn, expectedKey.schema()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } @Test public void testConvertEntityUrnToKey() throws Exception { - final Urn urn = Urn.createFromString("urn:li:testEntity:(part1,urn:li:testEntity:part2,VALUE_1)"); + final Urn urn = + Urn.createFromString("urn:li:testEntity:(part1,urn:li:testEntity:part2,VALUE_1)"); final TestEntityKey expectedKey = new TestEntityKey(); expectedKey.setKeyPart1("part1"); expectedKey.setKeyPart2(Urn.createFromString("urn:li:testEntity:part2")); expectedKey.setKeyPart3(KeyPartEnum.VALUE_1); - ConfigEntityRegistry entityRegistry = new ConfigEntityRegistry( - TestEntityKey.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); + ConfigEntityRegistry entityRegistry = + new ConfigEntityRegistry( + TestEntityKey.class.getClassLoader().getResourceAsStream("test-entity-registry.yml")); final EntitySpec entitySpec = entityRegistry.getEntitySpec(PegasusUtils.urnToEntityName(urn)); - final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); + final RecordTemplate actualKey = + EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java index 8b2078c7b9533..6288ed80e6881 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/IngestionUtilsTest.java @@ -1,29 +1,29 @@ package com.linkedin.metadata.utils; -import org.testng.annotations.Test; - - import static org.testng.Assert.assertEquals; +import org.testng.annotations.Test; + public class IngestionUtilsTest { private final String ingestionSourceUrn = "urn:li:ingestionSource:12345"; @Test public void injectPipelineNameWhenThere() { - String recipe = "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"test\"}"; + String recipe = + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"test\"}"; assertEquals(recipe, IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn)); } @Test public void injectPipelineNameWhenNotThere() { - String recipe = "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}}}"; + String recipe = + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}}}"; recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn); assertEquals( recipe, - "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"urn:li:ingestionSource:12345\"}" - ); + "{\"source\":{\"type\":\"snowflake\",\"config\":{\"stateful_ingestion\":{\"enabled\":true}}},\"pipeline_name\":\"urn:li:ingestionSource:12345\"}"); } } diff --git 
a/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java index b60b7fb64f3f9..fd606f57477a0 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/SearchUtilTest.java @@ -1,5 +1,7 @@ package com.linkedin.metadata.utils; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.search.FilterValue; @@ -9,12 +11,7 @@ import java.util.Set; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - -/** - * Tests the capabilities of {@link EntityKeyUtils} - */ +/** Tests the capabilities of {@link EntityKeyUtils} */ public class SearchUtilTest { @Test @@ -25,21 +22,22 @@ public void testConvertToFilters() throws Exception { Set filteredValues = ImmutableSet.of("urn:li:tag:def"); - List filters = - SearchUtil.convertToFilters(aggregations, filteredValues); - - assertEquals(filters.get(0), new FilterValue() - .setFiltered(false) - .setValue("urn:li:tag:abc") - .setEntity(Urn.createFromString("urn:li:tag:abc")) - .setFacetCount(3L) - ); - - assertEquals(filters.get(1), new FilterValue() - .setFiltered(true) - .setValue("urn:li:tag:def") - .setEntity(Urn.createFromString("urn:li:tag:def")) - .setFacetCount(0L) - ); + List filters = SearchUtil.convertToFilters(aggregations, filteredValues); + + assertEquals( + filters.get(0), + new FilterValue() + .setFiltered(false) + .setValue("urn:li:tag:abc") + .setEntity(Urn.createFromString("urn:li:tag:abc")) + .setFacetCount(3L)); + + assertEquals( + filters.get(1), + new FilterValue() + .setFiltered(true) + .setValue("urn:li:tag:def") + .setEntity(Urn.createFromString("urn:li:tag:def")) + .setFacetCount(0L)); } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java index 5310871140fc9..f3e52c9989775 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/elasticsearch/IndexConventionImplTest.java @@ -1,12 +1,11 @@ package com.linkedin.metadata.utils.elasticsearch; +import static org.testng.Assert.*; + import com.linkedin.util.Pair; import java.util.Optional; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class IndexConventionImplTest { @Test @@ -19,7 +18,9 @@ public void testIndexConventionNoPrefix() { assertEquals(indexConventionNoPrefix.getEntityName(expectedIndexName), Optional.of(entityName)); assertEquals(indexConventionNoPrefix.getEntityName("totally not an index"), Optional.empty()); assertEquals(indexConventionNoPrefix.getEntityName("dataset_v2"), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.of("dashboard")); + assertEquals( + indexConventionNoPrefix.getEntityName("dashboardindex_v2_1683649932260"), + Optional.of("dashboard")); } @Test @@ -32,22 +33,32 @@ public void testIndexConventionPrefix() { assertEquals(indexConventionPrefix.getEntityName(expectedIndexName), Optional.of(entityName)); assertEquals(indexConventionPrefix.getEntityName("totally not an index"), Optional.empty()); assertEquals(indexConventionPrefix.getEntityName("prefix_dataset_v2"), 
Optional.empty()); - assertEquals(indexConventionPrefix.getEntityName("prefix_dashboardindex_v2_1683649932260"), Optional.of("dashboard")); - assertEquals(indexConventionPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityName("prefix_dashboardindex_v2_1683649932260"), + Optional.of("dashboard")); + assertEquals( + indexConventionPrefix.getEntityName("dashboardindex_v2_1683649932260"), Optional.empty()); } + @Test public void testTimeseriesIndexConventionNoPrefix() { IndexConvention indexConventionNoPrefix = new IndexConventionImpl(null); String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "dataset_datasetusagestatisticsaspect_v1"; - assertEquals(indexConventionNoPrefix.getTimeseriesAspectIndexName(entityName, aspectName), expectedIndexName); + assertEquals( + indexConventionNoPrefix.getTimeseriesAspectIndexName(entityName, aspectName), + expectedIndexName); assertEquals(indexConventionNoPrefix.getPrefix(), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName(expectedIndexName), Optional.of( - Pair.of(entityName, aspectName))); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName(expectedIndexName), + Optional.of(Pair.of(entityName, aspectName))); + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); assertEquals(indexConventionNoPrefix.getEntityAndAspectName("dataset_v2"), Optional.empty()); - assertEquals(indexConventionNoPrefix.getEntityAndAspectName("dashboard_dashboardusagestatisticsaspect_v1"), + assertEquals( + indexConventionNoPrefix.getEntityAndAspectName( + "dashboard_dashboardusagestatisticsaspect_v1"), Optional.of(Pair.of("dashboard", "dashboardusagestatistics"))); } @@ -57,10 +68,17 @@ public void testTimeseriesIndexConventionPrefix() { String entityName = "dataset"; String aspectName = "datasetusagestatistics"; String expectedIndexName = "prefix_dataset_datasetusagestatisticsaspect_v1"; - assertEquals(indexConventionPrefix.getTimeseriesAspectIndexName(entityName, aspectName), expectedIndexName); + assertEquals( + indexConventionPrefix.getTimeseriesAspectIndexName(entityName, aspectName), + expectedIndexName); assertEquals(indexConventionPrefix.getPrefix(), Optional.of("prefix")); - assertEquals(indexConventionPrefix.getEntityAndAspectName(expectedIndexName), Optional.of(Pair.of(entityName, aspectName))); - assertEquals(indexConventionPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); - assertEquals(indexConventionPrefix.getEntityAndAspectName("prefix_datasetusagestatisticsaspect_v1"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityAndAspectName(expectedIndexName), + Optional.of(Pair.of(entityName, aspectName))); + assertEquals( + indexConventionPrefix.getEntityAndAspectName("totally not an index"), Optional.empty()); + assertEquals( + indexConventionPrefix.getEntityAndAspectName("prefix_datasetusagestatisticsaspect_v1"), + Optional.empty()); } } diff --git a/mock-entity-registry/src/main/java/mock/MockAspectSpec.java b/mock-entity-registry/src/main/java/mock/MockAspectSpec.java index 594bc583eeef0..92321cce3d905 100644 --- a/mock-entity-registry/src/main/java/mock/MockAspectSpec.java +++ b/mock-entity-registry/src/main/java/mock/MockAspectSpec.java @@ -12,16 +12,24 @@ import java.util.List; import 
javax.annotation.Nonnull; - public class MockAspectSpec extends AspectSpec { - public MockAspectSpec(@Nonnull AspectAnnotation aspectAnnotation, + public MockAspectSpec( + @Nonnull AspectAnnotation aspectAnnotation, @Nonnull List searchableFieldSpecs, @Nonnull List searchScoreFieldSpecs, @Nonnull List relationshipFieldSpecs, @Nonnull List timeseriesFieldSpecs, - @Nonnull List timeseriesFieldCollectionSpecs, RecordDataSchema schema, + @Nonnull List timeseriesFieldCollectionSpecs, + RecordDataSchema schema, Class aspectClass) { - super(aspectAnnotation, searchableFieldSpecs, searchScoreFieldSpecs, relationshipFieldSpecs, timeseriesFieldSpecs, - timeseriesFieldCollectionSpecs, schema, aspectClass); + super( + aspectAnnotation, + searchableFieldSpecs, + searchScoreFieldSpecs, + relationshipFieldSpecs, + timeseriesFieldSpecs, + timeseriesFieldCollectionSpecs, + schema, + aspectClass); } } diff --git a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java index 54dd25613ed4c..a324f9ce0195b 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java +++ b/mock-entity-registry/src/main/java/mock/MockEntityRegistry.java @@ -11,7 +11,6 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - public class MockEntityRegistry implements EntityRegistry { @Nonnull @Override @@ -48,5 +47,4 @@ public AspectTemplateEngine getAspectTemplateEngine() { public Map getAspectSpecs() { return new HashMap<>(); } - } diff --git a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java index d740fff29e258..0013d6615a71d 100644 --- a/mock-entity-registry/src/main/java/mock/MockEntitySpec.java +++ b/mock-entity-registry/src/main/java/mock/MockEntitySpec.java @@ -1,5 +1,7 @@ package mock; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePaths; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -27,9 +29,6 @@ import java.util.Map; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class MockEntitySpec implements EntitySpec { private String _name; @@ -41,7 +40,8 @@ public MockEntitySpec(String name) { if (DATASET_ENTITY_NAME.equals(name)) { _aspectTypeMap.put(BROWSE_PATHS_ASPECT_NAME, getAspectSpec(BROWSE_PATHS_ASPECT_NAME)); _aspectTypeMap.put(BROWSE_PATHS_V2_ASPECT_NAME, getAspectSpec(BROWSE_PATHS_V2_ASPECT_NAME)); - _aspectTypeMap.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, getAspectSpec(DATA_PLATFORM_INSTANCE_ASPECT_NAME)); + _aspectTypeMap.put( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, getAspectSpec(DATA_PLATFORM_INSTANCE_ASPECT_NAME)); } } @@ -81,16 +81,23 @@ public AspectSpec getKeyAspectSpec() { return null; } - public AspectSpec createAspectSpec(T type, String name) { - return new MockAspectSpec(new AspectAnnotation(name, false, false, null), - Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), - Collections.emptyList(), type.schema(), (Class) type.getClass().asSubclass(RecordTemplate.class)); + public AspectSpec createAspectSpec(T type, String name) { + return new MockAspectSpec( + new AspectAnnotation(name, false, false, null), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + Collections.emptyList(), + type.schema(), + (Class) type.getClass().asSubclass(RecordTemplate.class)); } @Override public List getAspectSpecs() 
{
-    return ASPECT_TYPE_MAP.keySet().stream().map(name -> createAspectSpec(ASPECT_TYPE_MAP.get(name), name)).collect(
-        Collectors.toList());
+    return ASPECT_TYPE_MAP.keySet().stream()
+        .map(name -> createAspectSpec(ASPECT_TYPE_MAP.get(name), name))
+        .collect(Collectors.toList());
   }
 
   @Override
@@ -118,6 +125,7 @@ public Boolean hasAspect(String name) {
     ASPECT_TYPE_MAP.put(BROWSE_PATHS_V2_ASPECT_NAME, new BrowsePathsV2());
     ASPECT_TYPE_MAP.put(DATA_PLATFORM_INSTANCE_ASPECT_NAME, new DataPlatformInstance());
   }
+
   @Override
   public AspectSpec getAspectSpec(String name) {
     return createAspectSpec(ASPECT_TYPE_MAP.get(name), name);
diff --git a/test-models/src/main/java/com/datahub/utils/TestUtils.java b/test-models/src/main/java/com/datahub/utils/TestUtils.java
index 1aca3a890caa6..6a2d219fa9b4d 100644
--- a/test-models/src/main/java/com/datahub/utils/TestUtils.java
+++ b/test-models/src/main/java/com/datahub/utils/TestUtils.java
@@ -9,7 +9,6 @@
 import javax.annotation.Nonnull;
 import org.apache.commons.io.IOUtils;
 
-
 public final class TestUtils {
   private TestUtils() {
     // Util class
@@ -18,7 +17,8 @@ private TestUtils() {
   @Nonnull
   public static String loadJsonFromResource(@Nonnull String resourceName) throws IOException {
     final String jsonStr =
-        IOUtils.toString(ClassLoader.getSystemResourceAsStream(resourceName), Charset.defaultCharset());
+        IOUtils.toString(
+            ClassLoader.getSystemResourceAsStream(resourceName), Charset.defaultCharset());
     return jsonStr.replaceAll("\\s+", "");
   }
 
@@ -45,4 +45,3 @@ public static BarUrn makeBarUrn(int id) {
     return new BarUrn(id);
   }
 }
-
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java
index 6b1cd545ba00d..c9d308522f6b9 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrn.java
@@ -4,11 +4,11 @@
 import com.linkedin.common.urn.Urn;
 import java.net.URISyntaxException;
 
-
 public final class BarUrn extends Urn {
   public static final String ENTITY_TYPE = "bar";
 
-  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions.
+  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal /
+  // external URN definitions.
   private final int _id;
 
   public BarUrn(int id) {
@@ -22,7 +22,8 @@ public int getBarIdEntity() {
 
   @Override
   public boolean equals(Object obj) {
-    // Override for find bugs, bug delegate to super implementation, both in open source and internally.
+    // Override for find bugs, but delegate to super implementation, both in open source and
+    // internally.
return super.equals(obj);
   }
 
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java
index 8970a011eca14..774da2687893b 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BarUrnCoercer.java
@@ -2,7 +2,7 @@
 
 import com.linkedin.data.template.Custom;
 
-
 public class BarUrnCoercer extends BaseUrnCoercer<BarUrn> {
-  private static final boolean REGISTER_COERCER = Custom.registerCoercer(new BarUrnCoercer(), BarUrn.class);
+  private static final boolean REGISTER_COERCER =
+      Custom.registerCoercer(new BarUrnCoercer(), BarUrn.class);
 }
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java
index ab0c28f9fbb9b..4fffa8b4f2558 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BaseUrnCoercer.java
@@ -5,10 +5,8 @@
 import com.linkedin.data.template.TemplateOutputCastException;
 import java.net.URISyntaxException;
 
-
 public abstract class BaseUrnCoercer<T extends Urn> implements DirectCoercer<T> {
-  public BaseUrnCoercer() {
-  }
+  public BaseUrnCoercer() {}
 
   public Object coerceInput(T object) throws ClassCastException {
     return object.toString();
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java
index dddf7721c64a8..81e0adab84472 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrn.java
@@ -3,11 +3,11 @@
 import com.linkedin.common.urn.Urn;
 import java.net.URISyntaxException;
 
-
 public final class BazUrn extends Urn {
   public static final String ENTITY_TYPE = "baz";
 
-  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions.
+  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal /
+  // external URN definitions.
   private final int _id;
 
   public BazUrn(int id) throws URISyntaxException {
@@ -21,7 +21,8 @@ public int getBazIdEntity() {
 
   @Override
   public boolean equals(Object obj) {
-    // Override for find bugs, bug delegate to super implementation, both in open source and internally.
+    // Override for find bugs, but delegate to super implementation, both in open source and
+    // internally.
return super.equals(obj);
   }
 
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java
index 87b8929d236db..33ca9d0b060c6 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/BazUrnCoercer.java
@@ -2,7 +2,7 @@
 
 import com.linkedin.data.template.Custom;
 
-
 public class BazUrnCoercer extends BaseUrnCoercer<BazUrn> {
-  private static final boolean REGISTER_COERCER = Custom.registerCoercer(new BazUrnCoercer(), BazUrn.class);
+  private static final boolean REGISTER_COERCER =
+      Custom.registerCoercer(new BazUrnCoercer(), BazUrn.class);
 }
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java
index a8f2bab3c21dd..1047e39f9905f 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrn.java
@@ -3,11 +3,11 @@
 import com.linkedin.common.urn.Urn;
 import java.net.URISyntaxException;
 
-
 public final class FooUrn extends Urn {
   public static final String ENTITY_TYPE = "foo";
 
-  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions.
+  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal /
+  // external URN definitions.
   private final int _id;
 
   public FooUrn(int id) throws URISyntaxException {
@@ -21,7 +21,8 @@ public int getFooIdEntity() {
 
   @Override
   public boolean equals(Object obj) {
-    // Override for find bugs, bug delegate to super implementation, both in open source and internally.
+    // Override for find bugs, but delegate to super implementation, both in open source and
+    // internally.
     return super.equals(obj);
   }
 
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java
index a2d65dc5f8bd3..3e7bd95fdf3bc 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/FooUrnCoercer.java
@@ -2,7 +2,7 @@
 
 import com.linkedin.data.template.Custom;
 
-
 public class FooUrnCoercer extends BaseUrnCoercer<FooUrn> {
-  private static final boolean REGISTER_COERCER = Custom.registerCoercer(new FooUrnCoercer(), FooUrn.class);
+  private static final boolean REGISTER_COERCER =
+      Custom.registerCoercer(new FooUrnCoercer(), FooUrn.class);
 }
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java
index 6cec6042401a1..bfa22bdeb7f90 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrn.java
@@ -4,11 +4,11 @@
 import com.linkedin.common.urn.Urn;
 import java.net.URISyntaxException;
 
-
 public final class PizzaUrn extends Urn {
   public static final String ENTITY_TYPE = "pizza";
 
-  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal / external URN definitions.
+  // Can be obtained via getEntityKey, but not in open source. We need to unify the internal /
+  // external URN definitions.
private final int _id;
 
   public PizzaUrn(int id) {
@@ -22,7 +22,8 @@ public int getPizzaId() {
 
   @Override
   public boolean equals(Object obj) {
-    // Override for find bugs, bug delegate to super implementation, both in open source and internally.
+    // Override for find bugs, but delegate to super implementation, both in open source and
+    // internally.
     return super.equals(obj);
   }
 
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java
index 64bfffe03f77d..30af8171e0eef 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/PizzaUrnCoercer.java
@@ -2,7 +2,7 @@
 
 import com.linkedin.data.template.Custom;
 
-
 public class PizzaUrnCoercer extends BaseUrnCoercer<PizzaUrn> {
-  private static final boolean REGISTER_COERCER = Custom.registerCoercer(new PizzaUrnCoercer(), PizzaUrn.class);
+  private static final boolean REGISTER_COERCER =
+      Custom.registerCoercer(new PizzaUrnCoercer(), PizzaUrn.class);
 }
diff --git a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java
index 8467f15f85a49..7af0eb39c70d9 100644
--- a/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java
+++ b/test-models/src/main/javaPegasus/com/datahub/test/testing/urn/SingleAspectEntityUrn.java
@@ -3,7 +3,6 @@
 import com.linkedin.common.urn.Urn;
 import java.net.URISyntaxException;
 
-
 public final class SingleAspectEntityUrn extends Urn {
 
   private static final String ENTITY_TYPE = "entitySingleAspectEntity";